gix-ref-0.43.0/.cargo_vcs_info.json

{
  "git": {
    "sha1": "b050327e76f234b19be921b78b7b28e034319fdb"
  },
  "path_in_vcs": "gix-ref"
}

gix-ref-0.43.0/Cargo.toml

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
rust-version = "1.65"
name = "gix-ref"
version = "0.43.0"
authors = ["Sebastian Thiel <sebastian.thiel@icloud.com>"]
include = [
    "src/**/*",
    "LICENSE-*",
]
autotests = false
description = "A crate to handle git references"
license = "MIT OR Apache-2.0"
repository = "https://github.com/Byron/gitoxide"

[package.metadata.docs.rs]
features = [
    "document-features",
    "serde",
]

[lib]
test = true
doctest = false

[dependencies.document-features]
version = "0.2.1"
optional = true

[dependencies.gix-actor]
version = "^0.31.0"

[dependencies.gix-date]
version = "^0.8.5"

[dependencies.gix-features]
version = "^0.38.1"
features = ["walkdir"]

[dependencies.gix-fs]
version = "^0.10.1"

[dependencies.gix-hash]
version = "^0.14.2"

[dependencies.gix-lock]
version = "^13.0.0"

[dependencies.gix-object]
version = "^0.42.0"

[dependencies.gix-path]
version = "^0.10.7"

[dependencies.gix-tempfile]
version = "^13.0.0"
default-features = false

[dependencies.gix-utils]
version = "^0.1.11"

[dependencies.gix-validate]
version = "^0.8.4"

[dependencies.memmap2]
version = "0.9.0"

[dependencies.serde]
version = "1.0.114"
features = ["derive"]
optional = true
default-features = false

[dependencies.thiserror]
version = "1.0.34"

[dependencies.winnow]
version = "0.6.0"
features = ["simd"]

[dev-dependencies]

[features]
serde = [
    "dep:serde",
    "gix-hash/serde",
    "gix-actor/serde",
    "gix-object/serde",
]

gix-ref-0.43.0/Cargo.toml.orig

[package]
name = "gix-ref"
version = "0.43.0"
repository = "https://github.com/Byron/gitoxide"
license = "MIT OR Apache-2.0"
description = "A crate to handle git references"
authors = ["Sebastian Thiel <sebastian.thiel@icloud.com>"]
edition = "2021"
include = ["src/**/*", "LICENSE-*"]
rust-version = "1.65"
autotests = false

[lib]
doctest = false
test = true

[features]
## Data structures implement `serde::Serialize` and `serde::Deserialize`.
serde = ["dep:serde", "gix-hash/serde", "gix-actor/serde", "gix-object/serde"] [dependencies] gix-features = { version = "^0.38.1", path = "../gix-features", features = ["walkdir"]} gix-fs = { version = "^0.10.1", path = "../gix-fs" } gix-path = { version = "^0.10.7", path = "../gix-path" } gix-hash = { version = "^0.14.2", path = "../gix-hash" } gix-date = { version = "^0.8.5", path = "../gix-date" } gix-object = { version = "^0.42.0", path = "../gix-object" } gix-utils = { version = "^0.1.11", path = "../gix-utils" } gix-validate = { version = "^0.8.4", path = "../gix-validate" } gix-actor = { version = "^0.31.0", path = "../gix-actor" } gix-lock = { version = "^13.0.0", path = "../gix-lock" } gix-tempfile = { version = "^13.0.0", default-features = false, path = "../gix-tempfile" } thiserror = "1.0.34" winnow = { version = "0.6.0", features = ["simd"] } serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"]} # packed refs memmap2 = "0.9.0" document-features = { version = "0.2.1", optional = true } [dev-dependencies] gix-testtools = { path = "../tests/tools" } [package.metadata.docs.rs] features = ["document-features", "serde"] gix-ref-0.43.0/LICENSE-APACHE000064400000000000000000000247461046102023000132120ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
   You may reproduce and distribute copies of the Work or Derivative
   Works thereof in any medium, with or without modifications, and in
   Source or Object form, provided that You meet the following
   conditions:

   (a) You must give any other recipients of the Work or Derivative
       Works a copy of this License; and

   (b) You must cause any modified files to carry prominent notices
       stating that You changed the files; and

   (c) You must retain, in the Source form of any Derivative Works that
       You distribute, all copyright, patent, trademark, and attribution
       notices from the Source form of the Work, excluding those notices
       that do not pertain to any part of the Derivative Works; and

   (d) If the Work includes a "NOTICE" text file as part of its
       distribution, then any Derivative Works that You distribute must
       include a readable copy of the attribution notices contained
       within such NOTICE file, excluding those notices that do not
       pertain to any part of the Derivative Works, in at least one of
       the following places: within a NOTICE text file distributed as
       part of the Derivative Works; within the Source form or
       documentation, if provided along with the Derivative Works; or,
       within a display generated by the Derivative Works, if and
       wherever such third-party notices normally appear. The contents
       of the NOTICE file are for informational purposes only and do
       not modify the License. You may add Your own attribution notices
       within Derivative Works that You distribute, alongside or as an
       addendum to the NOTICE text from the Work, provided that such
       additional attribution notices cannot be construed as modifying
       the License.

   You may add Your own copyright statement to Your modifications and
   may provide additional or different license terms and conditions
   for use, reproduction, or distribution of Your modifications, or
   for any such Derivative Works as a whole, provided Your use,
   reproduction, and distribution of the Work otherwise complies with
   the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
   any Contribution intentionally submitted for inclusion in the Work
   by You to the Licensor shall be under the terms and conditions of
   this License, without any additional terms or conditions.
   Notwithstanding the above, nothing herein shall supersede or modify
   the terms of any separate license agreement you may have executed
   with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
   names, trademarks, service marks, or product names of the Licensor,
   except as required for reasonable and customary use in describing the
   origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
   agreed to in writing, Licensor provides the Work (and each
   Contributor provides its Contributions) on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
   implied, including, without limitation, any warranties or conditions
   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
   PARTICULAR PURPOSE. You are solely responsible for determining the
   appropriateness of using or redistributing the Work and assume any
   risks associated with Your exercise of permissions under this License.

8. Limitation of Liability.
   In no event and under no legal theory, whether in tort (including
   negligence), contract, or otherwise, unless required by applicable law
   (such as deliberate and grossly negligent acts) or agreed to in
   writing, shall any Contributor be liable to You for damages, including
   any direct, indirect, special, incidental, or consequential damages of
   any character arising as a result of this License or out of the use or
   inability to use the Work (including but not limited to damages for
   loss of goodwill, work stoppage, computer failure or malfunction, or
   any and all other commercial damages or losses), even if such
   Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing the
   Work or Derivative Works thereof, You may choose to offer, and charge
   a fee for, acceptance of support, warranty, indemnity, or other
   liability obligations and/or rights consistent with this License.
   However, in accepting such obligations, You may act only on Your own
   behalf and on Your sole responsibility, not on behalf of any other
   Contributor, and only if You agree to indemnify, defend, and hold each
   Contributor harmless for any liability incurred by, or claims asserted
   against, such Contributor by reason of your accepting any such
   warranty or additional liability.

END OF TERMS AND CONDITIONS

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

gix-ref-0.43.0/LICENSE-MIT

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
gix-ref-0.43.0/src/fullname.rs

use std::{borrow::Borrow, path::Path};

use gix_object::bstr::{BStr, BString, ByteSlice};

use crate::{bstr::ByteVec, name::is_pseudo_ref, Category, FullName, FullNameRef, Namespace, PartialNameRef};

impl TryFrom<&str> for FullName {
    type Error = gix_validate::reference::name::Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Ok(FullName(
            gix_validate::reference::name(value.as_bytes().as_bstr())?.into(),
        ))
    }
}

impl TryFrom<String> for FullName {
    type Error = gix_validate::reference::name::Error;

    fn try_from(value: String) -> Result<Self, Self::Error> {
        gix_validate::reference::name(value.as_bytes().as_bstr())?;
        Ok(FullName(value.into()))
    }
}

impl TryFrom<&BStr> for FullName {
    type Error = gix_validate::reference::name::Error;

    fn try_from(value: &BStr) -> Result<Self, Self::Error> {
        Ok(FullName(gix_validate::reference::name(value)?.into()))
    }
}

impl TryFrom<BString> for FullName {
    type Error = gix_validate::reference::name::Error;

    fn try_from(value: BString) -> Result<Self, Self::Error> {
        gix_validate::reference::name(value.as_ref())?;
        Ok(FullName(value))
    }
}

impl TryFrom<&BString> for FullName {
    type Error = gix_validate::reference::name::Error;

    fn try_from(value: &BString) -> Result<Self, Self::Error> {
        gix_validate::reference::name(value.as_ref())?;
        Ok(FullName(value.clone()))
    }
}

impl From<FullName> for BString {
    fn from(name: FullName) -> Self {
        name.0
    }
}

impl<'a> From<&'a FullNameRef> for &'a BStr {
    fn from(name: &'a FullNameRef) -> Self {
        &name.0
    }
}

impl<'a> From<&'a FullNameRef> for FullName {
    fn from(value: &'a FullNameRef) -> Self {
        FullName(value.as_bstr().into())
    }
}

impl std::fmt::Display for FullName {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.0, f)
    }
}

impl FullNameRef {
    /// Interpret this fully qualified reference name as partial name.
    pub fn as_partial_name(&self) -> &PartialNameRef {
        PartialNameRef::new_unchecked(self.0.as_bstr())
    }

    /// Convert this name into the relative path identifying the reference location.
    pub fn to_path(&self) -> &Path {
        gix_path::from_byte_slice(&self.0)
    }

    /// Return ourselves as byte string which is a valid refname
    pub fn as_bstr(&self) -> &BStr {
        &self.0
    }

    /// Strip well-known prefixes from the name and return it.
    ///
    /// If there is no such prefix, the original name is returned.
    pub fn shorten(&self) -> &BStr {
        self.category_and_short_name()
            .map_or_else(|| self.0.as_bstr(), |(_, short)| short)
    }

    /// Classify this name, or return `None` if it's unclassified.
    pub fn category(&self) -> Option<Category<'_>> {
        self.category_and_short_name().map(|(cat, _)| cat)
    }

    /// Classify this name, or return `None` if it's unclassified. If `Some`,
    /// the shortened name is returned as well.
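    ///
    /// A sketch of the mapping, as implied by the prefix handling below (illustrative text only,
    /// since doctests are disabled for this crate):
    ///
    /// ```text
    /// refs/tags/v1.0           -> (Category::Tag, "v1.0")
    /// refs/heads/main          -> (Category::LocalBranch, "main")
    /// refs/remotes/origin/main -> (Category::RemoteBranch, "origin/main")
    /// HEAD                     -> (Category::PseudoRef, "HEAD")
    /// ```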
    pub fn category_and_short_name(&self) -> Option<(Category<'_>, &BStr)> {
        let name = self.0.as_bstr();
        for category in &[Category::Tag, Category::LocalBranch, Category::RemoteBranch] {
            if let Some(shortened) = name.strip_prefix(category.prefix().as_bytes()) {
                return Some((*category, shortened.as_bstr()));
            }
        }

        for category in &[
            Category::Note,
            Category::Bisect,
            Category::WorktreePrivate,
            Category::Rewritten,
        ] {
            if name.starts_with(category.prefix().as_ref()) {
                return Some((
                    *category,
                    name.strip_prefix(b"refs/")
                        .expect("we checked for refs/* above")
                        .as_bstr(),
                ));
            }
        }

        if is_pseudo_ref(name) {
            Some((Category::PseudoRef, name))
        } else if let Some(shortened) = name.strip_prefix(Category::MainPseudoRef.prefix().as_bytes()) {
            if shortened.starts_with_str("refs/") {
                (Category::MainRef, shortened.as_bstr()).into()
            } else {
                is_pseudo_ref(shortened.into()).then(|| (Category::MainPseudoRef, shortened.as_bstr()))
            }
        } else if let Some(shortened_with_worktree_name) =
            name.strip_prefix(Category::LinkedPseudoRef { name: "".into() }.prefix().as_bytes())
        {
            let (name, shortened) = shortened_with_worktree_name.find_byte(b'/').map(|pos| {
                (
                    shortened_with_worktree_name[..pos].as_bstr(),
                    shortened_with_worktree_name[pos + 1..].as_bstr(),
                )
            })?;
            if shortened.starts_with_str("refs/") {
                (Category::LinkedRef { name }, shortened.as_bstr()).into()
            } else {
                is_pseudo_ref(shortened).then(|| (Category::LinkedPseudoRef { name }, shortened.as_bstr()))
            }
        } else {
            None
        }
    }
}

impl FullName {
    /// Convert this name into the relative path, lossily, identifying the reference location relative to a repository
    pub fn to_path(&self) -> &Path {
        gix_path::from_byte_slice(&self.0)
    }

    /// Dissolve this instance and return the buffer.
    pub fn into_inner(self) -> BString {
        self.0
    }

    /// Return ourselves as byte string which is a valid refname
    pub fn as_bstr(&self) -> &BStr {
        self.0.as_bstr()
    }

    /// Modify ourself so that we use `namespace` as prefix, if it is not yet in the `namespace`
    pub fn prefix_namespace(&mut self, namespace: &Namespace) -> &mut Self {
        if !self.0.starts_with_str(&namespace.0) {
            self.0.insert_str(0, &namespace.0);
        }
        self
    }

    /// Strip the given `namespace` off the beginning of this name, if it is in this namespace.
    pub fn strip_namespace(&mut self, namespace: &Namespace) -> &mut Self {
        if self.0.starts_with_str(&namespace.0) {
            let prev_len = self.0.len();
            self.0.copy_within(namespace.0.len().., 0);
            self.0.resize(prev_len - namespace.0.len(), 0);
        }
        self
    }

    /// Strip well-known prefixes from the name and return it.
    ///
    /// If there is no such prefix, the original name is returned.
    pub fn shorten(&self) -> &BStr {
        self.as_ref().shorten()
    }

    /// Classify this name, or return `None` if it's unclassified.
    pub fn category(&self) -> Option<Category<'_>> {
        self.as_ref().category()
    }

    /// Classify this name, or return `None` if it's unclassified. If `Some`,
    /// the shortened name is returned as well.
    pub fn category_and_short_name(&self) -> Option<(crate::Category<'_>, &BStr)> {
        self.as_ref().category_and_short_name()
    }
}

impl FullNameRef {
    /// Return the file name portion of a full name, for instance `main` if the
    /// full name was `refs/heads/main`.
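    ///
    /// For instance (illustrative only):
    ///
    /// ```text
    /// refs/heads/main -> main
    /// HEAD            -> HEAD
    /// ```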
    pub fn file_name(&self) -> &BStr {
        self.0.rsplitn(2, |b| *b == b'/').next().expect("valid ref").as_bstr()
    }
}

impl Borrow<FullNameRef> for FullName {
    #[inline]
    fn borrow(&self) -> &FullNameRef {
        FullNameRef::new_unchecked(self.0.as_bstr())
    }
}

impl AsRef<FullNameRef> for FullName {
    fn as_ref(&self) -> &FullNameRef {
        self.borrow()
    }
}

impl ToOwned for FullNameRef {
    type Owned = FullName;

    fn to_owned(&self) -> Self::Owned {
        FullName(self.0.to_owned())
    }
}

gix-ref-0.43.0/src/lib.rs

//! A crate for handling the references stored in various formats in a git repository.
//!
//! References are also called _refs_; the two terms are used interchangeably.
//!
//! Refs are the way to keep track of objects and come in two flavors.
//!
//! * symbolic refs point to another reference
//! * peeled refs point to an object by its [`ObjectId`]
//!
//! They can be identified by a relative path and stored in various flavors.
//!
//! * **files**
//!   * **[loose][file::Store]**
//!     * one reference maps to a file on disk
//!   * **packed**
//!     * references are stored in a single human-readable file, along with their targets if they are symbolic.
//!
//! ## Feature Flags
#![cfg_attr(
    all(doc, feature = "document-features"),
    doc = ::document_features::document_features!()
)]
#![cfg_attr(all(doc, feature = "document-features"), feature(doc_cfg, doc_auto_cfg))]
#![deny(missing_docs, rust_2018_idioms, unsafe_code)]

use std::borrow::Cow;

use gix_hash::{oid, ObjectId};
pub use gix_object::bstr;
use gix_object::bstr::{BStr, BString};

#[path = "store/mod.rs"]
mod store_impl;
pub use store_impl::{file, packed};

mod fullname;
///
#[allow(clippy::empty_docs)]
pub mod name;
///
#[allow(clippy::empty_docs)]
pub mod namespace;
///
#[allow(clippy::empty_docs)]
pub mod transaction;

mod parse;
mod raw;

pub use raw::Reference;

mod target;

///
#[allow(clippy::empty_docs)]
pub mod log;
///
#[allow(clippy::empty_docs)]
pub mod peel;
///
#[allow(clippy::empty_docs)]
pub mod store {
    /// The way a file store handles the reflog
    #[derive(Default, Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone, Copy)]
    pub enum WriteReflog {
        /// Always write the reflog for all references for ref edits, unconditionally.
        Always,
        /// Write a ref log for ref edits according to the standard rules.
        #[default]
        Normal,
        /// Never write a ref log.
        Disable,
    }

    /// A thread-local handle for interacting with a [`Store`][crate::Store] to find and iterate references.
    #[derive(Clone)]
    #[allow(dead_code)]
    pub(crate) struct Handle {
        /// A way to access shared state with the requirement that interior mutability doesn't leak or is incorporated into error types
        /// if it could. The latter can't happen if references to said internals aren't ever returned.
        state: handle::State,
    }

    #[allow(dead_code)]
    pub(crate) enum State {
        Loose { store: file::Store },
    }

    pub(crate) mod general;

    ///
    #[path = "general/handle/mod.rs"]
    mod handle;
    pub use handle::find;

    use crate::file;
}

/// The git reference store.
/// TODO: Figure out if handles are needed at all, which depends on the ref-table implementation.
#[allow(dead_code)]
pub(crate) struct Store {
    inner: store::State,
}

/// A validated, complete and fully qualified reference name, safe to use for all operations.
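///
/// Construction goes through validation; a sketch of the expected behavior (not a compiled
/// doctest, as doctests are disabled for this crate):
///
/// ```text
/// gix_ref::FullName::try_from("refs/heads/main") // Ok
/// gix_ref::FullName::try_from("refs/heads/")     // Err: a trailing slash is not a valid refname
/// ```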
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct FullName(pub(crate) BString);

/// A validated, complete and fully qualified reference name, safe to use for all operations.
#[derive(Hash, Debug, PartialEq, Eq, Ord, PartialOrd)]
#[repr(transparent)]
pub struct FullNameRef(BStr);

/// A validated and potentially partial reference name, safe to use for common operations.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub struct PartialNameCow<'a>(Cow<'a, BStr>);

/// A validated and potentially partial reference name, safe to use for common operations.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
#[repr(transparent)]
pub struct PartialNameRef(BStr);

/// A validated and potentially partial reference name, safe to use for common operations.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub struct PartialName(BString);

/// A _validated_ prefix for references to act as a namespace.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub struct Namespace(BString);

/// Denotes the kind of reference.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Kind {
    /// A ref that points to an object id
    Peeled,
    /// A ref that points to another reference, adding a level of indirection.
    ///
    /// It can be resolved to an id using the [`peel_to_id_in_place()`][`crate::file::ReferenceExt::peel_to_id_in_place()`] method.
    Symbolic,
}

/// The various known categories of references.
///
/// This translates into a prefix containing all references of a given category.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
pub enum Category<'a> {
    /// A tag in `refs/tags`
    Tag,
    /// A branch in `refs/heads`
    LocalBranch,
    /// A branch in `refs/remotes`
    RemoteBranch,
    /// A note in `refs/notes`
    Note,
    /// Something outside of `refs/` in the current worktree, typically `HEAD`.
    PseudoRef,
    /// A `PseudoRef`, but referenced so that it will always refer to the main worktree by
    /// prefixing it with `main-worktree/`.
    MainPseudoRef,
    /// Any reference that is prefixed with `main-worktree/refs/`
    MainRef,
    /// A `PseudoRef` in another _linked_ worktree, never in the main one, like `worktrees/<name>/HEAD`.
    LinkedPseudoRef {
        /// The name of the worktree.
        name: &'a BStr,
    },
    /// Any reference that is prefixed with `worktrees/<name>/refs/`.
    LinkedRef {
        /// The name of the worktree.
        name: &'a BStr,
    },
    /// A ref that is private to each worktree (_linked_ or _main_), with `refs/bisect/` prefix
    Bisect,
    /// A ref that is private to each worktree (_linked_ or _main_), with `refs/rewritten/` prefix
    Rewritten,
    /// A ref that is private to each worktree (_linked_ or _main_), with `refs/worktree/` prefix
    WorktreePrivate,
    // REF_TYPE_NORMAL, /* normal/shared refs inside refs/ */
}

/// Denotes a ref target, equivalent to [`Kind`], but with mutable data.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Target {
    /// A ref that points to an object id
    Peeled(ObjectId),
    /// A ref that points to another reference by its validated name, adding a level of indirection.
    ///
    /// Note that this is an extension of gitoxide which will be helpful in logging all reference changes.
    Symbolic(FullName),
}

/// Denotes a ref target, equivalent to [`Kind`], but with immutable data.
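///
/// A sketch of how it is typically consumed (illustrative only):
///
/// ```text
/// match target {
///     TargetRef::Peeled(oid) => { /* use the object id directly */ }
///     TargetRef::Symbolic(name) => { /* resolve `name` to follow the indirection */ }
/// }
/// ```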
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
pub enum TargetRef<'a> {
    /// A ref that points to an object id
    Peeled(&'a oid),
    /// A ref that points to another reference by its validated name, adding a level of indirection.
    Symbolic(&'a FullNameRef),
}

gix-ref-0.43.0/src/log.rs

use gix_hash::ObjectId;
use gix_object::bstr::BString;

/// A parsed ref log line that can be changed
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Line {
    /// The previous object id. Can be a null-sha to indicate this is a line for a new ref.
    pub previous_oid: ObjectId,
    /// The new object id. Can be a null-sha to indicate this ref is being deleted.
    pub new_oid: ObjectId,
    /// The signature of the currently configured committer.
    pub signature: gix_actor::Signature,
    /// The message providing details about the operation performed in this log line.
    pub message: BString,
}

gix-ref-0.43.0/src/name.rs

use std::{convert, convert::Infallible, ffi::OsStr, path::Path};

use gix_object::bstr::{BStr, BString, ByteSlice, ByteVec};

use crate::{Category, FullName, FullNameRef, PartialName, PartialNameRef};

/// The error used in the [`PartialNameRef`]`::try_from`(…) implementations.
pub type Error = gix_validate::reference::name::Error;

impl<'a> Category<'a> {
    /// Return the prefix that would contain all references of our kind, or an empty string if the reference would
    /// be directly inside of the [`git_dir()`][crate::file::Store::git_dir()].
    pub fn prefix(&self) -> &BStr {
        match self {
            Category::Tag => b"refs/tags/".as_bstr(),
            Category::LocalBranch => b"refs/heads/".as_bstr(),
            Category::RemoteBranch => b"refs/remotes/".as_bstr(),
            Category::Note => b"refs/notes/".as_bstr(),
            Category::MainPseudoRef => b"main-worktree/".as_bstr(),
            Category::MainRef => b"main-worktree/refs/".as_bstr(),
            Category::PseudoRef => b"".as_bstr(),
            Category::LinkedPseudoRef { .. } => b"worktrees/".as_bstr(),
            Category::LinkedRef { .. } => b"worktrees/".as_bstr(),
            Category::Bisect => b"refs/bisect/".as_bstr(),
            Category::Rewritten => b"refs/rewritten/".as_bstr(),
            Category::WorktreePrivate => b"refs/worktree/".as_bstr(),
        }
    }

    /// Returns true if references of this category are private to their worktree, and never shared with other worktrees.
    pub fn is_worktree_private(&self) -> bool {
        matches!(
            self,
            Category::MainPseudoRef
                | Category::PseudoRef
                | Category::LinkedPseudoRef { ..
                }
                | Category::WorktreePrivate
                | Category::Rewritten
                | Category::Bisect
        )
    }
}

impl FullNameRef {
    pub(crate) fn new_unchecked(v: &BStr) -> &Self {
        // SAFETY: FullNameRef is transparent and equivalent to a &BStr if provided as reference
        #[allow(unsafe_code)]
        unsafe {
            std::mem::transmute(v)
        }
    }
}

impl PartialNameRef {
    pub(crate) fn new_unchecked(v: &BStr) -> &Self {
        // SAFETY: PartialNameRef is transparent and equivalent to a &BStr if provided as reference
        #[allow(unsafe_code)]
        unsafe {
            std::mem::transmute(v)
        }
    }
}

impl PartialNameRef {
    pub(crate) fn looks_like_full_name(&self) -> bool {
        let name = self.0.as_bstr();
        name.starts_with_str("refs/")
            || name.starts_with(Category::MainPseudoRef.prefix())
            || name.starts_with(Category::LinkedPseudoRef { name: "".into() }.prefix())
            || is_pseudo_ref(name)
    }

    pub(crate) fn construct_full_name_ref<'buf>(&self, inbetween: &str, buf: &'buf mut BString) -> &'buf FullNameRef {
        buf.clear();
        if !self.looks_like_full_name() {
            buf.push_str("refs/");
        }
        if !inbetween.is_empty() {
            buf.push_str(inbetween);
            buf.push_byte(b'/');
        }
        buf.extend_from_slice(&self.0);
        FullNameRef::new_unchecked(buf.as_bstr())
    }
}

impl PartialNameRef {
    /// Convert this name into the relative path possibly identifying the reference location.
    /// Note that it may be only a partial path though.
    pub fn to_partial_path(&self) -> &Path {
        gix_path::from_byte_slice(self.0.as_bstr())
    }

    /// Provide the name as binary string which is known to be a valid partial ref name.
    pub fn as_bstr(&self) -> &BStr {
        &self.0
    }
}

impl PartialName {
    /// Append the `component` to ourselves and validate the newly created partial path.
    pub fn join(self, component: &BStr) -> Result<PartialName, Error> {
        let mut b = self.0;
        b.push_byte(b'/');
        b.extend(component.as_bytes());
        gix_validate::reference::name_partial(b.as_ref())?;
        Ok(PartialName(b))
    }
}

impl<'a> convert::TryFrom<&'a BStr> for &'a FullNameRef {
    type Error = Error;

    fn try_from(v: &'a BStr) -> Result<Self, Self::Error> {
        Ok(FullNameRef::new_unchecked(gix_validate::reference::name(v)?))
    }
}

impl<'a> From<&'a FullNameRef> for &'a PartialNameRef {
    fn from(v: &'a FullNameRef) -> Self {
        PartialNameRef::new_unchecked(v.0.as_bstr())
    }
}

impl<'a> convert::TryFrom<&'a OsStr> for &'a PartialNameRef {
    type Error = Error;

    fn try_from(v: &'a OsStr) -> Result<Self, Self::Error> {
        let v = gix_path::os_str_into_bstr(v).map_err(|_| {
            Error::Tag(gix_validate::tag::name::Error::InvalidByte {
                byte: "<unknown encoding>".into(),
            })
        })?;
        Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(
            v.as_bstr(),
        )?))
    }
}

mod impls {
    use std::borrow::Borrow;

    use crate::{bstr::ByteSlice, PartialName, PartialNameRef};

    impl Borrow<PartialNameRef> for PartialName {
        #[inline]
        fn borrow(&self) -> &PartialNameRef {
            PartialNameRef::new_unchecked(self.0.as_bstr())
        }
    }

    impl AsRef<PartialNameRef> for PartialName {
        fn as_ref(&self) -> &PartialNameRef {
            self.borrow()
        }
    }

    impl ToOwned for PartialNameRef {
        type Owned = PartialName;

        fn to_owned(&self) -> Self::Owned {
            PartialName(self.0.to_owned())
        }
    }
}

impl<'a> convert::TryFrom<&'a BString> for &'a PartialNameRef {
    type Error = Error;

    fn try_from(v: &'a BString) -> Result<Self, Self::Error> {
        Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(
            v.as_ref(),
        )?))
    }
}

impl<'a> convert::TryFrom<&'a BStr> for &'a PartialNameRef {
    type Error = Error;

    fn try_from(v: &'a BStr) -> Result<Self, Self::Error> {
        Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(v)?))
    }
}

impl<'a> convert::TryFrom<&'a PartialName> for &'a PartialNameRef {
    type Error = Error;

    fn try_from(v: &'a PartialName) -> Result<Self, Self::Error> {
        Ok(PartialNameRef::new_unchecked(v.0.as_bstr()))
    }
}
impl<'a> convert::TryFrom<&'a str> for &'a FullNameRef {
    type Error = Error;

    fn try_from(v: &'a str) -> Result<Self, Self::Error> {
        let v = v.as_bytes().as_bstr();
        Ok(FullNameRef::new_unchecked(gix_validate::reference::name(v)?))
    }
}

impl<'a> convert::TryFrom<&'a str> for &'a PartialNameRef {
    type Error = Error;

    fn try_from(v: &'a str) -> Result<Self, Self::Error> {
        let v = v.as_bytes().as_bstr();
        Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(v)?))
    }
}

impl<'a> convert::TryFrom<&'a str> for PartialName {
    type Error = Error;

    fn try_from(v: &'a str) -> Result<Self, Self::Error> {
        let v = v.as_bytes().as_bstr();
        Ok(PartialName(gix_validate::reference::name_partial(v)?.to_owned()))
    }
}

impl<'a> convert::TryFrom<&'a FullName> for &'a PartialNameRef {
    type Error = Infallible;

    fn try_from(v: &'a FullName) -> Result<Self, Self::Error> {
        Ok(v.as_ref().as_partial_name())
    }
}

impl<'a> convert::TryFrom<&'a String> for &'a FullNameRef {
    type Error = Error;

    fn try_from(v: &'a String) -> Result<Self, Self::Error> {
        let v = v.as_bytes().as_bstr();
        Ok(FullNameRef::new_unchecked(gix_validate::reference::name(v)?))
    }
}

impl<'a> convert::TryFrom<&'a String> for &'a PartialNameRef {
    type Error = Error;

    fn try_from(v: &'a String) -> Result<Self, Self::Error> {
        let v = v.as_bytes().as_bstr();
        Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(v)?))
    }
}

impl convert::TryFrom<String> for PartialName {
    type Error = Error;

    fn try_from(v: String) -> Result<Self, Self::Error> {
        gix_validate::reference::name_partial(v.as_bytes().as_bstr())?;
        Ok(PartialName(v.into()))
    }
}

impl convert::TryFrom<BString> for PartialName {
    type Error = Error;

    fn try_from(v: BString) -> Result<Self, Self::Error> {
        gix_validate::reference::name_partial(v.as_ref())?;
        Ok(PartialName(v))
    }
}

/// Note that this method is disagreeing with `gix_validate` as it allows dashes '-' for some reason.
/// Since partial names cannot be created with dashes inside, we adjusted this as it's probably unintended, or git creates pseudo-refs
/// which wouldn't pass its safety checks.
pub(crate) fn is_pseudo_ref(name: &BStr) -> bool {
    name.bytes().all(|b| b.is_ascii_uppercase() || b == b'_')
}

gix-ref-0.43.0/src/namespace.rs

use std::path::{Path, PathBuf};

use gix_object::bstr::{BStr, BString, ByteSlice, ByteVec};

use crate::{FullName, FullNameRef, Namespace, PartialNameRef};

impl Namespace {
    /// Dissolve ourselves into the interior representation
    pub fn into_bstring(self) -> BString {
        self.0
    }

    /// Return ourselves as a byte string
    pub fn as_bstr(&self) -> &BStr {
        self.0.as_ref()
    }

    /// Return ourselves as a path for use within the filesystem.
    pub fn to_path(&self) -> &Path {
        gix_path::from_byte_slice(&self.0)
    }

    /// Append the given `prefix` to this namespace so it becomes usable for prefixed iteration.
    pub fn into_namespaced_prefix(mut self, prefix: &Path) -> PathBuf {
        let prefix = gix_path::into_bstr(prefix);
        self.0.push_str(prefix.as_ref());
        gix_path::to_native_path_on_windows(self.0).into_owned()
    }

    pub(crate) fn into_namespaced_name(mut self, name: &FullNameRef) -> FullName {
        self.0.push_str(name.as_bstr());
        FullName(self.0)
    }
}

/// Given a `namespace` 'foo' we output 'refs/namespaces/foo', and given 'foo/bar' we output 'refs/namespaces/foo/refs/namespaces/bar'.
///
/// For more information, consult the [git namespace documentation](https://git-scm.com/docs/gitnamespaces).
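///
/// A sketch of the expansion performed below (illustrative; note that each component gains a trailing slash):
///
/// ```text
/// expand("foo")?     // Namespace("refs/namespaces/foo/")
/// expand("foo/bar")? // Namespace("refs/namespaces/foo/refs/namespaces/bar/")
/// ```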
pub fn expand<'a, Name, E>(namespace: Name) -> Result<Namespace, gix_validate::reference::name::Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    gix_validate::reference::name::Error: From<E>,
{
    let namespace = &namespace.try_into()?.0;
    let mut out = BString::default();
    for component in namespace.split_str(b"/") {
        out.push_str("refs/namespaces/");
        out.push_str(component);
        out.push_str(b"/");
    }
    Ok(Namespace(out))
}

gix-ref-0.43.0/src/parse.rs

use gix_object::bstr::{BStr, ByteSlice};
use winnow::{combinator::alt, error::ParserError, prelude::*, token::take_while};

fn is_hex_digit_lc(b: u8) -> bool {
    matches!(b, b'0'..=b'9' | b'a'..=b'f')
}

/// Copied from https://github.com/Byron/gitoxide/blob/f270850ff92eab15258023b8e59346ec200303bd/gix-object/src/immutable/parse.rs#L64
pub fn hex_hash<'a, E: ParserError<&'a [u8]>>(i: &mut &'a [u8]) -> PResult<&'a BStr, E> {
    // NOTE: It's important to be able to read all hashes, do not parameterize it. Hashes can be rejected at a later stage
    // if needed.
    take_while(
        gix_hash::Kind::shortest().len_in_hex()..=gix_hash::Kind::longest().len_in_hex(),
        is_hex_digit_lc,
    )
    .map(ByteSlice::as_bstr)
    .parse_next(i)
}

pub fn newline<'a, E: ParserError<&'a [u8]>>(i: &mut &'a [u8]) -> PResult<&'a [u8], E> {
    alt((b"\r\n", b"\n")).parse_next(i)
}

gix-ref-0.43.0/src/peel.rs

///
#[allow(clippy::empty_docs)]
pub mod to_id {
    use std::path::PathBuf;

    use gix_object::bstr::BString;

    use crate::file;

    /// The error returned by [`crate::file::ReferenceExt::peel_to_id_in_place()`].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("Could not follow a single level of a symbolic reference")]
        Follow(#[from] file::find::existing::Error),
        #[error("Aborting due to reference cycle with first seen path being {start_absolute:?}")]
        Cycle { start_absolute: PathBuf },
        #[error("Refusing to follow more than {max_depth} levels of indirection")]
        DepthLimitExceeded { max_depth: usize },
        #[error("An error occurred when trying to resolve an object a reference points to")]
        Find(#[from] gix_object::find::Error),
        #[error("Object {oid} as referred to by {name:?} could not be found")]
        NotFound { oid: gix_hash::ObjectId, name: BString },
    }
}

gix-ref-0.43.0/src/raw.rs

use gix_hash::ObjectId;

use crate::{FullName, Target};

/// A fully owned, backend-agnostic reference
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Reference {
    /// The path to uniquely identify this ref within its store.
    pub name: FullName,
    /// The target of the reference, either a symbolic reference by full name or a possibly intermediate object by its id.
    pub target: Target,
    /// The fully peeled object to which this reference ultimately points. Only guaranteed to be set after
    /// [`Reference::peel_to_id_in_place()`](crate::file::ReferenceExt) was called or if this reference originated
    /// from a packed ref.
    pub peeled: Option<ObjectId>,
}

mod convert {
    use gix_hash::ObjectId;

    use crate::{
        raw::Reference,
        store_impl::{file::loose, packed},
        Target,
    };

    impl From<Reference> for loose::Reference {
        fn from(value: Reference) -> Self {
            loose::Reference {
                name: value.name,
                target: value.target,
            }
        }
    }

    impl From<loose::Reference> for Reference {
        fn from(value: loose::Reference) -> Self {
            Reference {
                name: value.name,
                target: value.target,
                peeled: None,
            }
        }
    }

    impl<'p> From<packed::Reference<'p>> for Reference {
        fn from(value: packed::Reference<'p>) -> Self {
            Reference {
                name: value.name.into(),
                target: Target::Peeled(value.target()),
                peeled: value
                    .object
                    .map(|hex| ObjectId::from_hex(hex).expect("parser validation")),
            }
        }
    }
}

mod access {
    use gix_object::bstr::ByteSlice;

    use crate::{raw::Reference, FullNameRef, Namespace, Target};

    impl Reference {
        /// Returns the kind of reference based on its target
        pub fn kind(&self) -> crate::Kind {
            self.target.kind()
        }

        /// Return the full validated name of the reference, with the given namespace stripped if possible.
        ///
        /// If the reference name wasn't prefixed with `namespace`, `None` is returned instead.
        pub fn name_without_namespace(&self, namespace: &Namespace) -> Option<&FullNameRef> {
            self.name
                .0
                .as_bstr()
                .strip_prefix(namespace.0.as_bytes())
                .map(|stripped| FullNameRef::new_unchecked(stripped.as_bstr()))
        }

        /// Strip the given namespace from our name and, for symbolic refs, from the name of the reference we point to.
        pub fn strip_namespace(&mut self, namespace: &Namespace) -> &mut Self {
            self.name.strip_namespace(namespace);
            if let Target::Symbolic(name) = &mut self.target {
                name.strip_namespace(namespace);
            }
            self
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn size_of_reference() {
        assert_eq!(
            std::mem::size_of::<Reference>(),
            80,
            "let's not let it change size undetected"
        );
    }
}

gix-ref-0.43.0/src/store/file/find.rs

use std::{
    borrow::Cow,
    io::{self, Read},
    path::{Path, PathBuf},
};

pub use error::Error;

use crate::{
    file,
    store_impl::{file::loose, packed},
    BStr, BString, FullNameRef, PartialName, PartialNameRef, Reference,
};

/// ### Finding References - notes about precomposed unicode.
///
/// Generally, ref names and the target of symbolic refs are stored as-is if [`Self::precompose_unicode`] is `false`.
/// If `true`, refs are stored as precomposed unicode in `packed-refs`, but stored as-is on disk, as it is then assumed
/// to be indifferent, i.e. `"a\u{308}"` is the same as `"ä"`.
///
/// This also means that when refs are packed for transmission to another machine, both their names and the target of
/// symbolic references need to be precomposed.
///
/// Namespaces are left as-is as they never get past the particular repository that uses them.
impl file::Store {
    /// Find a single reference by the given `path` which is required to be a valid reference name.
    ///
    /// Returns `Ok(None)` if no such ref exists.
    ///
    /// ### Note
    ///
    /// * The lookup algorithm follows the one in [the git documentation][git-lookup-docs].
    /// * The packed buffer is checked for modifications each time the method is called. See [`file::Store::try_find_packed()`]
    ///   for a version with more control.
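    ///
    /// A usage sketch (illustrative only; `store` is assumed to be an opened `file::Store`):
    ///
    /// ```text
    /// if let Some(r) = store.try_find("main")? {
    ///     // found e.g. `refs/heads/main` per the lookup rules below
    /// }
    /// ```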
    ///
    /// [git-lookup-docs]: https://github.com/git/git/blob/5d5b1473453400224ebb126bf3947e0a3276bdf5/Documentation/revisions.txt#L34-L46
    pub fn try_find<'a, Name, E>(&self, partial: Name) -> Result<Option<Reference>, Error>
    where
        Name: TryInto<&'a PartialNameRef, Error = E>,
        Error: From<E>,
    {
        let packed = self.assure_packed_refs_uptodate()?;
        self.find_one_with_verified_input(partial.try_into()?, packed.as_ref().map(|b| &***b))
    }

    /// Similar to [`file::Store::try_find()`], but it finds only loose references, that is references that aren't
    /// in the packed-refs buffer.
    /// All symbolic references are loose references.
    /// `HEAD` is always a loose reference.
    pub fn try_find_loose<'a, Name, E>(&self, partial: Name) -> Result<Option<loose::Reference>, Error>
    where
        Name: TryInto<&'a PartialNameRef, Error = E>,
        Error: From<E>,
    {
        self.find_one_with_verified_input(partial.try_into()?, None)
            .map(|r| r.map(Into::into))
    }

    /// Similar to [`file::Store::try_find()`], but allows passing a snapshotted packed buffer instead.
    pub fn try_find_packed<'a, Name, E>(
        &self,
        partial: Name,
        packed: Option<&packed::Buffer>,
    ) -> Result<Option<Reference>, Error>
    where
        Name: TryInto<&'a PartialNameRef, Error = E>,
        Error: From<E>,
    {
        self.find_one_with_verified_input(partial.try_into()?, packed)
    }

    pub(crate) fn find_one_with_verified_input(
        &self,
        partial_name: &PartialNameRef,
        packed: Option<&packed::Buffer>,
    ) -> Result<Option<Reference>, Error> {
        fn decompose_if(mut r: Reference, input_changed_to_precomposed: bool) -> Reference {
            if input_changed_to_precomposed {
                use gix_object::bstr::ByteSlice;
                let decomposed = r
                    .name
                    .0
                    .to_str()
                    .ok()
                    .map(|name| gix_utils::str::decompose(name.into()));
                if let Some(Cow::Owned(decomposed)) = decomposed {
                    r.name.0 = decomposed.into();
                }
            }
            r
        }

        let mut buf = BString::default();
        let mut precomposed_partial_name_storage = packed.filter(|_| self.precompose_unicode).and_then(|_| {
            use gix_object::bstr::ByteSlice;
            let precomposed = partial_name.0.to_str().ok()?;
            let precomposed = gix_utils::str::precompose(precomposed.into());
            match precomposed {
                Cow::Owned(precomposed) => Some(PartialName(precomposed.into())),
                Cow::Borrowed(_) => None,
            }
        });
        let precomposed_partial_name = precomposed_partial_name_storage
            .as_ref()
            .map(std::convert::AsRef::as_ref);
        for inbetween in &["", "tags", "heads", "remotes"] {
            match self.find_inner(inbetween, partial_name, precomposed_partial_name, packed, &mut buf) {
                Ok(Some(r)) => return Ok(Some(decompose_if(r, precomposed_partial_name.is_some()))),
                Ok(None) => {
                    continue;
                }
                Err(err) => return Err(err),
            }
        }
        if partial_name.as_bstr() != "HEAD" {
            if let Some(mut precomposed) = precomposed_partial_name_storage {
                precomposed = precomposed.join("HEAD".into()).expect("HEAD is valid name");
                precomposed_partial_name_storage = Some(precomposed);
            }
            self.find_inner(
                "remotes",
                partial_name
                    .to_owned()
                    .join("HEAD".into())
                    .expect("HEAD is valid name")
                    .as_ref(),
                precomposed_partial_name_storage
                    .as_ref()
                    .map(std::convert::AsRef::as_ref),
                None,
                &mut buf,
            )
            .map(|res| res.map(|r| decompose_if(r, precomposed_partial_name_storage.is_some())))
        } else {
            Ok(None)
        }
    }

    fn find_inner(
        &self,
        inbetween: &str,
        partial_name: &PartialNameRef,
        precomposed_partial_name: Option<&PartialNameRef>,
        packed: Option<&packed::Buffer>,
        path_buf: &mut BString,
    ) -> Result<Option<Reference>, Error> {
        let full_name = precomposed_partial_name
            .unwrap_or(partial_name)
            .construct_full_name_ref(inbetween, path_buf);
        let content_buf = self.ref_contents(full_name).map_err(|err| Error::ReadFileContents {
            source: err,
            path: self.reference_path(full_name),
        })?;

        match content_buf
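        // A `None` content buffer means no loose file was found: fall back to the
        // packed-refs buffer (if one was provided) before giving up below.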
        {
            None => {
                if let Some(packed) = packed {
                    if let Some(full_name) = packed::find::transform_full_name_for_lookup(full_name) {
                        let full_name_backing;
                        let full_name = match &self.namespace {
                            Some(namespace) => {
                                full_name_backing = namespace.to_owned().into_namespaced_name(full_name);
                                full_name_backing.as_ref()
                            }
                            None => full_name,
                        };
                        if let Some(packed_ref) = packed.try_find_full_name(full_name)? {
                            let mut res: Reference = packed_ref.into();
                            if let Some(namespace) = &self.namespace {
                                res.strip_namespace(namespace);
                            }
                            return Ok(Some(res));
                        };
                    }
                }
                Ok(None)
            }
            Some(content) => Ok(Some(
                loose::Reference::try_from_path(full_name.to_owned(), &content)
                    .map(Into::into)
                    .map(|mut r: Reference| {
                        if let Some(namespace) = &self.namespace {
                            r.strip_namespace(namespace);
                        }
                        r
                    })
                    .map_err(|err| Error::ReferenceCreation {
                        source: err,
                        relative_path: full_name.to_path().to_owned(),
                    })?,
            )),
        }
    }
}

impl file::Store {
    pub(crate) fn to_base_dir_and_relative_name<'a>(
        &self,
        name: &'a FullNameRef,
        is_reflog: bool,
    ) -> (Cow<'_, Path>, &'a FullNameRef) {
        let commondir = self.common_dir_resolved();
        let linked_git_dir =
            |worktree_name: &BStr| commondir.join("worktrees").join(gix_path::from_bstr(worktree_name));
        name.category_and_short_name()
            .map(|(c, sn)| {
                use crate::Category::*;
                let sn = FullNameRef::new_unchecked(sn);
                match c {
                    LinkedPseudoRef { name: worktree_name } => is_reflog
                        .then(|| (linked_git_dir(worktree_name).into(), sn))
                        .unwrap_or((commondir.into(), name)),
                    Tag | LocalBranch | RemoteBranch | Note => (commondir.into(), name),
                    MainRef | MainPseudoRef => (commondir.into(), sn),
                    LinkedRef { name: worktree_name } => sn
                        .category()
                        .map_or(false, |cat| cat.is_worktree_private())
                        .then(|| {
                            if is_reflog {
                                (linked_git_dir(worktree_name).into(), sn)
                            } else {
                                (commondir.into(), name)
                            }
                        })
                        .unwrap_or((commondir.into(), sn)),
                    PseudoRef | Bisect | Rewritten | WorktreePrivate => (self.git_dir.as_path().into(), name),
                }
            })
            .unwrap_or((commondir.into(), name))
    }

    /// Implements the logic required to transform a fully qualified refname into a filesystem path
    pub(crate) fn reference_path_with_base<'b>(&self, name: &'b FullNameRef) -> (Cow<'_, Path>, Cow<'b, Path>) {
        let (base, name) = self.to_base_dir_and_relative_name(name, false);
        (
            base,
            match &self.namespace {
                None => gix_path::to_native_path_on_windows(name.as_bstr()),
                Some(namespace) => {
                    gix_path::to_native_path_on_windows(namespace.to_owned().into_namespaced_name(name).into_inner())
                }
            },
        )
    }

    /// Implements the logic required to transform a fully qualified refname into a filesystem path
    pub(crate) fn reference_path(&self, name: &FullNameRef) -> PathBuf {
        let (base, relative_path) = self.reference_path_with_base(name);
        base.join(relative_path)
    }

    /// Read the file contents of a verified full reference path and return them if possible.
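    ///
    /// `Ok(None)` signals a missing reference: the file doesn't exist, turned out to be
    /// a directory, or (on Windows) couldn't be accessed, mirroring the match arms below.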
    pub(crate) fn ref_contents(&self, name: &FullNameRef) -> io::Result<Option<Vec<u8>>> {
        let ref_path = self.reference_path(name);

        match std::fs::File::open(&ref_path) {
            Ok(mut file) => {
                let mut buf = Vec::with_capacity(128);
                if let Err(err) = file.read_to_end(&mut buf) {
                    return if ref_path.is_dir() { Ok(None) } else { Err(err) };
                }
                Ok(buf.into())
            }
            Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
            #[cfg(windows)]
            Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => Ok(None),
            Err(err) => Err(err),
        }
    }
}

///
#[allow(clippy::empty_docs)]
pub mod existing {
    pub use error::Error;

    use crate::{
        file::{self},
        store_impl::{
            file::{find, loose},
            packed,
        },
        PartialNameRef, Reference,
    };

    impl file::Store {
        /// Similar to [`file::Store::try_find()`], but a non-existing ref is treated as error.
        pub fn find<'a, Name, E>(&self, partial: Name) -> Result<Reference, Error>
        where
            Name: TryInto<&'a PartialNameRef, Error = E>,
            crate::name::Error: From<E>,
        {
            let packed = self.assure_packed_refs_uptodate().map_err(find::Error::PackedOpen)?;
            self.find_existing_inner(partial, packed.as_ref().map(|b| &***b))
        }

        /// Similar to [`file::Store::find()`], but supports a stable packed buffer.
        pub fn find_packed<'a, Name, E>(
            &self,
            partial: Name,
            packed: Option<&packed::Buffer>,
        ) -> Result<Reference, Error>
        where
            Name: TryInto<&'a PartialNameRef, Error = E>,
            crate::name::Error: From<E>,
        {
            self.find_existing_inner(partial, packed)
        }

        /// Similar to [`file::Store::find()`], but won't handle packed-refs.
        pub fn find_loose<'a, Name, E>(&self, partial: Name) -> Result<loose::Reference, Error>
        where
            Name: TryInto<&'a PartialNameRef, Error = E>,
            crate::name::Error: From<E>,
        {
            self.find_existing_inner(partial, None).map(Into::into)
        }

        /// Similar to [`file::Store::find()`], but a non-existing ref is treated as error.
        pub(crate) fn find_existing_inner<'a, Name, E>(
            &self,
            partial: Name,
            packed: Option<&packed::Buffer>,
        ) -> Result<Reference, Error>
        where
            Name: TryInto<&'a PartialNameRef, Error = E>,
            crate::name::Error: From<E>,
        {
            let path = partial
                .try_into()
                .map_err(|err| Error::Find(find::Error::RefnameValidation(err.into())))?;
            match self.find_one_with_verified_input(path, packed) {
                Ok(Some(r)) => Ok(r),
                Ok(None) => Err(Error::NotFound {
                    name: path.to_partial_path().to_owned(),
                }),
                Err(err) => Err(err.into()),
            }
        }
    }

    mod error {
        use std::path::PathBuf;

        use crate::store_impl::file::find;

        /// The error returned by [file::Store::find_existing()][crate::file::Store::find()].
        #[derive(Debug, thiserror::Error)]
        #[allow(missing_docs)]
        pub enum Error {
            #[error("An error occurred while trying to find a reference")]
            Find(#[from] find::Error),
            #[error("The ref partially named {name:?} could not be found")]
            NotFound { name: PathBuf },
        }
    }
}

mod error {
    use std::{convert::Infallible, io, path::PathBuf};

    use crate::{file, store_impl::packed};

    /// The error returned by [file::Store::find()].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("The ref name or path is not a valid ref name")]
        RefnameValidation(#[from] crate::name::Error),
        #[error("The ref file {path:?} could not be read in full")]
        ReadFileContents { source: io::Error, path: PathBuf },
        #[error("The reference at \"{relative_path}\" could not be instantiated")]
        ReferenceCreation {
            source: file::loose::reference::decode::Error,
            relative_path: PathBuf,
        },
        #[error("A packed ref lookup failed")]
        PackedRef(#[from] packed::find::Error),
        #[error("Could not open the packed refs buffer when trying to find references.")]
        PackedOpen(#[from] packed::buffer::open::Error),
    }

    impl From<Infallible> for Error {
        fn from(_: Infallible) -> Self {
            unreachable!("this impl is needed to allow passing a known valid partial path as parameter")
        }
    }
}

gix-ref-0.43.0/src/store/file/log/iter.rs

use gix_object::bstr::ByteSlice;

use crate::{
    file,
    file::loose::reference::logiter::must_be_io_err,
    store_impl::file::{log, log::iter::decode::LineNumber},
    FullNameRef,
};

///
#[allow(clippy::empty_docs)]
pub mod decode {
    use crate::store_impl::file::log;

    /// The error returned by items in the [forward][super::forward()] and [reverse][super::reverse()] iterators
    #[derive(Debug)]
    pub struct Error {
        inner: log::line::decode::Error,
        line: LineNumber,
    }

    impl std::fmt::Display for Error {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "In line {}: {}", self.line, self.inner)
        }
    }

    impl std::error::Error for Error {}

    impl Error {
        pub(crate) fn new(err: log::line::decode::Error, line: LineNumber) -> Self {
            Error { line, inner: err }
        }
    }

    #[derive(Debug)]
    pub(crate) enum LineNumber {
        FromStart(usize),
        FromEnd(usize),
    }

    impl std::fmt::Display for LineNumber {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            let (line, suffix) = match self {
                LineNumber::FromStart(line) => (line, ""),
                LineNumber::FromEnd(line) => (line, " from the end"),
            };
            write!(f, "{}{}", line + 1, suffix)
        }
    }
}

/// Returns a forward iterator over the given `lines`, starting from the first line in the file and ending at the last.
///
/// Note that `lines` are an entire reflog file.
///
/// This iterator is useful when the ref log file is going to be rewritten, which forces processing of the entire file.
/// It will continue parsing even if individual log entries failed to parse, leaving it to the driver to decide whether to
/// abort or continue.
pub fn forward(lines: &[u8]) -> Forward<'_> {
    Forward {
        inner: lines.as_bstr().lines().enumerate(),
    }
}

/// An iterator yielding parsed lines in a file from start to end, oldest to newest.
pub struct Forward<'a> {
    inner: std::iter::Enumerate<gix_object::bstr::Lines<'a>>,
}

impl<'a> Iterator for Forward<'a> {
    type Item = Result<log::LineRef<'a>, decode::Error>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|(ln, line)| {
            log::LineRef::from_bytes(line).map_err(|err| decode::Error::new(err, decode::LineNumber::FromStart(ln)))
        })
    }
}

/// A platform to store a buffer to hold ref log lines for iteration.
#[must_use = "Iterators should be obtained from this platform"]
pub struct Platform<'a, 's> {
    /// The store containing the reflogs
    pub store: &'s file::Store,
    /// The full name of the reference whose reflog to retrieve.
    pub name: &'a FullNameRef,
    /// A reusable buffer for storing log lines read from disk.
    pub buf: Vec<u8>,
}

impl<'a, 's> Platform<'a, 's> {
    /// Return a reverse iterator over all log-lines, most recent to oldest.
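    ///
    /// As the fields of [`Platform`] are public, a usage sketch looks like this (illustrative only):
    ///
    /// ```text
    /// let mut platform = Platform { store: &store, name, buf: Vec::new() };
    /// if let Some(lines) = platform.rev()? {
    ///     for line in lines { /* `Result<Line, reverse::Error>`, most recent first */ }
    /// }
    /// ```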
    pub fn rev(&mut self) -> std::io::Result<Option<Reverse<'_, std::fs::File>>> {
        self.buf.clear();
        self.buf.resize(512, 0);
        self.store
            .reflog_iter_rev(self.name, &mut self.buf)
            .map_err(must_be_io_err)
    }

    /// Return a forward iterator over all log-lines, oldest to most recent.
    pub fn all(&mut self) -> std::io::Result<Option<Forward<'_>>> {
        self.buf.clear();
        self.store.reflog_iter(self.name, &mut self.buf).map_err(must_be_io_err)
    }
}

/// An iterator yielding parsed lines in a file in reverse, most recent to oldest.
pub struct Reverse<'a, F> {
    buf: &'a mut [u8],
    count: usize,
    read_and_pos: Option<(F, u64)>,
    last_nl_pos: Option<usize>,
}

/// An iterator over entries of the `log` file in reverse, using `buf` as sliding window.
///
/// Note that `buf` must be big enough to capture typical line length, or else partial lines will be parsed and probably fail
/// in the process.
///
/// This iterator is very expensive in terms of I/O operations and shouldn't be used to read more than the last few entries of the log.
/// Use a forward iterator instead for these cases.
///
/// It will continue parsing even if individual log entries failed to parse, leaving it to the driver to decide whether to
/// abort or continue.
pub fn reverse<F>(mut log: F, buf: &mut [u8]) -> std::io::Result<Reverse<'_, F>>
where
    F: std::io::Read + std::io::Seek,
{
    let pos = log.seek(std::io::SeekFrom::End(0))?;
    if buf.is_empty() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            "Zero sized buffers are not allowed, use 256 bytes or more for typical logs",
        ));
    }
    Ok(Reverse {
        buf,
        count: 0,
        read_and_pos: Some((log, pos)),
        last_nl_pos: None,
    })
}

///
#[allow(clippy::empty_docs)]
pub mod reverse {
    use super::decode;

    /// The error returned by the [`Reverse`][super::Reverse] iterator
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("The buffer could not be filled to make more lines available")]
        Io(#[from] std::io::Error),
        #[error("Could not decode log line")]
        Decode(#[from] decode::Error),
    }
}

impl<'a, F> Iterator for Reverse<'a, F>
where
    F: std::io::Read + std::io::Seek,
{
    type Item = Result<crate::log::Line, reverse::Error>;

    fn next(&mut self) -> Option<Self::Item> {
        match (self.last_nl_pos.take(), self.read_and_pos.take()) {
            // Initial state - load first data block
            (None, Some((mut read, pos))) => {
                let npos = pos.saturating_sub(self.buf.len() as u64);
                if let Err(err) = read.seek(std::io::SeekFrom::Start(npos)) {
                    return Some(Err(err.into()));
                }

                let n = (pos - npos) as usize;
                if n == 0 {
                    return None;
                }
                let buf = &mut self.buf[..n];
                if let Err(err) = read.read_exact(buf) {
                    return Some(Err(err.into()));
                };

                let last_byte = *buf.last().expect("we have read non-zero bytes before");
                self.last_nl_pos = Some(if last_byte != b'\n' { buf.len() } else { buf.len() - 1 });
                self.read_and_pos = Some((read, npos));
                self.next()
            }
            // Has data block and can extract lines from it, load new blocks as needed
            (Some(end), Some(read_and_pos)) => match self.buf[..end].rfind_byte(b'\n') {
                Some(start) => {
                    self.read_and_pos = Some(read_and_pos);
                    self.last_nl_pos = Some(start);
                    let buf = &self.buf[start + 1..end];
                    let res = Some(
                        log::LineRef::from_bytes(buf)
                            .map_err(|err| {
                                reverse::Error::Decode(decode::Error::new(err, LineNumber::FromEnd(self.count)))
                            })
                            .map(Into::into),
                    );
                    self.count += 1;
                    res
                }
                None => {
                    let (mut read, last_read_pos) = read_and_pos;
                    if last_read_pos == 0 {
                        let buf = &self.buf[..end];
                        Some(
                            log::LineRef::from_bytes(buf)
                                .map_err(|err| {
                                    reverse::Error::Decode(decode::Error::new(err, LineNumber::FromEnd(self.count)))
                                })
                                .map(Into::into),
                        )
                    } else {
                        let npos = last_read_pos.saturating_sub((self.buf.len() - end) as u64);
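                        // Slide the window towards the front of the file: `npos` is the new
                        // read offset; if it didn't move, the pending line exceeds the buffer.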
                        if npos == last_read_pos {
                            return Some(Err(std::io::Error::new(
                                std::io::ErrorKind::Other,
                                "buffer too small for line size",
                            )
                            .into()));
                        }

                        let n = (last_read_pos - npos) as usize;
                        self.buf.copy_within(0..end, n);
                        if let Err(err) = read.seek(std::io::SeekFrom::Start(npos)) {
                            return Some(Err(err.into()));
                        }
                        if let Err(err) = read.read_exact(&mut self.buf[..n]) {
                            return Some(Err(err.into()));
                        }
                        self.read_and_pos = Some((read, npos));
                        self.last_nl_pos = Some(n + end);
                        self.next()
                    }
                }
            },
            // depleted
            (None, None) => None,
            (Some(_), None) => unreachable!("BUG: Invalid state: we never discard only our file, always both."),
        }
    }
}
gix-ref-0.43.0/src/store/file/log/line.rs
use gix_hash::ObjectId;

use crate::{log::Line, store_impl::file::log::LineRef};

impl<'a> LineRef<'a> {
    /// Convert this instance into its owned counterpart.
    pub fn to_owned(&self) -> Line {
        self.clone().into()
    }
}

mod write {
    use std::io;

    use gix_object::bstr::{BStr, ByteSlice};

    use crate::log::Line;

    /// The Error produced by [`Line::write_to()`] (but wrapped in an io error).
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    enum Error {
        #[error("Messages must not contain newlines\\n")]
        IllegalCharacter,
    }

    impl From<Error> for io::Error {
        fn from(err: Error) -> Self {
            io::Error::new(io::ErrorKind::Other, err)
        }
    }

    /// Output
    impl Line {
        /// Serialize this instance to `out` in the git serialization format for ref log lines.
        pub fn write_to(&self, out: &mut dyn io::Write) -> io::Result<()> {
            write!(out, "{} {} ", self.previous_oid, self.new_oid)?;
            self.signature.write_to(out)?;
            writeln!(out, "\t{}", check_newlines(self.message.as_ref())?)
        }
    }

    fn check_newlines(input: &BStr) -> Result<&BStr, Error> {
        if input.find_byte(b'\n').is_some() {
            return Err(Error::IllegalCharacter);
        }
        Ok(input)
    }
}

impl<'a> LineRef<'a> {
    /// The previous object id of the ref. It will be a null hash if there was no previous id as
    /// this ref is being created.
    pub fn previous_oid(&self) -> ObjectId {
        ObjectId::from_hex(self.previous_oid).expect("parse validation")
    }

    /// The new object id of the ref, or a null hash if it is removed.
    pub fn new_oid(&self) -> ObjectId {
        ObjectId::from_hex(self.new_oid).expect("parse validation")
    }
}

impl<'a> From<LineRef<'a>> for Line {
    fn from(v: LineRef<'a>) -> Self {
        Line {
            previous_oid: v.previous_oid(),
            new_oid: v.new_oid(),
            signature: v.signature.into(),
            message: v.message.into(),
        }
    }
}

///
#[allow(clippy::empty_docs)]
pub mod decode {
    use gix_object::bstr::{BStr, ByteSlice};
    use winnow::{
        combinator::{alt, eof, fail, opt, preceded, rest, terminated},
        error::{AddContext, ParserError, StrContext},
        prelude::*,
        token::take_while,
    };

    use crate::{file::log::LineRef, parse::hex_hash};

    ///
    #[allow(clippy::empty_docs)]
    mod error {
        use gix_object::bstr::{BString, ByteSlice};

        /// The error returned by [`from_bytes(…)`][super::LineRef::from_bytes()]
        #[derive(Debug)]
        pub struct Error {
            pub input: BString,
        }

        impl std::fmt::Display for Error {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(
                    f,
                    "{:?} did not match '<old-hexsha> <new-hexsha> <name> <<email>> <timestamp>\\t<message>'",
                    self.input
                )
            }
        }

        impl std::error::Error for Error {}

        impl Error {
            pub(crate) fn new(input: &[u8]) -> Self {
                Error {
                    input: input.as_bstr().to_owned(),
                }
            }
        }
    }
    pub use error::Error;

    impl<'a> LineRef<'a> {
        /// Decode a line from the given bytes which are expected to start at a hex sha.
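        ///
        /// For example, a typical reflog line with illustrative hashes and signature decodes like this
        /// (a sketch only - `doctest` is disabled for this crate, so examples are not executed):
        ///
        /// ```no_run
        /// use gix_ref::file::log::LineRef;
        ///
        /// let line = b"0000000000000000000000000000000000000000 89b43f80a514aee58b662ad606e6352e03eaeee4 name <foo@example.com> 1234567890 -0000\tcommit (initial): message";
        /// let parsed = LineRef::from_bytes(line).expect("well-formed input");
        /// assert_eq!(parsed.new_oid, "89b43f80a514aee58b662ad606e6352e03eaeee4");
        /// assert_eq!(parsed.message, "commit (initial): message");
        /// ```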
        pub fn from_bytes(mut input: &'a [u8]) -> Result<LineRef<'a>, Error> {
            one::<()>(&mut input).map_err(|_| Error::new(input))
        }
    }

    fn message<'a, E: ParserError<&'a [u8]>>(i: &mut &'a [u8]) -> PResult<&'a BStr, E> {
        if i.is_empty() {
            rest.map(ByteSlice::as_bstr).parse_next(i)
        } else {
            terminated(take_while(0.., |c| c != b'\n'), opt(b'\n'))
                .map(ByteSlice::as_bstr)
                .parse_next(i)
        }
    }

    fn one<'a, E: ParserError<&'a [u8]> + AddContext<&'a [u8], StrContext>>(
        bytes: &mut &'a [u8],
    ) -> PResult<LineRef<'a>, E> {
        (
            (
                terminated(hex_hash, b" ").context(StrContext::Expected("<old-hexsha>".into())),
                terminated(hex_hash, b" ").context(StrContext::Expected("<new-hexsha>".into())),
                gix_actor::signature::decode.context(StrContext::Expected("<name> <<email>> <timestamp>".into())),
            )
                .context(StrContext::Expected(
                    "<old-hexsha> <new-hexsha> <name> <<email>> <timestamp>\\t<message>".into(),
                )),
            alt((
                preceded(
                    b'\t',
                    message.context(StrContext::Expected("<optional message>".into())),
                ),
                b'\n'.value(Default::default()),
                eof.value(Default::default()),
                fail.context(StrContext::Expected(
                    "log message must be separated from signature with whitespace".into(),
                )),
            )),
        )
            .map(|((old, new, signature), message)| LineRef {
                previous_oid: old,
                new_oid: new,
                signature,
                message,
            })
            .parse_next(bytes)
    }

    #[cfg(test)]
    mod test {
        use super::*;
        use gix_date::{time::Sign, Time};

        /// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
        fn hex_to_oid(hex: &str) -> gix_hash::ObjectId {
            gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
        }

        fn with_newline(mut v: Vec<u8>) -> Vec<u8> {
            v.push(b'\n');
            v
        }

        mod invalid {
            use gix_testtools::to_bstr_err;
            use winnow::{error::TreeError, prelude::*};

            use super::one;

            #[test]
            fn completely_bogus_shows_error_with_context() {
                let err = one::<TreeError<&[u8], _>>
                    .parse_peek(b"definitely not a log entry")
                    .map_err(to_bstr_err)
                    .expect_err("this should fail");
                assert!(err.to_string().contains("<old-hexsha> <new-hexsha>"));
            }

            #[test]
            fn missing_whitespace_between_signature_and_message() {
                let line = "0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 one <foo@example.com> 1234567890 -0000message";
                let err = one::<TreeError<&[u8], _>>
                    .parse_peek(line.as_bytes())
                    .map_err(to_bstr_err)
                    .expect_err("this should fail");
                assert!(
                    err.to_string()
                        .contains("log message must be separated from signature with whitespace"),
                    "expected\n `log message must be separated from signature with whitespace`\nin\n```\n{err}\n```"
                );
            }
        }

        const NULL_SHA1: &[u8] = b"0000000000000000000000000000000000000000";

        #[test]
        fn entry_with_empty_message() {
            let line_without_nl: Vec<_> = b"0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 name <foo@example.com> 1234567890 -0000".to_vec();
            let line_with_nl = with_newline(line_without_nl.clone());
            for input in &[line_without_nl, line_with_nl] {
                assert_eq!(
                    one::<winnow::error::TreeError<&[u8], _>>
                        .parse_peek(input)
                        .expect("successful parsing")
                        .1,
                    LineRef {
                        previous_oid: NULL_SHA1.as_bstr(),
                        new_oid: NULL_SHA1.as_bstr(),
                        signature: gix_actor::SignatureRef {
                            name: b"name".as_bstr(),
                            email: b"foo@example.com".as_bstr(),
                            time: Time {
                                seconds: 1234567890,
                                offset: 0,
                                sign: Sign::Minus
                            }
                        },
                        message: b"".as_bstr(),
                    }
                );
            }
        }

        #[test]
        fn entry_with_message_without_newline_and_with_newline() {
            let line_without_nl: Vec<_> = b"a5828ae6b52137b913b978e16cd2334482eb4c1f 89b43f80a514aee58b662ad606e6352e03eaeee4 Sebastian Thiel <foo@example.com> 1618030561 +0800\tpull --ff-only: Fast-forward".to_vec();
            let line_with_nl = with_newline(line_without_nl.clone());

            for input in &[line_without_nl, line_with_nl] {
                let (remaining, res) = one::<winnow::error::TreeError<&[u8], _>>
                    .parse_peek(input)
                    .expect("successful parsing");
                assert!(remaining.is_empty(), "all consuming even without trailing newline");
                let actual = LineRef {
b"a5828ae6b52137b913b978e16cd2334482eb4c1f".as_bstr(), new_oid: b"89b43f80a514aee58b662ad606e6352e03eaeee4".as_bstr(), signature: gix_actor::SignatureRef { name: b"Sebastian Thiel".as_bstr(), email: b"foo@example.com".as_bstr(), time: Time { seconds: 1618030561, offset: 28800, sign: Sign::Plus, }, }, message: b"pull --ff-only: Fast-forward".as_bstr(), }; assert_eq!(res, actual); assert_eq!( actual.previous_oid(), hex_to_oid("a5828ae6b52137b913b978e16cd2334482eb4c1f") ); assert_eq!(actual.new_oid(), hex_to_oid("89b43f80a514aee58b662ad606e6352e03eaeee4")); } } #[test] fn two_lines_in_a_row_with_and_without_newline() { let lines = b"0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 one 1234567890 -0000\t\n0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 two 1234567890 -0000\thello"; let (remainder, parsed) = one::> .parse_peek(lines) .expect("parse single line"); assert_eq!(parsed.message, b"".as_bstr(), "first message is empty"); let (remainder, parsed) = one::> .parse_peek(remainder) .expect("parse single line"); assert_eq!( parsed.message, b"hello".as_bstr(), "second message is not and contains no newline" ); assert!(remainder.is_empty()); } } } gix-ref-0.43.0/src/store/file/log/mod.rs000064400000000000000000000015711046102023000160250ustar 00000000000000use gix_object::bstr::BStr; pub use super::loose::reflog::{create_or_update, Error}; /// #[allow(clippy::empty_docs)] pub mod iter; mod line; /// A parsed ref log line. #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[non_exhaustive] pub struct LineRef<'a> { /// The previous object id in hexadecimal. Use [`LineRef::previous_oid()`] to get a more usable form. pub previous_oid: &'a BStr, /// The new object id in hexadecimal. Use [`LineRef::new_oid()`] to get a more usable form. pub new_oid: &'a BStr, /// The signature of the currently configured committer. #[cfg_attr(feature = "serde", serde(borrow))] pub signature: gix_actor::SignatureRef<'a>, /// The message providing details about the operation performed in this log line. pub message: &'a BStr, } gix-ref-0.43.0/src/store/file/loose/iter.rs000064400000000000000000000100671046102023000165510ustar 00000000000000use std::path::{Path, PathBuf}; use gix_features::fs::walkdir::DirEntryIter; use gix_object::bstr::ByteSlice; use crate::{file::iter::LooseThenPacked, store_impl::file, BString, FullName}; /// An iterator over all valid loose reference paths as seen from a particular base directory. pub(in crate::store_impl::file) struct SortedLoosePaths { pub(crate) base: PathBuf, filename_prefix: Option, file_walk: Option, } impl SortedLoosePaths { pub fn at(path: &Path, base: PathBuf, filename_prefix: Option, precompose_unicode: bool) -> Self { SortedLoosePaths { base, filename_prefix, file_walk: path.is_dir().then(|| { // serial iteration as we expect most refs in packed-refs anyway. 
                gix_features::fs::walkdir_sorted_new(
                    path,
                    gix_features::fs::walkdir::Parallelism::Serial,
                    precompose_unicode,
                )
                .into_iter()
            }),
        }
    }
}

impl Iterator for SortedLoosePaths {
    type Item = std::io::Result<(PathBuf, FullName)>;

    fn next(&mut self) -> Option<Self::Item> {
        for entry in self.file_walk.as_mut()?.by_ref() {
            match entry {
                Ok(entry) => {
                    if !entry.file_type().map_or(false, |ft| ft.is_file()) {
                        continue;
                    }
                    let full_path = entry.path().into_owned();
                    if let Some((prefix, name)) = self
                        .filename_prefix
                        .as_deref()
                        .and_then(|prefix| full_path.file_name().map(|name| (prefix, name)))
                    {
                        match gix_path::os_str_into_bstr(name) {
                            Ok(name) => {
                                if !name.starts_with(prefix) {
                                    continue;
                                }
                            }
                            Err(_) => continue, // TODO: silently skipping ill-formed UTF-8 on windows - maybe this can be better?
                        }
                    }
                    let full_name = full_path
                        .strip_prefix(&self.base)
                        .expect("prefix-stripping cannot fail as prefix is our root");
                    let full_name = match gix_path::try_into_bstr(full_name) {
                        Ok(name) => {
                            let name = gix_path::to_unix_separators_on_windows(name);
                            name.into_owned()
                        }
                        Err(_) => continue, // TODO: silently skipping ill-formed UTF-8 on windows here, maybe there are better ways?
                    };

                    if gix_validate::reference::name_partial(full_name.as_bstr()).is_ok() {
                        let name = FullName(full_name);
                        return Some(Ok((full_path, name)));
                    } else {
                        continue;
                    }
                }
                Err(err) => return Some(Err(err.into_io_error().expect("no symlink related errors"))),
            }
        }
        None
    }
}

impl file::Store {
    /// Return an iterator over all loose references, notably not including any packed ones, in lexical order.
    /// Each of the references may fail to parse and the iterator will not stop if parsing fails, allowing the caller
    /// to see all files that look like references, whether valid or not.
    ///
    /// Reference files that do not constitute valid names will be silently ignored.
    pub fn loose_iter(&self) -> std::io::Result<LooseThenPacked<'_, '_>> {
        self.iter_packed(None)
    }

    /// Return an iterator over all loose references that start with the given `prefix`.
    ///
    /// Otherwise it's similar to [`loose_iter()`][file::Store::loose_iter()].
    pub fn loose_iter_prefixed(&self, prefix: &Path) -> std::io::Result<LooseThenPacked<'_, '_>> {
        self.iter_prefixed_packed(prefix, None)
    }
}
gix-ref-0.43.0/src/store/file/loose/mod.rs
use crate::{FullName, Kind, Target};

/// A git _ref_ which is stored in a file.
#[derive(Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone)]
pub struct Reference {
    /// The path to uniquely identify this ref within its store.
    pub name: FullName,
    /// The target of the reference, either a symbolic reference by full name or an object by its id.
    pub target: Target,
}

impl Reference {
    /// Return the kind of ref.
    pub fn kind(&self) -> Kind {
        self.target.kind()
    }
}

///
#[allow(clippy::empty_docs)]
pub(crate) mod reflog;

///
#[allow(clippy::empty_docs)]
pub(crate) mod iter;

///
#[allow(clippy::empty_docs)]
pub mod reference;

mod init {
    use std::path::PathBuf;

    use crate::store_impl::file;

    impl file::Store {
        /// Create a new instance at the given `git_dir`, which commonly is a standard git repository with a
        /// `refs/` subdirectory.
        /// The `object_hash` defines which kind of hash we should recognize.
        ///
        /// Note that if `precompose_unicode` is set, the `git_dir` is also expected to use precomposed unicode,
        /// or else some operations that strip prefixes will fail.
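        ///
        /// A sketch of creating a store for a standard repository (illustrative values):
        ///
        /// ```no_run
        /// use gix_ref::store::WriteReflog;
        ///
        /// let store = gix_ref::file::Store::at(
        ///     ".git".into(),
        ///     WriteReflog::Normal,
        ///     gix_hash::Kind::Sha1,
        ///     false,
        /// );
        /// ```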
        pub fn at(
            git_dir: PathBuf,
            write_reflog: file::WriteReflog,
            object_hash: gix_hash::Kind,
            precompose_unicode: bool,
        ) -> Self {
            file::Store {
                git_dir,
                packed_buffer_mmap_threshold: packed_refs_mmap_threshold(),
                common_dir: None,
                write_reflog,
                namespace: None,
                packed: gix_fs::SharedFileSnapshotMut::new().into(),
                object_hash,
                precompose_unicode,
            }
        }

        /// Like [`at()`][file::Store::at()], but for _linked_ work-trees which use `git_dir` as private ref store and `common_dir` for
        /// shared references.
        ///
        /// Note that if `precompose_unicode` is set, the `git_dir` and `common_dir` are also expected to use precomposed unicode,
        /// or else some operations that strip prefixes will fail.
        pub fn for_linked_worktree(
            git_dir: PathBuf,
            common_dir: PathBuf,
            write_reflog: file::WriteReflog,
            object_hash: gix_hash::Kind,
            precompose_unicode: bool,
        ) -> Self {
            file::Store {
                git_dir,
                packed_buffer_mmap_threshold: packed_refs_mmap_threshold(),
                common_dir: Some(common_dir),
                write_reflog,
                namespace: None,
                packed: gix_fs::SharedFileSnapshotMut::new().into(),
                object_hash,
                precompose_unicode,
            }
        }
    }

    fn packed_refs_mmap_threshold() -> u64 {
        if cfg!(windows) {
            u64::MAX
        } else {
            32 * 1024
        }
    }
}
gix-ref-0.43.0/src/store/file/loose/reference/decode.rs
use gix_hash::ObjectId;
use gix_object::bstr::BString;
use winnow::{
    combinator::{opt, terminated},
    prelude::*,
    token::take_while,
};

use crate::{
    parse::{hex_hash, newline},
    store_impl::file::loose::Reference,
    FullName, Target,
};

enum MaybeUnsafeState {
    Id(ObjectId),
    UnvalidatedPath(BString),
}

/// The error returned by [`Reference::try_from_path()`].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
    #[error("{content:?} could not be parsed")]
    Parse { content: BString },
    #[error("The path {path:?} to a symbolic reference within a ref file is invalid")]
    RefnameValidation {
        source: gix_validate::reference::name::Error,
        path: BString,
    },
}

impl TryFrom<MaybeUnsafeState> for Target {
    type Error = Error;

    fn try_from(v: MaybeUnsafeState) -> Result<Self, Self::Error> {
        Ok(match v {
            MaybeUnsafeState::Id(id) => Target::Peeled(id),
            MaybeUnsafeState::UnvalidatedPath(name) => {
                Target::Symbolic(match gix_validate::reference::name(name.as_ref()) {
                    Ok(_) => FullName(name),
                    Err(err) => {
                        return Err(Error::RefnameValidation {
                            source: err,
                            path: name,
                        })
                    }
                })
            }
        })
    }
}

impl Reference {
    /// Create a new reference with the given `name` by parsing `path_contents`, the bytes read from
    /// its ref file, to obtain the ref's value.
    pub fn try_from_path(name: FullName, mut path_contents: &[u8]) -> Result<Self, Error> {
        Ok(Reference {
            name,
            target: parse(&mut path_contents)
                .map_err(|_| Error::Parse {
                    content: path_contents.into(),
                })?
                .try_into()?,
        })
    }
}

fn parse(i: &mut &[u8]) -> PResult<MaybeUnsafeState> {
    if let Some(_ref_prefix) = opt(terminated("ref: ", take_while(0.., b' '))).parse_next(i)? {
        terminated(take_while(0.., |b| b != b'\r' && b != b'\n'), opt(newline))
            .map(|path| MaybeUnsafeState::UnvalidatedPath(path.into()))
            .parse_next(i)
    } else {
        terminated(hex_hash, opt(newline))
            .map(|hex| MaybeUnsafeState::Id(ObjectId::from_hex(hex).expect("prior validation")))
            .parse_next(i)
    }
}
gix-ref-0.43.0/src/store/file/loose/reference/logiter.rs
use crate::store_impl::{
    file,
    file::{log, loose, loose::Reference},
};

pub(crate) fn must_be_io_err(err: loose::reflog::Error) -> std::io::Error {
    match err {
        loose::reflog::Error::Io(err) => err,
        loose::reflog::Error::RefnameValidation(_) => unreachable!("we are called from a valid ref"),
    }
}

impl Reference {
    /// Returns true if a reflog exists in the given `store`.
    ///
    /// Please note that this method shouldn't be used to check if a log exists before trying to read it, but instead
    /// is meant to be the fastest possible way to determine if a log exists or not.
    /// If the caller needs to know if it's readable, try to read the log instead with a reverse or forward iterator.
    pub fn log_exists(&self, store: &file::Store) -> bool {
        store
            .reflog_exists(self.name.as_ref())
            .expect("name conversion infallible")
    }

    /// Return a reflog reverse iterator for this ref, reading chunks from the back into the fixed buffer `buf`, in the given `store`.
    ///
    /// The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
    /// Return `Ok(None)` if no reflog exists.
    pub fn log_iter_rev<'b>(
        &self,
        store: &file::Store,
        buf: &'b mut [u8],
    ) -> std::io::Result<Option<log::iter::Reverse<'b, std::fs::File>>> {
        store.reflog_iter_rev(self.name.as_ref(), buf).map_err(must_be_io_err)
    }

    /// Return a reflog forward iterator for this ref and write its file contents into `buf`, in the given `store`.
    ///
    /// The iterator will traverse log entries from oldest to newest.
    /// Return `Ok(None)` if no reflog exists.
    pub fn log_iter<'a, 'b: 'a>(
        &'a self,
        store: &file::Store,
        buf: &'b mut Vec<u8>,
    ) -> std::io::Result<Option<impl Iterator<Item = Result<log::LineRef<'b>, log::iter::decode::Error>> + 'a>> {
        store.reflog_iter(self.name.as_ref(), buf).map_err(must_be_io_err)
    }
}
gix-ref-0.43.0/src/store/file/loose/reference/mod.rs
pub(crate) mod logiter;

///
#[allow(clippy::empty_docs)]
pub mod decode;
gix-ref-0.43.0/src/store/file/loose/reflog/create_or_update/tests.rs
use gix_actor::Signature;
use gix_date::{time::Sign, Time};
use gix_object::bstr::ByteSlice;
use gix_testtools::tempfile::TempDir;

use super::*;

type Result<T = ()> = std::result::Result<T, Box<dyn std::error::Error>>;

/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
    gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
}

fn empty_store(writemode: WriteReflog) -> Result<(TempDir, file::Store)> {
    let dir = TempDir::new()?;
    let store = file::Store::at(dir.path().into(), writemode, gix_hash::Kind::Sha1, false);
    Ok((dir, store))
}

fn reflog_lines(store: &file::Store, name: &str, buf: &mut Vec<u8>) -> Result<Vec<crate::log::Line>> {
    store
.expect("existing reflog") .map(|l| l.map(crate::log::Line::from)) .collect::, _>>() .map_err(Into::into) } const WRITE_MODES: &[WriteReflog] = &[WriteReflog::Normal, WriteReflog::Disable, WriteReflog::Always]; #[test] fn should_autocreate_is_unaffected_by_writemode() -> Result { let (_keep, store) = empty_store(WriteReflog::Disable)?; for should_create_name in &["HEAD", "refs/heads/main", "refs/remotes/any", "refs/notes/any"] { assert!(store.should_autocreate_reflog(Path::new(should_create_name))); } for should_not_create_name in &["FETCH_HEAD", "SOMETHING", "refs/special/this", "refs/tags/0.1.0"] { assert!(!store.should_autocreate_reflog(Path::new(should_not_create_name))); } Ok(()) } #[test] fn missing_reflog_creates_it_even_if_similarly_named_empty_dir_exists_and_append_log_lines() -> Result { for mode in WRITE_MODES { let (_keep, store) = empty_store(*mode)?; let full_name_str = "refs/heads/main"; let full_name: &FullNameRef = full_name_str.try_into()?; let new = hex_to_id("28ce6a8b26aa170e1de65536fe8abe1832bd3242"); let committer = Signature { name: "committer".into(), email: "committer@example.com".into(), time: Time { seconds: 1234, offset: 1800, sign: Sign::Plus, }, }; store.reflog_create_or_append( full_name, None, &new, committer.to_ref().into(), b"the message".as_bstr(), false, )?; let mut buf = Vec::new(); match mode { WriteReflog::Normal | WriteReflog::Always => { assert_eq!( reflog_lines(&store, full_name_str, &mut buf)?, vec![crate::log::Line { previous_oid: gix_hash::Kind::Sha1.null(), new_oid: new, signature: committer.clone(), message: "the message".into() }] ); let previous = hex_to_id("0000000000000000000000111111111111111111"); store.reflog_create_or_append( full_name, Some(previous), &new, committer.to_ref().into(), b"next message".as_bstr(), false, )?; let lines = reflog_lines(&store, full_name_str, &mut buf)?; assert_eq!(lines.len(), 2, "now there is another line"); assert_eq!( lines.last().expect("non-empty"), &crate::log::Line { previous_oid: previous, new_oid: new, signature: committer.clone(), message: "next message".into() } ); } WriteReflog::Disable => { assert!( store.reflog_iter(full_name, &mut buf)?.is_none(), "there is no logs in disabled mode" ); } }; // create onto existing directory let full_name_str = "refs/heads/other"; let full_name: &FullNameRef = full_name_str.try_into()?; let reflog_path = store.reflog_path(full_name_str.try_into().expect("valid")); let directory_in_place_of_reflog = reflog_path.join("empty-a").join("empty-b"); std::fs::create_dir_all(directory_in_place_of_reflog)?; store.reflog_create_or_append( full_name, None, &new, committer.to_ref().into(), b"more complicated reflog creation".as_bstr(), false, )?; match mode { WriteReflog::Normal | WriteReflog::Always => { assert_eq!( reflog_lines(&store, full_name_str, &mut buf)?.len(), 1, "reflog was written despite directory" ); assert!( reflog_path.is_file(), "the empty directory was replaced with the reflog file" ); } WriteReflog::Disable => { assert!( store.reflog_iter(full_name_str, &mut buf)?.is_none(), "reflog still doesn't exist" ); assert!( store.reflog_iter_rev(full_name_str, &mut buf)?.is_none(), "reflog still doesn't exist" ); assert!(reflog_path.is_dir(), "reflog directory wasn't touched"); } } } Ok(()) } gix-ref-0.43.0/src/store/file/loose/reflog.rs000064400000000000000000000231531046102023000170640ustar 00000000000000use std::{io::Read, path::PathBuf}; use crate::{ store_impl::{file, file::log}, FullNameRef, }; impl file::Store { /// Returns true if a reflog exists for the 
    ///
    /// Please note that this method shouldn't be used to check if a log exists before trying to read it, but instead
    /// is meant to be the fastest possible way to determine if a log exists or not.
    /// If the caller needs to know if it's readable, try to read the log instead with a reverse or forward iterator.
    pub fn reflog_exists<'a, Name, E>(&self, name: Name) -> Result<bool, Error>
    where
        Name: TryInto<&'a FullNameRef, Error = E>,
        crate::name::Error: From<E>,
    {
        Ok(self.reflog_path(name.try_into()?).is_file())
    }

    /// Return a reflog reverse iterator for the given fully qualified `name`, reading chunks from the back into the fixed buffer `buf`.
    ///
    /// The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
    /// Return `Ok(None)` if no reflog exists.
    pub fn reflog_iter_rev<'a, 'b, Name, E>(
        &self,
        name: Name,
        buf: &'b mut [u8],
    ) -> Result<Option<log::iter::Reverse<'b, std::fs::File>>, Error>
    where
        Name: TryInto<&'a FullNameRef, Error = E>,
        crate::name::Error: From<E>,
    {
        let name: &FullNameRef = name.try_into().map_err(|err| Error::RefnameValidation(err.into()))?;
        let path = self.reflog_path(name);
        if path.is_dir() {
            return Ok(None);
        }
        match std::fs::File::open(&path) {
            Ok(file) => Ok(Some(log::iter::reverse(file, buf)?)),
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
            Err(err) => Err(err.into()),
        }
    }

    /// Return a reflog forward iterator for the given fully qualified `name` and write its file contents into `buf`.
    ///
    /// The iterator will traverse log entries from oldest to newest.
    /// Return `Ok(None)` if no reflog exists.
    pub fn reflog_iter<'a, 'b, Name, E>(
        &self,
        name: Name,
        buf: &'b mut Vec<u8>,
    ) -> Result<Option<log::iter::Forward<'b>>, Error>
    where
        Name: TryInto<&'a FullNameRef, Error = E>,
        crate::name::Error: From<E>,
    {
        let name: &FullNameRef = name.try_into().map_err(|err| Error::RefnameValidation(err.into()))?;
        let path = self.reflog_path(name);
        match std::fs::File::open(&path) {
            Ok(mut file) => {
                buf.clear();
                if let Err(err) = file.read_to_end(buf) {
                    return if path.is_dir() { Ok(None) } else { Err(err.into()) };
                }
                Ok(Some(log::iter::forward(buf)))
            }
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
            #[cfg(windows)]
            Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => Ok(None),
            Err(err) => Err(err.into()),
        }
    }
}

impl file::Store {
    /// Implements the logic required to transform a fully qualified refname into its log name
    pub(crate) fn reflog_path(&self, name: &FullNameRef) -> PathBuf {
        let (base, rela_path) = self.reflog_base_and_relative_path(name);
        base.join(rela_path)
    }
}

///
#[allow(clippy::empty_docs)]
pub mod create_or_update {
    use std::{
        borrow::Cow,
        io::Write,
        path::{Path, PathBuf},
    };

    use gix_hash::{oid, ObjectId};
    use gix_object::bstr::BStr;

    use crate::store_impl::{file, file::WriteReflog};

    impl file::Store {
        #[allow(clippy::too_many_arguments)]
        pub(crate) fn reflog_create_or_append(
            &self,
            name: &FullNameRef,
            previous_oid: Option<ObjectId>,
            new: &oid,
            committer: Option<gix_actor::SignatureRef<'_>>,
            message: &BStr,
            mut force_create_reflog: bool,
        ) -> Result<(), Error> {
            let (reflog_base, full_name) = self.reflog_base_and_relative_path(name);
            match self.write_reflog {
                WriteReflog::Normal | WriteReflog::Always => {
                    if self.write_reflog == WriteReflog::Always {
                        force_create_reflog = true;
                    }
                    let mut options = std::fs::OpenOptions::new();
                    options.append(true).read(false);
                    let log_path = reflog_base.join(&full_name);

                    if force_create_reflog || self.should_autocreate_reflog(&full_name) {
                        let parent_dir = log_path.parent().expect("always with parent directory");
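                        // Creating the leading directories through `gix-tempfile` retries according to
                        // the default policy, making this robust against other processes racing to
                        // create or delete the same directories.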
                        gix_tempfile::create_dir::all(parent_dir, Default::default()).map_err(|err| {
                            Error::CreateLeadingDirectories {
                                source: err,
                                reflog_directory: parent_dir.to_owned(),
                            }
                        })?;
                        options.create(true);
                    };

                    let file_for_appending = match options.open(&log_path) {
                        Ok(f) => Some(f),
                        Err(err) if err.kind() == std::io::ErrorKind::NotFound => None,
                        Err(err) => {
                            // TODO: when Kind::IsADirectory becomes stable, use that.
                            if log_path.is_dir() {
                                gix_tempfile::remove_dir::empty_depth_first(log_path.clone())
                                    .and_then(|_| options.open(&log_path))
                                    .map(Some)
                                    .map_err(|_| Error::Append {
                                        source: err,
                                        reflog_path: self.reflog_path(name),
                                    })?
                            } else {
                                return Err(Error::Append {
                                    source: err,
                                    reflog_path: log_path,
                                });
                            }
                        }
                    };

                    if let Some(mut file) = file_for_appending {
                        let committer = committer.ok_or(Error::MissingCommitter)?;
                        write!(file, "{} {} ", previous_oid.unwrap_or_else(|| new.kind().null()), new)
                            .and_then(|_| committer.write_to(&mut file))
                            .and_then(|_| {
                                if !message.is_empty() {
                                    writeln!(file, "\t{message}")
                                } else {
                                    writeln!(file)
                                }
                            })
                            .map_err(|err| Error::Append {
                                source: err,
                                reflog_path: self.reflog_path(name),
                            })?;
                    }
                    Ok(())
                }
                WriteReflog::Disable => Ok(()),
            }
        }

        fn should_autocreate_reflog(&self, full_name: &Path) -> bool {
            full_name.starts_with("refs/heads/")
                || full_name.starts_with("refs/remotes/")
                || full_name.starts_with("refs/notes/")
                || full_name.starts_with("refs/worktree/") // NOTE: git does not write reflogs for worktree private refs
                || full_name == Path::new("HEAD")
        }

        /// Returns the base path at which reflogs are stored, along with the reflog's path relative to it.
        pub(in crate::store_impl::file) fn reflog_base_and_relative_path<'a>(
            &self,
            name: &'a FullNameRef,
        ) -> (PathBuf, Cow<'a, Path>) {
            let is_reflog = true;
            let (base, name) = self.to_base_dir_and_relative_name(name, is_reflog);
            (
                base.join("logs"),
                match &self.namespace {
                    None => gix_path::to_native_path_on_windows(name.as_bstr()),
                    Some(namespace) => gix_path::to_native_path_on_windows(
                        namespace.to_owned().into_namespaced_name(name).into_inner(),
                    ),
                },
            )
        }
    }

    #[cfg(test)]
    mod tests;

    mod error {
        use std::path::PathBuf;

        /// The error returned when creating or appending to a reflog
        #[derive(Debug, thiserror::Error)]
        #[allow(missing_docs)]
        pub enum Error {
            #[error("Could not create one or more directories in {reflog_directory:?} to contain the reflog file")]
            CreateLeadingDirectories {
                source: std::io::Error,
                reflog_directory: PathBuf,
            },
            #[error("Could not open reflog file at {reflog_path:?} for appending")]
            Append {
                source: std::io::Error,
                reflog_path: PathBuf,
            },
            #[error("reflog message must not contain newlines")]
            MessageWithNewlines,
            #[error("reflog messages need a committer which isn't set")]
            MissingCommitter,
        }
    }
    pub use error::Error;

    use crate::FullNameRef;
}

mod error {
    /// The error returned by [`crate::file::Store::reflog_iter()`].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("The reflog name or path is not a valid ref name")]
        RefnameValidation(#[from] crate::name::Error),
        #[error("The reflog file could not be read")]
        Io(#[from] std::io::Error),
    }
}
pub use error::Error;
gix-ref-0.43.0/src/store/file/mod.rs
use std::{
    borrow::Cow,
    path::{Path, PathBuf},
};

use crate::{bstr::BStr, store::WriteReflog, Namespace};

/// A store for references which uses plain files.
///
/// Each ref is represented as a single file on disk in a folder structure that follows the relative path
/// used to identify [references][crate::Reference].
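///
/// A short sketch of typical use (illustrative values, with errors boxed for brevity):
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let store = gix_ref::file::Store::at(
///     ".git".into(),
///     gix_ref::store::WriteReflog::Normal,
///     gix_hash::Kind::Sha1,
///     false,
/// );
/// for reference in store.iter()?.all()? {
///     println!("{}", reference?.name.as_bstr());
/// }
/// # Ok(())
/// # }
/// ```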
#[derive(Debug, Clone)]
pub struct Store {
    /// The location at which loose references can be found as per conventions of a typical git repository.
    ///
    /// Typical base paths are `.git` repository folders.
    git_dir: PathBuf,
    /// Possibly the common directory at which to find shared references. Only set if this `Store` is for a work tree.
    common_dir: Option<PathBuf>,
    /// The kind of hash to assume in a couple of situations. Note that currently we are able to read any valid hash from files,
    /// though this might change one day.
    object_hash: gix_hash::Kind,
    /// The amount of bytes needed for `mmap` to be used to open packed refs.
    packed_buffer_mmap_threshold: u64,

    /// The way to handle reflog edits
    pub write_reflog: WriteReflog,
    /// The namespace to use for edits and reads
    pub namespace: Option<Namespace>,
    /// If set, we will convert decomposed unicode like `a\u308` into precomposed unicode like `ä` when reading
    /// ref names from disk.
    /// Note that this is an internal operation that isn't observable on the outside, but it's needed for lookups
    /// to packed-refs or symlinks to work correctly.
    /// Iterated references will be returned verbatim, thus when sending them over the wire they have to be precomposed
    /// as needed.
    pub precompose_unicode: bool,
    /// A packed buffer which can be mapped in one version and shared as such.
    /// It's updated only in one spot, which is prior to reading it based on file stamps.
    /// Doing it like this has the benefit of being able to hand snapshots out to people without blocking others from updating it.
    packed: packed::modifiable::MutableSharedBuffer,
}

mod access {
    use std::path::Path;

    /// Mutation
    impl file::Store {
        /// Set the amount of `bytes` needed for the `.git/packed-refs` file to be memory mapped.
        /// Returns the previous value, which defaults to 32KB.
        pub fn set_packed_buffer_mmap_threshold(&mut self, mut bytes: u64) -> u64 {
            std::mem::swap(&mut self.packed_buffer_mmap_threshold, &mut bytes);
            bytes
        }
    }

    use crate::file;

    /// Access
    impl file::Store {
        /// Return the `.git` directory at which all references are loaded.
        ///
        /// For worktrees, this is the linked work-tree private ref location,
        /// then [`common_dir()`][file::Store::common_dir()] is `Some(parent_git_dir)`.
        pub fn git_dir(&self) -> &Path {
            &self.git_dir
        }

        /// If this is a linked work tree, there will be `Some(git_dir)` pointing to the parent repository,
        /// while [`git_dir()`][file::Store::git_dir()] points to the location holding linked work-tree private references.
        pub fn common_dir(&self) -> Option<&Path> {
            self.common_dir.as_deref()
        }

        /// Similar to [`common_dir()`][file::Store::common_dir()], but it will produce either the common-dir, or the git-dir if the former
        /// isn't present.
        ///
        /// This is also the directory in which the packed references file would be placed.
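        ///
        /// For instance, the location of the `packed-refs` file could be computed like this
        /// (a sketch; [`packed_refs_path()`][file::Store::packed_refs_path()] does exactly that):
        ///
        /// ```no_run
        /// # let store: gix_ref::file::Store = unimplemented!();
        /// let packed_refs = store.common_dir_resolved().join("packed-refs");
        /// ```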
        pub fn common_dir_resolved(&self) -> &Path {
            self.common_dir.as_deref().unwrap_or(&self.git_dir)
        }
    }
}

/// A transaction on a file store
pub struct Transaction<'s, 'p> {
    store: &'s Store,
    packed_transaction: Option<crate::store_impl::packed::Transaction>,
    updates: Option<Vec<transaction::Edit>>,
    packed_refs: transaction::PackedRefs<'p>,
}

pub(in crate::store_impl::file) fn path_to_name<'a>(path: impl Into<Cow<'a, Path>>) -> Cow<'a, BStr> {
    let path = gix_path::into_bstr(path.into());
    gix_path::to_unix_separators_on_windows(path)
}

///
#[allow(clippy::empty_docs)]
pub mod loose;
mod overlay_iter;

///
#[allow(clippy::empty_docs)]
pub mod iter {
    pub use super::overlay_iter::{LooseThenPacked, Platform};

    ///
    #[allow(clippy::empty_docs)]
    pub mod loose_then_packed {
        pub use super::super::overlay_iter::Error;
    }
}

///
#[allow(clippy::empty_docs)]
pub mod log;

///
#[allow(clippy::empty_docs)]
pub mod find;

///
#[allow(clippy::empty_docs)]
pub mod transaction;

///
#[allow(clippy::empty_docs)]
pub mod packed;

mod raw_ext;
pub use raw_ext::ReferenceExt;
gix-ref-0.43.0/src/store/file/overlay_iter.rs
use std::{
    borrow::Cow,
    cmp::Ordering,
    io::Read,
    iter::Peekable,
    path::{Path, PathBuf},
};

use crate::{
    file::{loose, loose::iter::SortedLoosePaths, path_to_name},
    store_impl::{file, packed},
    BString, FullName, Namespace, Reference,
};

/// An iterator stepping through sorted input of loose references and packed references, preferring loose refs over otherwise
/// equivalent packed references.
///
/// All errors will be returned verbatim, while packed errors are depleted first if loose refs also error.
pub struct LooseThenPacked<'p, 's> {
    git_dir: &'s Path,
    common_dir: Option<&'s Path>,
    namespace: Option<&'s Namespace>,
    iter_packed: Option<Peekable<packed::Iter<'p>>>,
    iter_git_dir: Peekable<SortedLoosePaths>,
    #[allow(dead_code)]
    iter_common_dir: Option<Peekable<SortedLoosePaths>>,
    buf: Vec<u8>,
}

enum IterKind {
    Git,
    GitAndConsumeCommon,
    Common,
}

/// An intermediate structure to hold shared state alive long enough for iteration to happen.
#[must_use = "Iterators should be obtained from this platform"]
pub struct Platform<'s> {
    store: &'s file::Store,
    packed: Option<file::packed::SharedBufferSnapshot>,
}

impl<'p, 's> LooseThenPacked<'p, 's> {
    fn strip_namespace(&self, mut r: Reference) -> Reference {
        if let Some(namespace) = &self.namespace {
            r.strip_namespace(namespace);
        }
        r
    }

    fn loose_iter(&mut self, kind: IterKind) -> &mut Peekable<SortedLoosePaths> {
        match kind {
            IterKind::GitAndConsumeCommon => {
                drop(self.iter_common_dir.as_mut().map(Iterator::next));
                &mut self.iter_git_dir
            }
            IterKind::Git => &mut self.iter_git_dir,
            IterKind::Common => self
                .iter_common_dir
                .as_mut()
                .expect("caller knows there is a common iter"),
        }
    }

    fn convert_packed(
        &mut self,
        packed: Result<packed::Reference<'p>, packed::iter::Error>,
    ) -> Result<Reference, Error> {
        packed
            .map(Into::into)
            .map(|r| self.strip_namespace(r))
            .map_err(|err| match err {
                packed::iter::Error::Reference {
                    invalid_line,
                    line_number,
                } => Error::PackedReference {
                    invalid_line,
                    line_number,
                },
                packed::iter::Error::Header { .. } => unreachable!("this one only happens on iteration creation"),
            })
    }

    fn convert_loose(&mut self, res: std::io::Result<(PathBuf, FullName)>) -> Result<Reference, Error> {
        let (refpath, name) = res.map_err(Error::Traversal)?;
        std::fs::File::open(&refpath)
            .and_then(|mut f| {
                self.buf.clear();
                f.read_to_end(&mut self.buf)
            })
            .map_err(|err| Error::ReadFileContents {
                source: err,
                path: refpath.to_owned(),
            })?;
        loose::Reference::try_from_path(name, &self.buf)
            .map_err(|err| {
                let relative_path = refpath
                    .strip_prefix(self.git_dir)
                    .ok()
                    .or_else(|| {
                        self.common_dir
                            .and_then(|common_dir| refpath.strip_prefix(common_dir).ok())
                    })
                    .expect("one of our bases contains the path");
                Error::ReferenceCreation {
                    source: err,
                    relative_path: relative_path.into(),
                }
            })
            .map(Into::into)
            .map(|r| self.strip_namespace(r))
    }
}

impl<'p, 's> Iterator for LooseThenPacked<'p, 's> {
    type Item = Result<Reference, Error>;

    fn next(&mut self) -> Option<Self::Item> {
        fn advance_to_non_private(iter: &mut Peekable<SortedLoosePaths>) {
            while let Some(Ok((_path, name))) = iter.peek() {
                if name.category().map_or(false, |cat| cat.is_worktree_private()) {
                    iter.next();
                } else {
                    break;
                }
            }
        }

        fn peek_loose<'a>(
            git_dir: &'a mut Peekable<SortedLoosePaths>,
            common_dir: Option<&'a mut Peekable<SortedLoosePaths>>,
        ) -> Option<(&'a std::io::Result<(PathBuf, FullName)>, IterKind)> {
            match common_dir {
                Some(common_dir) => match (git_dir.peek(), {
                    advance_to_non_private(common_dir);
                    common_dir.peek()
                }) {
                    (None, None) => None,
                    (None, Some(res)) | (Some(_), Some(res @ Err(_))) => Some((res, IterKind::Common)),
                    (Some(res), None) | (Some(res @ Err(_)), Some(_)) => Some((res, IterKind::Git)),
                    (Some(r_gitdir @ Ok((_, git_dir_name))), Some(r_cd @ Ok((_, common_dir_name)))) => {
                        match git_dir_name.cmp(common_dir_name) {
                            Ordering::Less => Some((r_gitdir, IterKind::Git)),
                            Ordering::Equal => Some((r_gitdir, IterKind::GitAndConsumeCommon)),
                            Ordering::Greater => Some((r_cd, IterKind::Common)),
                        }
                    }
                },
                None => git_dir.peek().map(|r| (r, IterKind::Git)),
            }
        }

        match self.iter_packed.as_mut() {
            Some(packed_iter) => match (
                peek_loose(&mut self.iter_git_dir, self.iter_common_dir.as_mut()),
                packed_iter.peek(),
            ) {
                (None, None) => None,
                (None, Some(_)) | (Some(_), Some(Err(_))) => {
                    let res = packed_iter.next().expect("peeked value exists");
                    Some(self.convert_packed(res))
                }
                (Some((_, kind)), None) | (Some((Err(_), kind)), Some(_)) => {
                    let res = self.loose_iter(kind).next().expect("prior peek");
                    Some(self.convert_loose(res))
                }
                (Some((Ok((_, loose_name)), kind)), Some(Ok(packed))) => match loose_name.as_ref().cmp(packed.name) {
                    Ordering::Less => {
                        let res = self.loose_iter(kind).next().expect("prior peek");
                        Some(self.convert_loose(res))
                    }
                    Ordering::Equal => {
                        drop(packed_iter.next());
                        let res = self.loose_iter(kind).next().expect("prior peek");
                        Some(self.convert_loose(res))
                    }
                    Ordering::Greater => {
                        let res = packed_iter.next().expect("name retrieval configured");
                        Some(self.convert_packed(res))
                    }
                },
            },
            None => match peek_loose(&mut self.iter_git_dir, self.iter_common_dir.as_mut()) {
                None => None,
                Some((_, kind)) => self.loose_iter(kind).next().map(|res| self.convert_loose(res)),
            },
        }
    }
}

impl<'s> Platform<'s> {
    /// Return an iterator over all references, loose or `packed`, sorted by their name.
    ///
    /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
    pub fn all(&self) -> std::io::Result<LooseThenPacked<'_, '_>> {
        self.store.iter_packed(self.packed.as_ref().map(|b| &***b))
    }

    /// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, i.e. "refs/heads".
    ///
    /// Please note that "refs/heads" or "refs\\heads" is equivalent to "refs/heads/"
    pub fn prefixed(&self, prefix: &Path) -> std::io::Result<LooseThenPacked<'_, '_>> {
        self.store
            .iter_prefixed_packed(prefix, self.packed.as_ref().map(|b| &***b))
    }
}

impl file::Store {
    /// Return a platform to obtain an iterator over all references, or prefixed ones, loose or packed, sorted by their name.
    ///
    /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
    ///
    /// Note that since packed-refs are storing refs as precomposed unicode if [`Self::precompose_unicode`] is true, for consistency
    /// we also return loose references as precomposed unicode.
    pub fn iter(&self) -> Result<Platform<'_>, packed::buffer::open::Error> {
        Ok(Platform {
            store: self,
            packed: self.assure_packed_refs_uptodate()?,
        })
    }
}

#[derive(Debug)]
pub(crate) enum IterInfo<'a> {
    Base {
        base: &'a Path,
        precompose_unicode: bool,
    },
    BaseAndIterRoot {
        base: &'a Path,
        iter_root: PathBuf,
        prefix: Cow<'a, Path>,
        precompose_unicode: bool,
    },
    PrefixAndBase {
        base: &'a Path,
        prefix: &'a Path,
        precompose_unicode: bool,
    },
    ComputedIterationRoot {
        /// The root to iterate over
        iter_root: PathBuf,
        /// The top-level directory as boundary of all references, used to create their short-names after iteration
        base: &'a Path,
        /// The original prefix
        prefix: Cow<'a, Path>,
        /// The remainder of the prefix that wasn't a valid path
        remainder: Option<BString>,
        /// If `true`, we will convert decomposed into precomposed unicode.
        precompose_unicode: bool,
    },
}

impl<'a> IterInfo<'a> {
    fn prefix(&self) -> Option<&Path> {
        match self {
            IterInfo::Base { .. } => None,
            IterInfo::PrefixAndBase { prefix, .. } => Some(*prefix),
            IterInfo::ComputedIterationRoot { prefix, .. } | IterInfo::BaseAndIterRoot { prefix, .. } => {
                prefix.as_ref().into()
            }
        }
    }

    fn into_iter(self) -> Peekable<SortedLoosePaths> {
        match self {
            IterInfo::Base {
                base,
                precompose_unicode,
            } => SortedLoosePaths::at(&base.join("refs"), base.into(), None, precompose_unicode),
            IterInfo::BaseAndIterRoot {
                base,
                iter_root,
                prefix: _,
                precompose_unicode,
            } => SortedLoosePaths::at(&iter_root, base.into(), None, precompose_unicode),
            IterInfo::PrefixAndBase {
                base,
                prefix,
                precompose_unicode,
            } => SortedLoosePaths::at(&base.join(prefix), base.into(), None, precompose_unicode),
            IterInfo::ComputedIterationRoot {
                iter_root,
                base,
                prefix: _,
                remainder,
                precompose_unicode,
            } => SortedLoosePaths::at(&iter_root, base.into(), remainder, precompose_unicode),
        }
        .peekable()
    }

    fn from_prefix(base: &'a Path, prefix: Cow<'a, Path>, precompose_unicode: bool) -> std::io::Result<Self> {
        if prefix.is_absolute() {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "prefix must be a relative path, like 'refs/heads'",
            ));
        }
        use std::path::Component::*;
        if prefix.components().any(|c| matches!(c, CurDir | ParentDir)) {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "Refusing to handle prefixes with relative path components",
            ));
        }
        let iter_root = base.join(prefix.as_ref());
        if iter_root.is_dir() {
            Ok(IterInfo::BaseAndIterRoot {
                base,
                iter_root,
                prefix,
                precompose_unicode,
            })
        } else {
            let filename_prefix = iter_root
                .file_name()
                .map(ToOwned::to_owned)
                .map(|p| {
                    gix_path::try_into_bstr(PathBuf::from(p))
                        .map(std::borrow::Cow::into_owned)
                        .map_err(|_| {
                            std::io::Error::new(std::io::ErrorKind::InvalidInput, "prefix contains ill-formed UTF-8")
                        })
                })
                .transpose()?;
            let iter_root = iter_root
                .parent()
                .expect("a parent is always there unless empty")
                .to_owned();
            Ok(IterInfo::ComputedIterationRoot {
                base,
                prefix,
                iter_root,
                remainder: filename_prefix,
remainder: filename_prefix, precompose_unicode, }) } } } impl file::Store { /// Return an iterator over all references, loose or `packed`, sorted by their name. /// /// Errors are returned similarly to what would happen when loose and packed refs where iterated by themselves. pub fn iter_packed<'s, 'p>( &'s self, packed: Option<&'p packed::Buffer>, ) -> std::io::Result> { match self.namespace.as_ref() { Some(namespace) => self.iter_from_info( IterInfo::PrefixAndBase { base: self.git_dir(), prefix: namespace.to_path(), precompose_unicode: self.precompose_unicode, }, self.common_dir().map(|base| IterInfo::PrefixAndBase { base, prefix: namespace.to_path(), precompose_unicode: self.precompose_unicode, }), packed, ), None => self.iter_from_info( IterInfo::Base { base: self.git_dir(), precompose_unicode: self.precompose_unicode, }, self.common_dir().map(|base| IterInfo::Base { base, precompose_unicode: self.precompose_unicode, }), packed, ), } } /// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, i.e. "refs/heads". /// /// Please note that "refs/heads" or "refs\\heads" is equivalent to "refs/heads/" pub fn iter_prefixed_packed<'s, 'p>( &'s self, prefix: &Path, packed: Option<&'p packed::Buffer>, ) -> std::io::Result> { match self.namespace.as_ref() { None => { let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.into(), self.precompose_unicode)?; let common_dir_info = self .common_dir() .map(|base| IterInfo::from_prefix(base, prefix.into(), self.precompose_unicode)) .transpose()?; self.iter_from_info(git_dir_info, common_dir_info, packed) } Some(namespace) => { let prefix = namespace.to_owned().into_namespaced_prefix(prefix); let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.clone().into(), self.precompose_unicode)?; let common_dir_info = self .common_dir() .map(|base| IterInfo::from_prefix(base, prefix.into(), self.precompose_unicode)) .transpose()?; self.iter_from_info(git_dir_info, common_dir_info, packed) } } } fn iter_from_info<'s, 'p>( &'s self, git_dir_info: IterInfo<'_>, common_dir_info: Option>, packed: Option<&'p packed::Buffer>, ) -> std::io::Result> { Ok(LooseThenPacked { git_dir: self.git_dir(), common_dir: self.common_dir(), iter_packed: match packed { Some(packed) => Some( match git_dir_info.prefix() { Some(prefix) => packed.iter_prefixed(path_to_name(prefix).into_owned()), None => packed.iter(), } .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))? .peekable(), ), None => None, }, iter_git_dir: git_dir_info.into_iter(), iter_common_dir: common_dir_info.map(IterInfo::into_iter), buf: Vec::new(), namespace: self.namespace.as_ref(), }) } } mod error { use std::{io, path::PathBuf}; use gix_object::bstr::BString; use crate::store_impl::file; /// The error returned by the [`LooseThenPacked`][super::LooseThenPacked] iterator. 
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("The file system could not be traversed")]
        Traversal(#[source] io::Error),
        #[error("The ref file {path:?} could not be read in full")]
        ReadFileContents { source: io::Error, path: PathBuf },
        #[error("The reference at \"{relative_path}\" could not be instantiated")]
        ReferenceCreation {
            source: file::loose::reference::decode::Error,
            relative_path: PathBuf,
        },
        #[error("Invalid reference in line {line_number}: {invalid_line:?}")]
        PackedReference { invalid_line: BString, line_number: usize },
    }
}
pub use error::Error;
gix-ref-0.43.0/src/store/file/packed.rs
use std::path::PathBuf;

use crate::store_impl::{file, packed};

impl file::Store {
    /// Return a packed transaction ready to receive updates. Use this to create or update `packed-refs`.
    /// Note that if you already have a [`packed::Buffer`] then use its [`packed::Buffer::into_transaction()`] method instead.
    pub(crate) fn packed_transaction(
        &self,
        lock_mode: gix_lock::acquire::Fail,
    ) -> Result<packed::Transaction, transaction::Error> {
        let lock = gix_lock::File::acquire_to_update_resource(self.packed_refs_path(), lock_mode, None)?;
        // We 'steal' the possibly existing packed buffer, which may save time if it's already there and fresh.
        // If nothing else is happening, nobody will get to see the soon-stale buffer either, but if so, they will pay
        // for reloading it. That seems preferred over always loading up a new one.
        Ok(packed::Transaction::new_from_pack_and_lock(
            self.assure_packed_refs_uptodate()?,
            lock,
            self.precompose_unicode,
            self.namespace.clone(),
        ))
    }

    /// Try to open a new packed buffer. It's not an error if it doesn't exist, but yields `Ok(None)`.
    ///
    /// Note that it will automatically be memory mapped if it exceeds the default threshold of 32KB.
    /// Change the threshold with [`file::Store::set_packed_buffer_mmap_threshold()`].
    pub fn open_packed_buffer(&self) -> Result<Option<packed::Buffer>, packed::buffer::open::Error> {
        match packed::Buffer::open(self.packed_refs_path(), self.packed_buffer_mmap_threshold) {
            Ok(buf) => Ok(Some(buf)),
            Err(packed::buffer::open::Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
            Err(err) => Err(err),
        }
    }

    /// Return a possibly cached packed buffer with shared ownership. At retrieval it will assure it's up to date, but
    /// after that it can be considered a snapshot as it cannot change anymore.
    ///
    /// Use this to make successive calls to [`file::Store::try_find_packed()`]
    /// or obtain iterators using [`file::Store::iter_packed()`] in a way that assures the packed-refs content won't change.
    pub fn cached_packed_buffer(
        &self,
    ) -> Result<Option<SharedBufferSnapshot>, packed::buffer::open::Error> {
        self.assure_packed_refs_uptodate()
    }

    /// Return the path at which packed-refs would usually be stored
    pub fn packed_refs_path(&self) -> PathBuf {
        self.common_dir_resolved().join("packed-refs")
    }

    pub(crate) fn packed_refs_lock_path(&self) -> PathBuf {
        let mut p = self.packed_refs_path();
        p.set_extension("lock");
        p
    }
}

///
#[allow(clippy::empty_docs)]
pub mod transaction {
    use crate::store_impl::packed;

    /// The error returned by [`file::Transaction::prepare()`][crate::file::Transaction::prepare()].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("An existing pack couldn't be opened or read when preparing a transaction")]
        BufferOpen(#[from] packed::buffer::open::Error),
        #[error("The lock for a packed transaction could not be obtained")]
        TransactionLock(#[from] gix_lock::acquire::Error),
    }
}

/// An up-to-date snapshot of the packed refs buffer.
pub type SharedBufferSnapshot = gix_fs::SharedFileSnapshot<packed::Buffer>;

pub(crate) mod modifiable {
    use gix_features::threading::OwnShared;

    use crate::{file, packed};

    pub(crate) type MutableSharedBuffer = OwnShared<gix_fs::SharedFileSnapshotMut<packed::Buffer>>;

    impl file::Store {
        pub(crate) fn force_refresh_packed_buffer(&self) -> Result<(), packed::buffer::open::Error> {
            self.packed.force_refresh(|| {
                let modified = self.packed_refs_path().metadata()?.modified()?;
                self.open_packed_buffer().map(|packed| Some(modified).zip(packed))
            })
        }

        pub(crate) fn assure_packed_refs_uptodate(
            &self,
        ) -> Result<Option<super::SharedBufferSnapshot>, packed::buffer::open::Error> {
            self.packed.recent_snapshot(
                || self.packed_refs_path().metadata().and_then(|m| m.modified()).ok(),
                || self.open_packed_buffer(),
            )
        }
    }
}
gix-ref-0.43.0/src/store/file/raw_ext.rs
use std::collections::BTreeSet;

use gix_hash::ObjectId;

use crate::{
    packed, peel,
    raw::Reference,
    store_impl::{file, file::log},
    Target,
};

pub trait Sealed {}
impl Sealed for crate::Reference {}

/// A trait to extend [Reference][crate::Reference] with functionality requiring a [file::Store].
pub trait ReferenceExt: Sealed {
    /// A step towards obtaining forward or reverse iterators on reference logs.
    fn log_iter<'a, 's>(&'a self, store: &'s file::Store) -> log::iter::Platform<'a, 's>;

    /// For details, see [`Reference::log_exists()`].
    fn log_exists(&self, store: &file::Store) -> bool;

    /// Follow all symbolic targets this reference might point to and peel the underlying object
    /// to the end of the chain, and return it, using `objects` to access them.
    ///
    /// This is useful to learn where this reference is ultimately pointing to.
    fn peel_to_id_in_place(
        &mut self,
        store: &file::Store,
        objects: &dyn gix_object::Find,
    ) -> Result<ObjectId, peel::to_id::Error>;

    /// Like [`ReferenceExt::peel_to_id_in_place()`], but with support for a known stable packed buffer
    /// to use for resolving symbolic links.
    fn peel_to_id_in_place_packed(
        &mut self,
        store: &file::Store,
        objects: &dyn gix_object::Find,
        packed: Option<&packed::Buffer>,
    ) -> Result<ObjectId, peel::to_id::Error>;

    /// Follow this symbolic reference one level and return the ref it refers to.
    ///
    /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
    fn follow(&self, store: &file::Store) -> Option<Result<Reference, file::find::existing::Error>>;

    /// Follow this symbolic reference one level and return the ref it refers to,
    /// possibly providing access to `packed` references for lookup if it contains the referent.
    ///
    /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
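    ///
    /// A sketch of following a reference one level with a pinned packed-refs snapshot
    /// (assuming `reference` and `store` are already at hand):
    ///
    /// ```no_run
    /// # use gix_ref::file::ReferenceExt;
    /// # fn follow_once(reference: &gix_ref::Reference, store: &gix_ref::file::Store) {
    /// let packed = store.cached_packed_buffer().expect("packed-refs readable");
    /// if let Some(referent) = reference.follow_packed(store, packed.as_ref().map(|b| &***b)) {
    ///     let _next = referent.expect("the referent exists and is valid");
    /// }
    /// # }
    /// ```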
    fn follow_packed(
        &self,
        store: &file::Store,
        packed: Option<&packed::Buffer>,
    ) -> Option<Result<Reference, file::find::existing::Error>>;
}

impl ReferenceExt for Reference {
    fn log_iter<'a, 's>(&'a self, store: &'s file::Store) -> log::iter::Platform<'a, 's> {
        log::iter::Platform {
            store,
            name: self.name.as_ref(),
            buf: Vec::new(),
        }
    }

    fn log_exists(&self, store: &file::Store) -> bool {
        store
            .reflog_exists(self.name.as_ref())
            .expect("infallible name conversion")
    }

    fn peel_to_id_in_place(
        &mut self,
        store: &file::Store,
        objects: &dyn gix_object::Find,
    ) -> Result<ObjectId, peel::to_id::Error> {
        let packed = store.assure_packed_refs_uptodate().map_err(|err| {
            peel::to_id::Error::Follow(file::find::existing::Error::Find(file::find::Error::PackedOpen(err)))
        })?;
        self.peel_to_id_in_place_packed(store, objects, packed.as_ref().map(|b| &***b))
    }

    fn peel_to_id_in_place_packed(
        &mut self,
        store: &file::Store,
        objects: &dyn gix_object::Find,
        packed: Option<&packed::Buffer>,
    ) -> Result<ObjectId, peel::to_id::Error> {
        match self.peeled {
            Some(peeled) => {
                self.target = Target::Peeled(peeled.to_owned());
                Ok(peeled)
            }
            None => {
                if self.target.kind() == crate::Kind::Symbolic {
                    let mut seen = BTreeSet::new();
                    let cursor = &mut *self;
                    while let Some(next) = cursor.follow_packed(store, packed) {
                        let next = next?;
                        if seen.contains(&next.name) {
                            return Err(peel::to_id::Error::Cycle {
                                start_absolute: store.reference_path(cursor.name.as_ref()),
                            });
                        }
                        *cursor = next;
                        seen.insert(cursor.name.clone());
                        const MAX_REF_DEPTH: usize = 5;
                        if seen.len() == MAX_REF_DEPTH {
                            return Err(peel::to_id::Error::DepthLimitExceeded {
                                max_depth: MAX_REF_DEPTH,
                            });
                        }
                    }
                };
                let mut buf = Vec::new();
                let mut oid = self.target.try_id().expect("peeled ref").to_owned();
                let peeled_id = loop {
                    let gix_object::Data { kind, data } = objects
                        .try_find(&oid, &mut buf)?
                        .ok_or_else(|| peel::to_id::Error::NotFound {
                            oid,
                            name: self.name.0.clone(),
                        })?;
                    match kind {
                        gix_object::Kind::Tag => {
                            oid = gix_object::TagRefIter::from_bytes(data).target_id().map_err(|_err| {
                                peel::to_id::Error::NotFound {
                                    oid,
                                    name: self.name.0.clone(),
                                }
                            })?;
                        }
                        _ => break oid,
                    };
                };
                self.peeled = Some(peeled_id);
                self.target = Target::Peeled(peeled_id);
                Ok(peeled_id)
            }
        }
    }

    fn follow(&self, store: &file::Store) -> Option<Result<Reference, file::find::existing::Error>> {
        let packed = match store
            .assure_packed_refs_uptodate()
            .map_err(|err| file::find::existing::Error::Find(file::find::Error::PackedOpen(err)))
        {
            Ok(packed) => packed,
            Err(err) => return Some(Err(err)),
        };
        self.follow_packed(store, packed.as_ref().map(|b| &***b))
    }

    fn follow_packed(
        &self,
        store: &file::Store,
        packed: Option<&packed::Buffer>,
    ) -> Option<Result<Reference, file::find::existing::Error>> {
        match self.peeled {
            Some(peeled) => Some(Ok(Reference {
                name: self.name.clone(),
                target: Target::Peeled(peeled),
                peeled: None,
            })),
            None => match &self.target {
                Target::Peeled(_) => None,
                Target::Symbolic(full_name) => match store.try_find_packed(full_name.as_ref(), packed) {
                    Ok(Some(next)) => Some(Ok(next)),
                    Ok(None) => Some(Err(file::find::existing::Error::NotFound {
                        name: full_name.to_path().to_owned(),
                    })),
                    Err(err) => Some(Err(file::find::existing::Error::Find(err))),
                },
            },
        }
    }
}
gix-ref-0.43.0/src/store/file/transaction/commit.rs
use crate::{
    store_impl::file::{transaction::PackedRefs, Transaction},
    transaction::{Change, LogChange, RefEdit, RefLog},
    Target,
};

impl<'s, 'p> Transaction<'s, 'p> {
    /// Make all [prepared][Transaction::prepare()] permanent and return the performed edits which represent the current
    /// state of the affected refs in the ref store in that instant.
    /// Please note that the obtained edits may have been adjusted to contain more dependent edits or additional information.
    /// `committer` is used in the reflog and only if the reflog is actually written, which is why it is optional. Please note
    /// that if `None` is passed and the reflog needs to be written, the operation will be aborted late and a few refs may have been
    /// successfully committed already, making clear the non-atomic nature of multi-file edits.
    ///
    /// On error the transaction may have been performed partially, depending on the nature of the error, and no attempt to roll back
    /// partial changes is made.
    ///
    /// In this stage, we perform the following operations:
    ///
    /// * update the ref log
    /// * move updated refs into place
    /// * delete reflogs and empty parent directories
    /// * delete packed refs
    /// * delete their corresponding reference (if applicable)
    ///   along with empty parent directories
    ///
    /// Note that transactions will be prepared automatically as needed.
    pub fn commit<'a>(self, committer: impl Into<Option<gix_actor::SignatureRef<'a>>>) -> Result<Vec<RefEdit>, Error> {
        self.commit_inner(committer.into())
    }

    fn commit_inner(self, committer: Option<gix_actor::SignatureRef<'_>>) -> Result<Vec<RefEdit>, Error> {
        let mut updates = self.updates.expect("BUG: must call prepare before commit");
        let delete_loose_refs = matches!(
            self.packed_refs,
            PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_)
        );

        // Perform updates first so live commits remain referenced
        for change in &mut updates {
            assert!(!change.update.deref, "Deref mode is turned into splits and turned off");
            match &change.update.change {
                // reflog first, then reference
                Change::Update { log, new, expected } => {
                    let lock = change.lock.take();
                    let (update_ref, update_reflog) = match log.mode {
                        RefLog::Only => (false, true),
                        RefLog::AndReference => (true, true),
                    };
                    if update_reflog {
                        let log_update = match new {
                            Target::Symbolic(_) => {
                                // Special HACK: no reflog for symref changes as there is no OID involved which the reflog needs.
                                // Unless, the ref is new and we can obtain a peeled id
                                // identified by the expectation of what could be there, as is the case when cloning.
                                match expected {
                                    PreviousValue::ExistingMustMatch(Target::Peeled(oid)) => {
                                        Some((Some(gix_hash::ObjectId::null(oid.kind())), oid))
                                    }
                                    _ => None,
                                }
                            }
                            Target::Peeled(new_oid) => {
                                let previous = match expected {
                                    // Here, this means that the ref already existed, and that it will receive (even transitively)
                                    // the given value
                                    PreviousValue::MustExistAndMatch(Target::Peeled(oid)) => Some(oid.to_owned()),
                                    _ => None,
                                }
                                .or(change.leaf_referent_previous_oid);
                                Some((previous, new_oid))
                            }
                        };
                        if let Some((previous, new_oid)) = log_update {
                            let do_update = previous.as_ref().map_or(true, |previous| previous != new_oid);
                            if do_update {
                                self.store.reflog_create_or_append(
                                    change.update.name.as_ref(),
                                    previous,
                                    new_oid,
                                    committer,
                                    log.message.as_ref(),
                                    log.force_create_reflog,
                                )?;
                            }
                        }
                    }
                    // Don't do anything else while keeping the lock after potentially updating the reflog.
                    // We delay deletion of the reference and dropping the lock to after the packed-refs were
                    // safely written.
                    if delete_loose_refs && matches!(new, Target::Peeled(_)) {
                        change.lock = lock;
                        continue;
                    }
                    if update_ref {
                        if let Some(Err(err)) = lock.map(gix_lock::Marker::commit) {
                            // TODO: when Kind::IsADirectory becomes stable, use that.
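                            // A directory may be squatting on the ref's location, e.g. an empty leftover
                            // hierarchy - remove it (if empty) and retry committing the lock.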
let err = if err.instance.resource_path().is_dir() { gix_tempfile::remove_dir::empty_depth_first(err.instance.resource_path()) .map_err(|io_err| std::io::Error::new(std::io::ErrorKind::Other, io_err)) .and_then(|_| err.instance.commit().map_err(|err| err.error)) .err() } else { Some(err.error) }; if let Some(err) = err { return Err(Error::LockCommit { source: err, full_name: change.name(), }); } }; } } Change::Delete { .. } => {} } } for change in &mut updates { let (reflog_root, relative_name) = self.store.reflog_base_and_relative_path(change.update.name.as_ref()); match &change.update.change { Change::Update { .. } => {} Change::Delete { .. } => { // Reflog deletion happens first in case it fails a ref without log is less terrible than // a log without a reference. let reflog_path = reflog_root.join(relative_name); if let Err(err) = std::fs::remove_file(&reflog_path) { if err.kind() != std::io::ErrorKind::NotFound { return Err(Error::DeleteReflog { source: err, full_name: change.name(), }); } } else { gix_tempfile::remove_dir::empty_upward_until_boundary( reflog_path.parent().expect("never without parent"), &reflog_root, ) .ok(); } } } } if let Some(t) = self.packed_transaction { t.commit().map_err(Error::PackedTransactionCommit)?; // Always refresh ourselves right away to avoid races. We ignore errors as there may be many reasons this fails, and it's not // critical to be done here. In other words, the pack may be refreshed at a later time and then it might work. self.store.force_refresh_packed_buffer().ok(); } for change in &mut updates { let take_lock_and_delete = match &change.update.change { Change::Update { log: LogChange { mode, .. }, new, .. } => delete_loose_refs && *mode == RefLog::AndReference && matches!(new, Target::Peeled(_)), Change::Delete { log: mode, .. } => *mode == RefLog::AndReference, }; if take_lock_and_delete { let lock = change.lock.take(); let reference_path = self.store.reference_path(change.update.name.as_ref()); if let Err(err) = std::fs::remove_file(reference_path) { if err.kind() != std::io::ErrorKind::NotFound { return Err(Error::DeleteReference { err, full_name: change.name(), }); } } drop(lock) } } Ok(updates.into_iter().map(|edit| edit.update).collect()) } } mod error { use gix_object::bstr::BString; use crate::store_impl::{file, packed}; /// The error returned by various [`Transaction`][super::Transaction] methods. 
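// Variants that can occur part-way through a multi-ref commit (`LockCommit`,
// `DeleteReference`, `DeleteReflog`) carry the `full_name` of the failing ref so
// callers can tell which edit broke the non-atomic operation.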
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The packed-ref transaction could not be committed")] PackedTransactionCommit(#[source] packed::transaction::commit::Error), #[error("Edit preprocessing failed with error")] PreprocessingFailed { source: std::io::Error }, #[error("The change for reference {full_name:?} could not be committed")] LockCommit { source: std::io::Error, full_name: BString }, #[error("The reference {full_name} could not be deleted")] DeleteReference { full_name: BString, err: std::io::Error }, #[error("The reflog of reference {full_name:?} could not be deleted")] DeleteReflog { full_name: BString, source: std::io::Error }, #[error("The reflog could not be created or updated")] CreateOrUpdateRefLog(#[from] file::log::create_or_update::Error), } } pub use error::Error; use crate::transaction::PreviousValue; gix-ref-0.43.0/src/store/file/transaction/mod.rs000064400000000000000000000072721046102023000175750ustar 00000000000000use std::fmt::Formatter; use gix_hash::ObjectId; use gix_object::bstr::BString; use crate::{ store_impl::{file, file::Transaction}, transaction::RefEdit, }; /// How to handle packed refs during a transaction #[derive(Default)] pub enum PackedRefs<'a> { /// Only propagate deletions of references. This is the default. /// This means deleted references are removed from disk if they are loose and from the packed-refs file if they are present. #[default] DeletionsOnly, /// Propagate deletions as well as updates to references which are peeled and contain an object id. /// /// This means deleted references are removed from disk if they are loose and from the packed-refs file if they are present, /// while updates are also written into the loose file as well as into packed-refs, potentially creating an entry. DeletionsAndNonSymbolicUpdates(Box), /// Propagate deletions as well as updates to references which are peeled and contain an object id. Furthermore delete the /// reference which is originally updated if it exists. If it doesn't, the new value will be written into the packed ref right away. /// Note that this doesn't affect symbolic references at all, which can't be placed into packed refs. /// /// Thus, this is similar to `DeletionsAndNonSymbolicUpdates`, but removes the loose reference after the update, leaving only their copy /// in `packed-refs`. DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box), } #[derive(Debug)] pub(in crate::store_impl::file) struct Edit { update: RefEdit, lock: Option, /// Set if this update is coming from a symbolic reference and used to make it appear like it is the one that is handled, /// instead of the referent reference. parent_index: Option, /// For symbolic refs, this is the previous OID to put into the reflog instead of our own previous value. It's the /// peeled value of the leaf referent. leaf_referent_previous_oid: Option, } impl Edit { fn name(&self) -> BString { self.update.name.0.clone() } } impl std::borrow::Borrow for Edit { fn borrow(&self) -> &RefEdit { &self.update } } impl std::borrow::BorrowMut for Edit { fn borrow_mut(&mut self) -> &mut RefEdit { &mut self.update } } /// Edits impl file::Store { /// Open a transaction with the given `edits`, and determine how to fail if a `lock` cannot be obtained. /// A snapshot of packed references will be obtained automatically if needed to fulfill this transaction /// and will be provided as result of a successful transaction. Note that upon transaction failure, packed-refs /// will never have been altered. 
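///
/// A sketch of configuring such a transaction to also write peeled updates into
/// `packed-refs` - `odb` is a hypothetical stand-in for any `gix_object::Find`
/// implementation, and `edits`, `fail_mode` and `committer` are assumed to exist:
///
/// ```text
/// store.transaction()
///      .packed_refs(PackedRefs::DeletionsAndNonSymbolicUpdates(Box::new(odb)))
///      .prepare(edits, fail_mode, fail_mode)?
///      .commit(committer)?;
/// ```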
/// /// The transaction inherits the parent namespace. pub fn transaction(&self) -> Transaction<'_, '_> { Transaction { store: self, packed_transaction: None, updates: None, packed_refs: PackedRefs::default(), } } } impl<'s, 'p> Transaction<'s, 'p> { /// Configure the way packed refs are handled during the transaction pub fn packed_refs(mut self, packed_refs: PackedRefs<'p>) -> Self { self.packed_refs = packed_refs; self } } impl std::fmt::Debug for Transaction<'_, '_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct("Transaction") .field("store", self.store) .field("edits", &self.updates.as_ref().map(Vec::len)) .finish_non_exhaustive() } } /// #[allow(clippy::empty_docs)] pub mod prepare; /// #[allow(clippy::empty_docs)] pub mod commit; gix-ref-0.43.0/src/store/file/transaction/prepare.rs000064400000000000000000000524031046102023000204500ustar 00000000000000use crate::{ packed, packed::transaction::buffer_into_transaction, store_impl::{ file, file::{ loose, transaction::{Edit, PackedRefs}, Transaction, }, }, transaction::{Change, LogChange, PreviousValue, RefEdit, RefEditsExt, RefLog}, FullName, FullNameRef, Reference, Target, }; impl<'s, 'p> Transaction<'s, 'p> { fn lock_ref_and_apply_change( store: &file::Store, lock_fail_mode: gix_lock::acquire::Fail, packed: Option<&packed::Buffer>, change: &mut Edit, has_global_lock: bool, direct_to_packed_refs: bool, ) -> Result<(), Error> { use std::io::Write; assert!( change.lock.is_none(), "locks can only be acquired once and it's all or nothing" ); let existing_ref = store .ref_contents(change.update.name.as_ref()) .map_err(Error::from) .and_then(|maybe_loose| { maybe_loose .map(|buf| { loose::Reference::try_from_path(change.update.name.clone(), &buf) .map(Reference::from) .map_err(Error::from) }) .transpose() }) .or_else(|err| match err { Error::ReferenceDecode(_) => Ok(None), other => Err(other), }) .and_then(|maybe_loose| match (maybe_loose, packed) { (None, Some(packed)) => packed .try_find(change.update.name.as_ref()) .map(|opt| opt.map(Into::into)) .map_err(Error::from), (None, None) => Ok(None), (maybe_loose, _) => Ok(maybe_loose), }); let lock = match &mut change.update.change { Change::Delete { expected, .. } => { let (base, relative_path) = store.reference_path_with_base(change.update.name.as_ref()); let lock = if has_global_lock { None } else { gix_lock::Marker::acquire_to_hold_resource( base.join(relative_path.as_ref()), lock_fail_mode, Some(base.clone().into_owned()), ) .map_err(|err| Error::LockAcquire { source: err, full_name: "borrowcheck won't allow change.name()".into(), })? .into() }; let existing_ref = existing_ref?; match (&expected, &existing_ref) { (PreviousValue::MustNotExist, _) => { panic!("BUG: MustNotExist constraint makes no sense if references are to be deleted") } (PreviousValue::ExistingMustMatch(_) | PreviousValue::Any, None) | (PreviousValue::MustExist | PreviousValue::Any, Some(_)) => {} (PreviousValue::MustExist | PreviousValue::MustExistAndMatch(_), None) => { return Err(Error::DeleteReferenceMustExist { full_name: change.name(), }) } ( PreviousValue::MustExistAndMatch(previous) | PreviousValue::ExistingMustMatch(previous), Some(existing), ) => { let actual = existing.target.clone(); if *previous != actual { let expected = previous.clone(); return Err(Error::ReferenceOutOfDate { full_name: change.name(), expected, actual, }); } } } // Keep the previous value for the caller and ourselves. Maybe they want to keep a log of sorts. 
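// Rewriting `expected` to `MustExistAndMatch(<actual target>)` makes the edit that
// `commit()` returns double as a record of what was deleted, sparing callers a re-read.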
if let Some(existing) = existing_ref { *expected = PreviousValue::MustExistAndMatch(existing.target); } lock } Change::Update { expected, new, .. } => { let (base, relative_path) = store.reference_path_with_base(change.update.name.as_ref()); let obtain_lock = || { gix_lock::File::acquire_to_update_resource( base.join(relative_path.as_ref()), lock_fail_mode, Some(base.clone().into_owned()), ) .map_err(|err| Error::LockAcquire { source: err, full_name: "borrowcheck won't allow change.name() and this will be corrected by caller".into(), }) }; let mut lock = (!has_global_lock).then(obtain_lock).transpose()?; let existing_ref = existing_ref?; match (&expected, &existing_ref) { (PreviousValue::Any, _) | (PreviousValue::MustExist, Some(_)) | (PreviousValue::MustNotExist | PreviousValue::ExistingMustMatch(_), None) => {} (PreviousValue::MustExist, None) => { let expected = Target::Peeled(store.object_hash.null()); let full_name = change.name(); return Err(Error::MustExist { full_name, expected }); } (PreviousValue::MustNotExist, Some(existing)) => { if existing.target != *new { let new = new.clone(); return Err(Error::MustNotExist { full_name: change.name(), actual: existing.target.clone(), new, }); } } ( PreviousValue::MustExistAndMatch(previous) | PreviousValue::ExistingMustMatch(previous), Some(existing), ) => { if *previous != existing.target { let actual = existing.target.clone(); let expected = previous.to_owned(); let full_name = change.name(); return Err(Error::ReferenceOutOfDate { full_name, actual, expected, }); } } (PreviousValue::MustExistAndMatch(previous), None) => { let expected = previous.to_owned(); let full_name = change.name(); return Err(Error::MustExist { full_name, expected }); } }; fn new_would_change_existing(new: &Target, existing: &Target) -> (bool, bool) { match (new, existing) { (Target::Peeled(new), Target::Peeled(old)) => (old != new, false), (Target::Symbolic(new), Target::Symbolic(old)) => (old != new, true), (Target::Peeled(_), _) => (true, false), (Target::Symbolic(_), _) => (true, true), } } let (is_effective, is_symbolic) = if let Some(existing) = existing_ref { let (effective, is_symbolic) = new_would_change_existing(new, &existing.target); *expected = PreviousValue::MustExistAndMatch(existing.target); (effective, is_symbolic) } else { (true, matches!(new, Target::Symbolic(_))) }; if (is_effective && !direct_to_packed_refs) || is_symbolic { let mut lock = lock.take().map_or_else(obtain_lock, Ok)?; lock.with_mut(|file| match new { Target::Peeled(oid) => write!(file, "{oid}"), Target::Symbolic(name) => writeln!(file, "ref: {}", name.0), })?; Some(lock.close()?) } else { None } } }; change.lock = lock; Ok(()) } } impl<'s, 'p> Transaction<'s, 'p> { /// Prepare for calling [`commit(…)`][Transaction::commit()] in a way that can be rolled back perfectly. /// /// If the operation succeeds, the transaction can be committed or dropped to cause a rollback automatically. /// Rollbacks happen automatically on failure and they tend to be perfect. /// This method is idempotent. 
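///
/// A minimal usage sketch, not part of the original documentation: it assumes an
/// existing `store: file::Store`, creates a single branch and fails immediately on
/// lock contention. Passing `None` as committer to `commit()` is only viable when
/// no reflog line has to be written.
///
/// ```no_run
/// use gix_ref::{
///     file,
///     transaction::{Change, LogChange, PreviousValue, RefEdit},
///     Target,
/// };
///
/// fn create_ref(store: &file::Store, id: gix_hash::ObjectId) -> Result<(), Box<dyn std::error::Error>> {
///     let tx = store.transaction().prepare(
///         Some(RefEdit {
///             change: Change::Update {
///                 log: LogChange::default(),
///                 expected: PreviousValue::MustNotExist,
///                 new: Target::Peeled(id),
///             },
///             name: "refs/heads/example".try_into()?,
///             deref: false,
///         }),
///         gix_lock::acquire::Fail::Immediately,
///         gix_lock::acquire::Fail::Immediately,
///     )?;
///     tx.commit(None)?;
///     Ok(())
/// }
/// ```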
pub fn prepare( self, edits: impl IntoIterator, ref_files_lock_fail_mode: gix_lock::acquire::Fail, packed_refs_lock_fail_mode: gix_lock::acquire::Fail, ) -> Result { self.prepare_inner( &mut edits.into_iter(), ref_files_lock_fail_mode, packed_refs_lock_fail_mode, ) } fn prepare_inner( mut self, edits: &mut dyn Iterator, ref_files_lock_fail_mode: gix_lock::acquire::Fail, packed_refs_lock_fail_mode: gix_lock::acquire::Fail, ) -> Result { assert!(self.updates.is_none(), "BUG: Must not call prepare(…) multiple times"); let store = self.store; let mut updates: Vec<_> = edits .map(|update| Edit { update, lock: None, parent_index: None, leaf_referent_previous_oid: None, }) .collect(); updates .pre_process( &mut |name| { let symbolic_refs_are_never_packed = None; store .find_existing_inner(name, symbolic_refs_are_never_packed) .map(|r| r.target) .ok() }, &mut |idx, update| Edit { update, lock: None, parent_index: Some(idx), leaf_referent_previous_oid: None, }, ) .map_err(Error::PreprocessingFailed)?; let mut maybe_updates_for_packed_refs = match self.packed_refs { PackedRefs::DeletionsAndNonSymbolicUpdates(_) | PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_) => Some(0_usize), PackedRefs::DeletionsOnly => None, }; if maybe_updates_for_packed_refs.is_some() || self.store.packed_refs_path().is_file() || self.store.packed_refs_lock_path().is_file() { let mut edits_for_packed_transaction = Vec::::new(); let mut needs_packed_refs_lookups = false; for edit in &updates { let log_mode = match edit.update.change { Change::Update { log: LogChange { mode, .. }, .. } => mode, Change::Delete { log, .. } => log, }; if log_mode == RefLog::Only { continue; } let name = match possibly_adjust_name_for_prefixes(edit.update.name.as_ref()) { Some(n) => n, None => continue, }; if let Some(ref mut num_updates) = maybe_updates_for_packed_refs { if let Change::Update { new: Target::Peeled(_), .. } = edit.update.change { edits_for_packed_transaction.push(RefEdit { name, ..edit.update.clone() }); *num_updates += 1; } continue; } match edit.update.change { Change::Update { expected: PreviousValue::ExistingMustMatch(_) | PreviousValue::MustExistAndMatch(_), .. } => needs_packed_refs_lookups = true, Change::Delete { .. } => { edits_for_packed_transaction.push(RefEdit { name, ..edit.update.clone() }); } _ => { needs_packed_refs_lookups = true; } } } if !edits_for_packed_transaction.is_empty() || needs_packed_refs_lookups { // What follows means that we will only create a transaction if we have to access packed refs for looking // up current ref values, or that we definitely have a transaction if we need to make updates. Otherwise // we may have no transaction at all which isn't required if we had none and would only try making deletions. let packed_transaction: Option<_> = if maybe_updates_for_packed_refs.unwrap_or(0) > 0 || self.store.packed_refs_lock_path().is_file() { // We have to create a packed-ref even if it doesn't exist self.store .packed_transaction(packed_refs_lock_fail_mode) .map_err(|err| match err { file::packed::transaction::Error::BufferOpen(err) => Error::from(err), file::packed::transaction::Error::TransactionLock(err) => { Error::PackedTransactionAcquire(err) } })? .into() } else { // A packed transaction is optional - we only have deletions that can't be made if // no packed-ref file exists anyway self.store .assure_packed_refs_uptodate()? 
.map(|p| { buffer_into_transaction( p, packed_refs_lock_fail_mode, self.store.precompose_unicode, self.store.namespace.clone(), ) .map_err(Error::PackedTransactionAcquire) }) .transpose()? }; if let Some(transaction) = packed_transaction { self.packed_transaction = Some(match &mut self.packed_refs { PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(f) | PackedRefs::DeletionsAndNonSymbolicUpdates(f) => { transaction.prepare(&mut edits_for_packed_transaction.into_iter(), &**f)? } PackedRefs::DeletionsOnly => transaction .prepare(&mut edits_for_packed_transaction.into_iter(), &gix_object::find::Never)?, }); } } } for cid in 0..updates.len() { let change = &mut updates[cid]; if let Err(err) = Self::lock_ref_and_apply_change( self.store, ref_files_lock_fail_mode, self.packed_transaction.as_ref().and_then(packed::Transaction::buffer), change, self.packed_transaction.is_some(), matches!( self.packed_refs, PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_) ), ) { let err = match err { Error::LockAcquire { source, full_name: _bogus, } => Error::LockAcquire { source, full_name: { let mut cursor = change.parent_index; let mut ref_name = change.name(); while let Some(parent_idx) = cursor { let parent = &updates[parent_idx]; if parent.parent_index.is_none() { ref_name = parent.name(); } else { cursor = parent.parent_index; } } ref_name }, }, other => other, }; return Err(err); }; // traverse parent chain from leaf/peeled ref and set the leaf previous oid accordingly // to help with their reflog entries if let (Some(crate::TargetRef::Peeled(oid)), Some(parent_idx)) = (change.update.change.previous_value(), change.parent_index) { let oid = oid.to_owned(); let mut parent_idx_cursor = Some(parent_idx); while let Some(parent) = parent_idx_cursor.take().map(|idx| &mut updates[idx]) { parent_idx_cursor = parent.parent_index; parent.leaf_referent_previous_oid = Some(oid); } } } self.updates = Some(updates); Ok(self) } /// Rollback all intermediate state and return the `RefEdits` as we know them thus far. /// /// Note that they have been altered compared to what was initially provided as they have /// been split and know about their current state on disk. /// /// # Note /// /// A rollback happens automatically as this instance is dropped as well. pub fn rollback(self) -> Vec { self.updates .map(|updates| updates.into_iter().map(|u| u.update).collect()) .unwrap_or_default() } } fn possibly_adjust_name_for_prefixes(name: &FullNameRef) -> Option { match name.category_and_short_name() { Some((c, sn)) => { use crate::Category::*; let sn = FullNameRef::new_unchecked(sn); match c { Bisect | Rewritten | WorktreePrivate | LinkedPseudoRef { .. } | PseudoRef | MainPseudoRef => None, Tag | LocalBranch | RemoteBranch | Note => name.into(), MainRef | LinkedRef { .. } => sn .category() .map_or(false, |cat| !cat.is_worktree_private()) .then_some(sn), } .map(ToOwned::to_owned) } None => Some(name.to_owned()), // allow (uncategorized/very special) refs to be packed } } mod error { use gix_object::bstr::BString; use crate::{ store_impl::{file, packed}, Target, }; /// The error returned by various [`Transaction`][super::Transaction] methods. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The packed ref buffer could not be loaded")] Packed(#[from] packed::buffer::open::Error), #[error("The lock for the packed-ref file could not be obtained")] PackedTransactionAcquire(#[source] gix_lock::acquire::Error), #[error("The packed transaction could not be prepared")] PackedTransactionPrepare(#[from] packed::transaction::prepare::Error), #[error("The packed ref file could not be parsed")] PackedFind(#[from] packed::find::Error), #[error("Edit preprocessing failed with an error")] PreprocessingFailed(#[source] std::io::Error), #[error("A lock could not be obtained for reference {full_name:?}")] LockAcquire { source: gix_lock::acquire::Error, full_name: BString, }, #[error("An IO error occurred while applying an edit")] Io(#[from] std::io::Error), #[error("The reference {full_name:?} for deletion did not exist or could not be parsed")] DeleteReferenceMustExist { full_name: BString }, #[error("Reference {full_name:?} was not supposed to exist when writing it with value {new:?}, but actual content was {actual:?}")] MustNotExist { full_name: BString, actual: Target, new: Target, }, #[error("Reference {full_name:?} was supposed to exist with value {expected}, but didn't.")] MustExist { full_name: BString, expected: Target }, #[error("The reference {full_name:?} should have content {expected}, actual content was {actual}")] ReferenceOutOfDate { full_name: BString, expected: Target, actual: Target, }, #[error("Could not read reference")] ReferenceDecode(#[from] file::loose::reference::decode::Error), } } pub use error::Error; gix-ref-0.43.0/src/store/general/handle/find.rs000064400000000000000000000047731046102023000173450ustar 00000000000000use crate::{store, PartialNameRef, Reference}; mod error { use std::convert::Infallible; /// The error returned by [`crate::file::Store::find_loose()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("An error occurred while finding a reference in the loose file database")] Loose(#[from] crate::file::find::Error), #[error("The ref name or path is not a valid ref name")] RefnameValidation(#[from] crate::name::Error), } impl From for Error { fn from(_: Infallible) -> Self { unreachable!("this impl is needed to allow passing a known valid partial path as parameter") } } } pub use error::Error; use crate::store::handle; impl store::Handle { /// TODO: actually implement this with handling of the packed buffer. pub fn try_find<'a, Name, E>(&self, partial: Name) -> Result, Error> where Name: TryInto<&'a PartialNameRef, Error = E>, Error: From, { let _name = partial.try_into()?; match &self.state { handle::State::Loose { store: _, .. } => { todo!() } } } } mod existing { mod error { use std::path::PathBuf; /// The error returned by [file::Store::find_existing()][crate::file::Store::find_existing()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("An error occurred while finding a reference in the database")] Find(#[from] crate::store::find::Error), #[error("The ref partially named {name:?} could not be found")] NotFound { name: PathBuf }, } } pub use error::Error; use crate::{store, PartialNameRef, Reference}; impl store::Handle { /// Similar to [`crate::file::Store::find()`] but a non-existing ref is treated as error. 
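///
/// Note that this is not yet implemented and, like `try_find()` above, will
/// currently panic when called.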
pub fn find<'a, Name, E>(&self, _partial: Name) -> Result where Name: TryInto<&'a PartialNameRef, Error = E>, crate::name::Error: From, { todo!() // match self.try_find(partial) {} // match self.find_one_with_verified_input(path.to_partial_path().as_ref(), packed) { // Ok(Some(r)) => Ok(r), // Ok(None) => Err(Error::NotFound(path.to_partial_path().into_owned())), // Err(err) => Err(err.into()), // } } } } gix-ref-0.43.0/src/store/general/handle/mod.rs000064400000000000000000000024601046102023000171730ustar 00000000000000#![allow(dead_code)] use crate::{store, Namespace}; #[derive(Clone)] pub(crate) enum State { Loose { store: crate::file::Store }, } impl crate::Store { /// Return a new handle which sees all references if `namespace` is `None` or all read and write operations are limited /// to the given `namespace` if `Some`. pub fn to_handle(&self) -> store::Handle { Self::new_handle_inner(&self.inner, None) } /// As above, but supports a namespace to be set pub fn to_handle_namespaced(&self, namespace: Option) -> store::Handle { Self::new_handle_inner(&self.inner, namespace) } fn new_handle_inner(state: &store::State, namespace: Option) -> store::Handle { store::Handle { state: match state { store::State::Loose { store } => store::handle::State::Loose { store: { let mut store = store.clone(); store.namespace = namespace; store }, }, }, } } } /// #[allow(clippy::empty_docs)] pub mod find; mod iter { // impl store::Handle { // pub fn iter<'p, 's>(&'s self, packed: Option<&'p packed::Buffer>) -> std::io::Result> { // } } gix-ref-0.43.0/src/store/general/init.rs000064400000000000000000000026121046102023000161230ustar 00000000000000use std::path::PathBuf; use crate::store::WriteReflog; mod error { /// The error returned by [`crate::Store::at()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("There was an error accessing the store's directory")] Io(#[from] std::io::Error), } } pub use error::Error; use crate::file; #[allow(dead_code)] impl crate::Store { /// Create a new store at the given location, typically the `.git/` directory. /// /// `object_hash` defines the kind of hash to assume when dealing with refs. /// `precompose_unicode` is used to set to the value of [`crate::file::Store::precompose_unicode]. /// /// Note that if `precompose_unicode` is set, the `git_dir` is also expected to use precomposed unicode, /// or else some operations that strip prefixes will fail. pub fn at( git_dir: PathBuf, reflog_mode: WriteReflog, object_hash: gix_hash::Kind, precompose_unicode: bool, ) -> Result { // for now, just try to read the directory - later we will do that naturally as we have to figure out if it's a ref-table or not. std::fs::read_dir(&git_dir)?; Ok(crate::Store { inner: crate::store::State::Loose { store: file::Store::at(git_dir, reflog_mode, object_hash, precompose_unicode), }, }) } } gix-ref-0.43.0/src/store/general/mod.rs000064400000000000000000000000121046102023000157270ustar 00000000000000mod init; gix-ref-0.43.0/src/store/mod.rs000064400000000000000000000001411046102023000143150ustar 00000000000000/// #[allow(clippy::empty_docs)] pub mod file; /// #[allow(clippy::empty_docs)] pub mod packed; gix-ref-0.43.0/src/store/packed/buffer.rs000064400000000000000000000113151046102023000162430ustar 00000000000000use crate::store_impl::packed; impl AsRef<[u8]> for packed::Buffer { fn as_ref(&self) -> &[u8] { &self.data.as_ref()[self.offset..] 
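// `offset` skips the optional `# pack-refs with: ...` header so that consumers
// only ever see reference records.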
} } impl AsRef<[u8]> for packed::Backing { fn as_ref(&self) -> &[u8] { match self { packed::Backing::InMemory(data) => data, packed::Backing::Mapped(map) => map, } } } /// #[allow(clippy::empty_docs)] pub mod open { use std::path::PathBuf; use winnow::{prelude::*, stream::Offset}; use crate::store_impl::packed; /// Initialization impl packed::Buffer { fn open_with_backing(backing: packed::Backing, path: PathBuf) -> Result { let (backing, offset) = { let (offset, sorted) = { let mut input = backing.as_ref(); if *input.first().unwrap_or(&b' ') == b'#' { let header = packed::decode::header::<()> .parse_next(&mut input) .map_err(|_| Error::HeaderParsing)?; let offset = input.offset_from(&backing.as_ref()); (offset, header.sorted) } else { (0, false) } }; if !sorted { // this implementation is likely slower than what git does, but it's less code, too. let mut entries = packed::Iter::new(&backing.as_ref()[offset..])?.collect::, _>>()?; entries.sort_by_key(|e| e.name.as_bstr()); let mut serialized = Vec::::new(); for entry in entries { serialized.extend_from_slice(entry.target); serialized.push(b' '); serialized.extend_from_slice(entry.name.as_bstr()); serialized.push(b'\n'); if let Some(object) = entry.object { serialized.push(b'^'); serialized.extend_from_slice(object); serialized.push(b'\n'); } } (Backing::InMemory(serialized), 0) } else { (backing, offset) } }; Ok(packed::Buffer { offset, data: backing, path, }) } /// Open the file at `path` and map it into memory if the file size is larger than `use_memory_map_if_larger_than_bytes`. /// /// In order to allow fast lookups and optimizations, the contents of the packed refs must be sorted. /// If that's not the case, they will be sorted on the fly with the data being written into a memory buffer. pub fn open(path: PathBuf, use_memory_map_if_larger_than_bytes: u64) -> Result { let backing = if std::fs::metadata(&path)?.len() <= use_memory_map_if_larger_than_bytes { packed::Backing::InMemory(std::fs::read(&path)?) } else { packed::Backing::Mapped( // SAFETY: we have to take the risk of somebody changing the file underneath. Git never writes into the same file. #[allow(unsafe_code)] unsafe { memmap2::MmapOptions::new().map_copy_read_only(&std::fs::File::open(&path)?)? }, ) }; Self::open_with_backing(backing, path) } /// Open a buffer from `bytes`, which is the content of a typical `packed-refs` file. /// /// In order to allow fast lookups and optimizations, the contents of the packed refs must be sorted. /// If that's not the case, they will be sorted on the fly. pub fn from_bytes(bytes: &[u8]) -> Result { let backing = packed::Backing::InMemory(bytes.into()); Self::open_with_backing(backing, PathBuf::from("")) } } mod error { use crate::packed; /// The error returned by [`open()`][super::packed::Buffer::open()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The packed-refs file did not have a header or wasn't sorted and could not be iterated")] Iter(#[from] packed::iter::Error), #[error("The header could not be parsed, even though first line started with '#'")] HeaderParsing, #[error("The buffer could not be opened or read")] Io(#[from] std::io::Error), } } pub use error::Error; use crate::packed::Backing; } gix-ref-0.43.0/src/store/packed/decode/tests.rs000064400000000000000000000101701046102023000173550ustar 00000000000000type Result = std::result::Result<(), Box>; mod reference { use winnow::{error::TreeError, prelude::*}; use super::Result; use crate::{ store_impl::{packed, packed::decode}, FullNameRef, }; /// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_. fn hex_to_id(hex: &str) -> gix_hash::ObjectId { gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex") } #[test] fn invalid() { assert!(decode::reference::<()> .parse_peek(b"# what looks like a comment",) .is_err()); assert!( decode::reference::<()> .parse_peek(b"^e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37\n",) .is_err(), "lonely peel" ); } #[test] fn two_refs_in_a_row() -> Result { let input: &[u8] = b"d53c4b0f91f1b29769c9430f2d1c0bcab1170c75 refs/heads/alternates-after-packs-and-loose ^e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37\neaae9c1bc723209d793eb93f5587fa2604d5cd92 refs/heads/avoid-double-lookup\n"; let (input, parsed) = decode::reference::>.parse_peek(input).unwrap(); assert_eq!( parsed, packed::Reference { name: FullNameRef::new_unchecked("refs/heads/alternates-after-packs-and-loose".into()), target: "d53c4b0f91f1b29769c9430f2d1c0bcab1170c75".into(), object: Some("e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37".into()) } ); assert_eq!(parsed.target(), hex_to_id("d53c4b0f91f1b29769c9430f2d1c0bcab1170c75")); assert_eq!(parsed.object(), hex_to_id("e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37")); let (input, parsed) = decode::reference::>.parse_peek(input).unwrap(); assert!(input.is_empty(), "exhausted"); assert_eq!( parsed.name, FullNameRef::new_unchecked("refs/heads/avoid-double-lookup".into()) ); assert_eq!(parsed.target, "eaae9c1bc723209d793eb93f5587fa2604d5cd92"); assert!(parsed.object.is_none()); Ok(()) } } mod header { use gix_object::bstr::ByteSlice; use gix_testtools::to_bstr_err; use winnow::prelude::*; use super::Result; use crate::store_impl::packed::{ decode, decode::{Header, Peeled}, }; #[test] fn invalid() { assert!( decode::header::<()>.parse_peek(b"# some user comment").is_err(), "something the user put there" ); assert!( decode::header::<()>.parse_peek(b"# pack-refs: ").is_err(), "looks right but isn't" ); assert!( decode::header::<()>.parse_peek(b" # pack-refs with: ").is_err(), "does not start with #" ); } #[test] fn valid_fully_peeled_stored() -> Result { let input: &[u8] = b"# pack-refs with: peeled fully-peeled sorted \nsomething else"; let (rest, header) = decode::header::> .parse_peek(input) .map_err(to_bstr_err)?; assert_eq!(rest.as_bstr(), "something else", "remainder starts after newline"); assert_eq!( header, Header { peeled: Peeled::Fully, sorted: true } ); Ok(()) } #[test] fn valid_peeled_unsorted() -> Result { let input: &[u8] = b"# pack-refs with: peeled\n"; let (rest, header) = decode::header::<()>.parse_peek(input).unwrap(); assert!(rest.is_empty()); assert_eq!( header, Header { peeled: Peeled::Partial, sorted: false } ); Ok(()) } #[test] fn valid_empty() -> Result { let input: &[u8] = b"# pack-refs with: \n"; let (rest, header) = 
decode::header::<()>.parse_peek(input).unwrap(); assert!(rest.is_empty()); assert_eq!( header, Header { peeled: Peeled::Unspecified, sorted: false } ); Ok(()) } } gix-ref-0.43.0/src/store/packed/decode.rs000064400000000000000000000040261046102023000162160ustar 00000000000000use gix_object::bstr::{BStr, ByteSlice}; use winnow::{ combinator::{delimited, opt, preceded, terminated}, error::{FromExternalError, ParserError}, prelude::*, token::take_while, }; use crate::{ parse::{hex_hash, newline}, store_impl::packed, }; #[derive(Debug, PartialEq, Eq)] enum Peeled { Unspecified, Partial, Fully, } /// Information parsed from the header of a packed ref file #[derive(Debug, PartialEq, Eq)] pub struct Header { peeled: Peeled, pub sorted: bool, } impl Default for Header { fn default() -> Self { Header { peeled: Peeled::Unspecified, sorted: false, } } } fn until_newline<'a, E>(input: &mut &'a [u8]) -> PResult<&'a BStr, E> where E: ParserError<&'a [u8]>, { terminated(take_while(0.., |b: u8| b != b'\r' && b != b'\n'), newline) .map(ByteSlice::as_bstr) .parse_next(input) } pub fn header<'a, E>(input: &mut &'a [u8]) -> PResult where E: ParserError<&'a [u8]>, { preceded(b"# pack-refs with: ", until_newline) .map(|traits| { let mut peeled = Peeled::Unspecified; let mut sorted = false; for token in traits.as_bstr().split_str(b" ") { if token == b"fully-peeled" { peeled = Peeled::Fully; } else if token == b"peeled" { peeled = Peeled::Partial; } else if token == b"sorted" { sorted = true; } } Header { peeled, sorted } }) .parse_next(input) } pub fn reference<'a, E: ParserError<&'a [u8]> + FromExternalError<&'a [u8], crate::name::Error>>( input: &mut &'a [u8], ) -> PResult, E> { ( terminated(hex_hash, b" "), until_newline.try_map(TryInto::try_into), opt(delimited(b"^", hex_hash, newline)), ) .map(|(target, name, object)| packed::Reference { name, target, object }) .parse_next(input) } #[cfg(test)] mod tests; gix-ref-0.43.0/src/store/packed/find.rs000064400000000000000000000133131046102023000157120ustar 00000000000000use gix_object::bstr::{BStr, BString, ByteSlice}; use winnow::prelude::*; use crate::{store_impl::packed, FullNameRef, PartialNameRef}; /// packed-refs specific functionality impl packed::Buffer { /// Find a reference with the given `name` and return it. /// /// Note that it will look it up verbatim and does not deal with namespaces or special prefixes like /// `main-worktree/` or `worktrees//`, as this is left to the caller. pub fn try_find<'a, Name, E>(&self, name: Name) -> Result>, Error> where Name: TryInto<&'a PartialNameRef, Error = E>, Error: From, { let name = name.try_into()?; let mut buf = BString::default(); for inbetween in &["", "tags", "heads", "remotes"] { let (name, was_absolute) = if name.looks_like_full_name() { let name = FullNameRef::new_unchecked(name.as_bstr()); let name = match transform_full_name_for_lookup(name) { None => return Ok(None), Some(name) => name, }; (name, true) } else { let full_name = name.construct_full_name_ref(inbetween, &mut buf); (full_name, false) }; match self.try_find_full_name(name)? 
{ Some(r) => return Ok(Some(r)), None if was_absolute => return Ok(None), None => continue, } } Ok(None) } pub(crate) fn try_find_full_name(&self, name: &FullNameRef) -> Result>, Error> { match self.binary_search_by(name.as_bstr()) { Ok(line_start) => { let mut input = &self.as_ref()[line_start..]; Ok(Some( packed::decode::reference::<()> .parse_next(&mut input) .map_err(|_| Error::Parse)?, )) } Err((parse_failure, _)) => { if parse_failure { Err(Error::Parse) } else { Ok(None) } } } } /// Find a reference with the given `name` and return it. pub fn find<'a, Name, E>(&self, name: Name) -> Result, existing::Error> where Name: TryInto<&'a PartialNameRef, Error = E>, Error: From, { match self.try_find(name) { Ok(Some(r)) => Ok(r), Ok(None) => Err(existing::Error::NotFound), Err(err) => Err(existing::Error::Find(err)), } } /// Perform a binary search where `Ok(pos)` is the beginning of the line that matches `name` perfectly and `Err(pos)` /// is the beginning of the line at which `name` could be inserted to still be in sort order. pub(in crate::store_impl::packed) fn binary_search_by(&self, full_name: &BStr) -> Result { let a = self.as_ref(); let search_start_of_record = |ofs: usize| { a[..ofs] .rfind(b"\n") .and_then(|pos| { let candidate = pos + 1; a.get(candidate).and_then(|b| { if *b == b'^' { a[..pos].rfind(b"\n").map(|pos| pos + 1) } else { Some(candidate) } }) }) .unwrap_or(0) }; let mut encountered_parse_failure = false; a.binary_search_by_key(&full_name.as_ref(), |b: &u8| { let ofs = b as *const u8 as usize - a.as_ptr() as usize; let mut line = &a[search_start_of_record(ofs)..]; packed::decode::reference::<()> .parse_next(&mut line) .map(|r| r.name.as_bstr().as_bytes()) .map_err(|err| { encountered_parse_failure = true; err }) .unwrap_or(&[]) }) .map(search_start_of_record) .map_err(|pos| (encountered_parse_failure, search_start_of_record(pos))) } } mod error { use std::convert::Infallible; /// The error returned by [`find()`][super::packed::Buffer::find()] #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The ref name or path is not a valid ref name")] RefnameValidation(#[from] crate::name::Error), #[error("The reference could not be parsed")] Parse, } impl From for Error { fn from(_: Infallible) -> Self { unreachable!("this impl is needed to allow passing a known valid partial path as parameter") } } } pub use error::Error; /// #[allow(clippy::empty_docs)] pub mod existing { /// The error returned by [`find_existing()`][super::packed::Buffer::find()] #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The find operation failed")] Find(#[from] super::Error), #[error("The reference did not exist even though that was expected")] NotFound, } } pub(crate) fn transform_full_name_for_lookup(name: &FullNameRef) -> Option<&FullNameRef> { match name.category_and_short_name() { Some((c, sn)) => { use crate::Category::*; Some(match c { MainRef | LinkedRef { .. } => FullNameRef::new_unchecked(sn), Tag | RemoteBranch | LocalBranch | Bisect | Rewritten | Note => name, MainPseudoRef | PseudoRef | LinkedPseudoRef { .. 
} | WorktreePrivate => return None, }) } None => Some(name), } } gix-ref-0.43.0/src/store/packed/iter.rs000064400000000000000000000103671046102023000157430ustar 00000000000000use gix_object::bstr::{BString, ByteSlice}; use winnow::{ combinator::{preceded, rest}, prelude::*, stream::Stream as _, }; use crate::store_impl::{packed, packed::decode}; /// packed-refs specific functionality impl packed::Buffer { /// Return an iterator of references stored in this packed refs buffer, ordered by reference name. /// /// # Note /// /// There is no namespace support in packed iterators. It can be emulated using `iter_prefixed(…)`. pub fn iter(&self) -> Result, packed::iter::Error> { packed::Iter::new(self.as_ref()) } /// Return an iterator yielding only references matching the given prefix, ordered by reference name. pub fn iter_prefixed(&self, prefix: BString) -> Result, packed::iter::Error> { let first_record_with_prefix = self.binary_search_by(prefix.as_bstr()).unwrap_or_else(|(_, pos)| pos); packed::Iter::new_with_prefix(&self.as_ref()[first_record_with_prefix..], Some(prefix)) } } impl<'a> Iterator for packed::Iter<'a> { type Item = Result, Error>; fn next(&mut self) -> Option { if self.cursor.is_empty() { return None; } let start = self.cursor.checkpoint(); match decode::reference::<()>.parse_next(&mut self.cursor) { Ok(reference) => { self.current_line += 1; if let Some(ref prefix) = self.prefix { if !reference.name.as_bstr().starts_with_str(prefix) { self.cursor = &[]; return None; } } Some(Ok(reference)) } Err(_) => { self.cursor.reset(&start); let (failed_line, next_cursor) = self .cursor .find_byte(b'\n') .map_or((self.cursor, &[][..]), |pos| self.cursor.split_at(pos + 1)); self.cursor = next_cursor; let line_number = self.current_line; self.current_line += 1; Some(Err(Error::Reference { invalid_line: failed_line .get(..failed_line.len().saturating_sub(1)) .unwrap_or(failed_line) .into(), line_number, })) } } } } impl<'a> packed::Iter<'a> { /// Return a new iterator after successfully parsing the possibly existing first line of the given `packed` refs buffer. pub fn new(packed: &'a [u8]) -> Result { Self::new_with_prefix(packed, None) } /// Returns an iterators whose references will only match the given prefix. 
/// /// It assumes that the underlying `packed` buffer is indeed sorted pub(in crate::store_impl::packed) fn new_with_prefix( packed: &'a [u8], prefix: Option, ) -> Result { if packed.is_empty() { Ok(packed::Iter { cursor: packed, prefix, current_line: 1, }) } else if packed[0] == b'#' { let mut input = packed; let refs = preceded(decode::header::<()>, rest) .parse_next(&mut input) .map_err(|_| Error::Header { invalid_first_line: packed.lines().next().unwrap_or(packed).into(), })?; Ok(packed::Iter { cursor: refs, prefix, current_line: 2, }) } else { Ok(packed::Iter { cursor: packed, prefix, current_line: 1, }) } } } mod error { use gix_object::bstr::BString; /// The error returned by [`Iter`][super::packed::Iter], #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The header existed but could not be parsed: {invalid_first_line:?}")] Header { invalid_first_line: BString }, #[error("Invalid reference in line {line_number}: {invalid_line:?}")] Reference { invalid_line: BString, line_number: usize }, } } pub use error::Error; gix-ref-0.43.0/src/store/packed/mod.rs000064400000000000000000000057061046102023000155600ustar 00000000000000use std::path::PathBuf; use gix_hash::ObjectId; use gix_object::bstr::{BStr, BString}; use memmap2::Mmap; use crate::{file, transaction::RefEdit, FullNameRef, Namespace}; #[derive(Debug)] enum Backing { /// The buffer is loaded entirely in memory, along with the `offset` to the first record past the header. InMemory(Vec), /// The buffer is mapping the file on disk, along with the offset to the first record past the header Mapped(Mmap), } /// A buffer containing a packed-ref file that is either memory mapped or fully in-memory depending on a cutoff. /// /// The buffer is guaranteed to be sorted as per the packed-ref rules which allows some operations to be more efficient. #[derive(Debug)] pub struct Buffer { data: Backing, /// The offset to the first record, how many bytes to skip past the header offset: usize, /// The path from which we were loaded path: PathBuf, } struct Edit { inner: RefEdit, peeled: Option, } /// A transaction for editing packed references pub(crate) struct Transaction { buffer: Option, edits: Option>, lock: Option, #[allow(dead_code)] // It just has to be kept alive, hence no reads closed_lock: Option, precompose_unicode: bool, /// The namespace to use when preparing or writing refs namespace: Option, } /// A reference as parsed from the `packed-refs` file #[derive(Debug, PartialEq, Eq)] pub struct Reference<'a> { /// The validated full name of the reference. pub name: &'a FullNameRef, /// The target object id of the reference, hex encoded. pub target: &'a BStr, /// The fully peeled object id, hex encoded, that the ref is ultimately pointing to /// i.e. when all indirections are removed. pub object: Option<&'a BStr>, } impl<'a> Reference<'a> { /// Decode the target as object pub fn target(&self) -> ObjectId { gix_hash::ObjectId::from_hex(self.target).expect("parser validation") } /// Decode the object this reference is ultimately pointing to. Note that this is /// the [`target()`][Reference::target()] if this is not a fully peeled reference like a tag. 
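///
/// For example, given a packed entry like the one used in this crate's decode tests
///
/// ```text
/// d53c4b0f91f1b29769c9430f2d1c0bcab1170c75 refs/heads/alternates-after-packs-and-loose
/// ^e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37
/// ```
///
/// [`target()`][Reference::target()] yields `d53c4b0f...`, while this method yields
/// the peeled `e9cdc958...`.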
pub fn object(&self) -> ObjectId { self.object.map_or_else( || self.target(), |id| ObjectId::from_hex(id).expect("parser validation"), ) } } /// An iterator over references in a packed refs file pub struct Iter<'a> { /// The position at which to parse the next reference cursor: &'a [u8], /// The next line, starting at 1 current_line: usize, /// If set, references returned will match the prefix, the first failed match will stop all iteration. prefix: Option, } mod decode; /// #[allow(clippy::empty_docs)] pub mod iter; /// #[allow(clippy::empty_docs)] pub mod buffer; /// #[allow(clippy::empty_docs)] pub mod find; /// #[allow(clippy::empty_docs)] pub mod transaction; gix-ref-0.43.0/src/store/packed/transaction.rs000064400000000000000000000257601046102023000173300ustar 00000000000000use std::borrow::Cow; use std::{fmt::Formatter, io::Write}; use crate::{ file, store_impl::{packed, packed::Edit}, transaction::{Change, RefEdit}, Namespace, Target, }; pub(crate) const HEADER_LINE: &[u8] = b"# pack-refs with: peeled fully-peeled sorted \n"; /// Access and instantiation impl packed::Transaction { pub(crate) fn new_from_pack_and_lock( buffer: Option, lock: gix_lock::File, precompose_unicode: bool, namespace: Option, ) -> Self { packed::Transaction { buffer, edits: None, lock: Some(lock), closed_lock: None, precompose_unicode, namespace, } } } impl std::fmt::Debug for packed::Transaction { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct("packed::Transaction") .field("edits", &self.edits.as_ref().map(Vec::len)) .field("lock", &self.lock) .finish_non_exhaustive() } } /// Access impl packed::Transaction { /// Returns our packed buffer pub fn buffer(&self) -> Option<&packed::Buffer> { self.buffer.as_ref().map(|b| &***b) } } /// Lifecycle impl packed::Transaction { /// Prepare the transaction by checking all edits for applicability. /// Use `objects` to access objects for the purpose of peeling them - this is only used if packed-refs are involved. pub fn prepare( mut self, edits: &mut dyn Iterator, objects: &dyn gix_object::Find, ) -> Result { assert!(self.edits.is_none(), "BUG: cannot call prepare(…) more than once"); let buffer = &self.buffer; // Remove all edits which are deletions that aren't here in the first place let mut edits: Vec = edits .into_iter() .map(|mut edit| { use gix_object::bstr::ByteSlice; if self.precompose_unicode { let precomposed = edit .name .0 .to_str() .ok() .map(|name| gix_utils::str::precompose(name.into())); match precomposed { None | Some(Cow::Borrowed(_)) => edit, Some(Cow::Owned(precomposed)) => { edit.name.0 = precomposed.into(); edit } } } else { edit } }) .map(|mut edit| { if let Some(namespace) = &self.namespace { edit.name = namespace.clone().into_namespaced_name(edit.name.as_ref()); } edit }) .filter(|edit| { if let Change::Delete { .. } = edit.change { buffer.as_ref().map_or(true, |b| b.find(edit.name.as_ref()).is_ok()) } else { true } }) .map(|change| Edit { inner: change, peeled: None, }) .collect(); let mut buf = Vec::new(); for edit in &mut edits { if let Change::Update { new: Target::Peeled(new), .. 
} = edit.inner.change { let mut next_id = new; edit.peeled = loop { let kind = objects.try_find(&next_id, &mut buf)?.map(|d| d.kind); match kind { Some(gix_object::Kind::Tag) => { next_id = gix_object::TagRefIter::from_bytes(&buf).target_id().map_err(|_| { prepare::Error::Resolve( format!("Couldn't get target object id from tag {next_id}").into(), ) })?; } Some(_) => { break if next_id == new { None } else { Some(next_id) }; } None => { return Err(prepare::Error::Resolve( format!("Couldn't find object with id {next_id}").into(), )) } } }; } } if edits.is_empty() { self.closed_lock = self .lock .take() .map(gix_lock::File::close) .transpose() .map_err(prepare::Error::CloseLock)?; } else { // NOTE that we don't do any additional checks here but apply all edits unconditionally. // This is because this transaction system is internal and will be used correctly from the // loose ref store transactions, which do the necessary checking. } self.edits = Some(edits); Ok(self) } /// Commit the prepared transaction. /// /// Please note that actual edits invalidated existing packed buffers. /// Note: There is the potential to write changes into memory and return such a packed-refs buffer for reuse. pub fn commit(self) -> Result<(), commit::Error> { let mut edits = self.edits.expect("BUG: cannot call commit() before prepare(…)"); if edits.is_empty() { return Ok(()); } let mut file = self.lock.expect("a write lock for applying changes"); let refs_sorted: Box, packed::iter::Error>>> = match self.buffer.as_ref() { Some(buffer) => Box::new(buffer.iter()?), None => Box::new(std::iter::empty()), }; let mut refs_sorted = refs_sorted.peekable(); edits.sort_by(|l, r| l.inner.name.as_bstr().cmp(r.inner.name.as_bstr())); let mut peekable_sorted_edits = edits.iter().peekable(); file.with_mut(|f| f.write_all(HEADER_LINE))?; let mut num_written_lines = 0; loop { match (refs_sorted.peek(), peekable_sorted_edits.peek()) { (Some(Err(_)), _) => { let err = refs_sorted.next().expect("next").expect_err("err"); return Err(commit::Error::Iteration(err)); } (None, None) => { break; } (Some(Ok(_)), None) => { let pref = refs_sorted.next().expect("next").expect("no err"); num_written_lines += 1; file.with_mut(|out| write_packed_ref(out, pref))?; } (Some(Ok(pref)), Some(edit)) => { use std::cmp::Ordering::*; match pref.name.as_bstr().cmp(edit.inner.name.as_bstr()) { Less => { let pref = refs_sorted.next().expect("next").expect("valid"); num_written_lines += 1; file.with_mut(|out| write_packed_ref(out, pref))?; } Greater => { let edit = peekable_sorted_edits.next().expect("next"); file.with_mut(|out| write_edit(out, edit, &mut num_written_lines))?; } Equal => { let _pref = refs_sorted.next().expect("next").expect("valid"); let edit = peekable_sorted_edits.next().expect("next"); file.with_mut(|out| write_edit(out, edit, &mut num_written_lines))?; } } } (None, Some(_)) => { let edit = peekable_sorted_edits.next().expect("next"); file.with_mut(|out| write_edit(out, edit, &mut num_written_lines))?; } } } if num_written_lines == 0 { std::fs::remove_file(file.resource_path())?; } else { file.commit()?; } drop(refs_sorted); Ok(()) } } fn write_packed_ref(out: &mut dyn std::io::Write, pref: packed::Reference<'_>) -> std::io::Result<()> { write!(out, "{} ", pref.target)?; out.write_all(pref.name.as_bstr())?; out.write_all(b"\n")?; if let Some(object) = pref.object { writeln!(out, "^{object}")?; } Ok(()) } fn write_edit(out: &mut dyn std::io::Write, edit: &Edit, lines_written: &mut i32) -> std::io::Result<()> { match edit.inner.change { 
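// Deletions intentionally write nothing - omitting the line from the rewritten
// file is what removes the reference from packed-refs.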
Change::Delete { .. } => {} Change::Update { new: Target::Peeled(target_oid), .. } => { write!(out, "{target_oid} ")?; out.write_all(edit.inner.name.as_bstr())?; out.write_all(b"\n")?; if let Some(object) = edit.peeled { writeln!(out, "^{object}")?; } *lines_written += 1; } Change::Update { new: Target::Symbolic(_), .. } => unreachable!("BUG: packed refs cannot contain symbolic refs, catch that in prepare(…)"), } Ok(()) } /// Convert this buffer to be used as the basis for a transaction. pub(crate) fn buffer_into_transaction( buffer: file::packed::SharedBufferSnapshot, lock_mode: gix_lock::acquire::Fail, precompose_unicode: bool, namespace: Option, ) -> Result { let lock = gix_lock::File::acquire_to_update_resource(&buffer.path, lock_mode, None)?; Ok(packed::Transaction { buffer: Some(buffer), lock: Some(lock), closed_lock: None, edits: None, precompose_unicode, namespace, }) } /// #[allow(clippy::empty_docs)] pub mod prepare { /// The error used in [`Transaction::prepare(…)`][crate::file::Transaction::prepare()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not close a lock which won't ever be committed")] CloseLock(#[from] std::io::Error), #[error("The lookup of an object failed while peeling it")] Resolve(#[from] Box), } } /// #[allow(clippy::empty_docs)] pub mod commit { use crate::store_impl::packed; /// The error used in [`Transaction::commit(…)`][crate::file::Transaction::commit()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Changes to the resource could not be committed")] Commit(#[from] gix_lock::commit::Error), #[error("Some references in the packed refs buffer could not be parsed")] Iteration(#[from] packed::iter::Error), #[error("Failed to write a ref line to the packed ref file")] Io(#[from] std::io::Error), } } gix-ref-0.43.0/src/target.rs000064400000000000000000000112711046102023000136760ustar 00000000000000use std::fmt; use gix_hash::{oid, ObjectId}; use crate::{FullName, FullNameRef, Kind, Target, TargetRef}; impl<'a> TargetRef<'a> { /// Returns the kind of the target the ref is pointing to. pub fn kind(&self) -> Kind { match self { TargetRef::Symbolic(_) => Kind::Symbolic, TargetRef::Peeled(_) => Kind::Peeled, } } /// Interpret this target as object id which maybe `None` if it is symbolic. pub fn try_id(&self) -> Option<&oid> { match self { TargetRef::Symbolic(_) => None, TargetRef::Peeled(oid) => Some(oid), } } /// Interpret this target as object id or **panic** if it is symbolic. pub fn id(&self) -> &oid { match self { TargetRef::Symbolic(_) => panic!("BUG: tries to obtain object id from symbolic target"), TargetRef::Peeled(oid) => oid, } } /// Interpret this target as name of the reference it points to which maybe `None` if it an object id. pub fn try_name(&self) -> Option<&FullNameRef> { match self { TargetRef::Symbolic(name) => Some(name), TargetRef::Peeled(_) => None, } } /// Convert this instance into an owned version, without consuming it. pub fn into_owned(self) -> Target { self.into() } } impl Target { /// Returns the kind of the target the ref is pointing to. 
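///
/// A small illustration, not part of the original documentation:
///
/// ```
/// use gix_ref::{Kind, Target};
///
/// let target = Target::Peeled(gix_hash::ObjectId::null(gix_hash::Kind::Sha1));
/// assert!(matches!(target.kind(), Kind::Peeled));
/// assert!(target.is_null()); // the null id counts as null
/// assert!(target.try_name().is_none()); // only symbolic targets carry a name
/// ```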
pub fn kind(&self) -> Kind { match self { Target::Symbolic(_) => Kind::Symbolic, Target::Peeled(_) => Kind::Peeled, } } /// Return true if this is a peeled target with a null hash pub fn is_null(&self) -> bool { match self { Target::Peeled(oid) => oid.is_null(), Target::Symbolic(_) => false, } } /// Interpret this owned Target as shared Target pub fn to_ref(&self) -> TargetRef<'_> { match self { Target::Peeled(oid) => TargetRef::Peeled(oid), Target::Symbolic(name) => TargetRef::Symbolic(name.as_ref()), } } /// Interpret this target as object id which maybe `None` if it is symbolic. pub fn try_id(&self) -> Option<&oid> { match self { Target::Symbolic(_) => None, Target::Peeled(oid) => Some(oid), } } /// Interpret this target as object id or panic if it is symbolic. pub fn id(&self) -> &oid { match self { Target::Symbolic(_) => panic!("BUG: tries to obtain object id from symbolic target"), Target::Peeled(oid) => oid, } } /// Return the contained object id or panic pub fn into_id(self) -> ObjectId { match self { Target::Symbolic(_) => panic!("BUG: expected peeled reference target but found symbolic one"), Target::Peeled(oid) => oid, } } /// Return the contained object id if the target is peeled or itself if it is not. pub fn try_into_id(self) -> Result { match self { Target::Symbolic(_) => Err(self), Target::Peeled(oid) => Ok(oid), } } /// Interpret this target as name of the reference it points to which maybe `None` if it an object id. pub fn try_name(&self) -> Option<&FullNameRef> { match self { Target::Symbolic(name) => Some(name.as_ref()), Target::Peeled(_) => None, } } } impl<'a> From> for Target { fn from(src: TargetRef<'a>) -> Self { match src { TargetRef::Peeled(oid) => Target::Peeled(oid.to_owned()), TargetRef::Symbolic(name) => Target::Symbolic(name.to_owned()), } } } impl<'a> PartialEq> for Target { fn eq(&self, other: &TargetRef<'a>) -> bool { match (self, other) { (Target::Peeled(lhs), TargetRef::Peeled(rhs)) => lhs == rhs, (Target::Symbolic(lhs), TargetRef::Symbolic(rhs)) => lhs.as_bstr() == rhs.as_bstr(), _ => false, } } } impl From for Target { fn from(id: ObjectId) -> Self { Target::Peeled(id) } } impl TryFrom for ObjectId { type Error = Target; fn try_from(value: Target) -> Result { match value { Target::Peeled(id) => Ok(id), Target::Symbolic(_) => Err(value), } } } impl From for Target { fn from(name: FullName) -> Self { Target::Symbolic(name) } } impl fmt::Display for Target { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Target::Peeled(oid) => oid.fmt(f), Target::Symbolic(name) => write!(f, "ref: {}", name.as_bstr()), } } } gix-ref-0.43.0/src/transaction/ext.rs000064400000000000000000000125751046102023000155450ustar 00000000000000use gix_object::bstr::BString; use crate::{ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog, Target}, PartialNameRef, }; /// An extension trait to perform commonly used operations on edits across different ref stores. pub trait RefEditsExt where T: std::borrow::Borrow + std::borrow::BorrowMut, { /// Return true if each ref `name` has exactly one `edit` across multiple ref edits fn assure_one_name_has_one_edit(&self) -> Result<(), BString>; /// Split all symbolic refs into updates for the symbolic ref as well as all their referents if the `deref` flag is enabled. /// /// Note no action is performed if deref isn't specified. 
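///
/// For instance, updating `HEAD` with `deref = true` while `HEAD` points at
/// `refs/heads/main` yields two edits: the original edit is demoted to a
/// reflog-only change of `HEAD`, while a new edit against `refs/heads/main`
/// carries the actual value change along with its reflog entry.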

gix-ref-0.43.0/src/transaction/ext.rs
use gix_object::bstr::BString;

use crate::{
    transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog, Target},
    PartialNameRef,
};

/// An extension trait to perform commonly used operations on edits across different ref stores.
pub trait RefEditsExt<T>
where
    T: std::borrow::Borrow<RefEdit> + std::borrow::BorrowMut<RefEdit>,
{
    /// Return true if each ref `name` has exactly one `edit` across multiple ref edits
    fn assure_one_name_has_one_edit(&self) -> Result<(), BString>;

    /// Split all symbolic refs into updates for the symbolic ref as well as all their referents if the `deref` flag is enabled.
    ///
    /// Note that no action is performed if `deref` isn't set.
    fn extend_with_splits_of_symbolic_refs(
        &mut self,
        find: &mut dyn FnMut(&PartialNameRef) -> Option<Target>,
        make_entry: &mut dyn FnMut(usize, RefEdit) -> T,
    ) -> Result<(), std::io::Error>;

    /// All processing steps in one and in the correct order.
    ///
    /// Users call this to assure derefs are honored and duplicate checks are done.
    fn pre_process(
        &mut self,
        find: &mut dyn FnMut(&PartialNameRef) -> Option<Target>,
        make_entry: &mut dyn FnMut(usize, RefEdit) -> T,
    ) -> Result<(), std::io::Error> {
        self.extend_with_splits_of_symbolic_refs(find, make_entry)?;
        self.assure_one_name_has_one_edit().map_err(|name| {
            std::io::Error::new(
                std::io::ErrorKind::AlreadyExists,
                format!("A reference named '{name}' has multiple edits"),
            )
        })
    }
}

impl<E> RefEditsExt<E> for Vec<E>
where
    E: std::borrow::Borrow<RefEdit> + std::borrow::BorrowMut<RefEdit>,
{
    fn assure_one_name_has_one_edit(&self) -> Result<(), BString> {
        let mut names: Vec<_> = self.iter().map(|e| &e.borrow().name).collect();
        names.sort();
        match names.windows(2).find(|v| v[0] == v[1]) {
            Some(name) => Err(name[0].as_bstr().to_owned()),
            None => Ok(()),
        }
    }

    fn extend_with_splits_of_symbolic_refs(
        &mut self,
        find: &mut dyn FnMut(&PartialNameRef) -> Option<Target>,
        make_entry: &mut dyn FnMut(usize, RefEdit) -> E,
    ) -> Result<(), std::io::Error> {
        let mut new_edits = Vec::new();
        let mut first = 0;
        let mut round = 1;
        loop {
            for (eid, edit) in self[first..].iter_mut().enumerate().map(|(eid, v)| (eid + first, v)) {
                let edit = edit.borrow_mut();
                if !edit.deref {
                    continue;
                };

                // we can't tell what happened and we are here because it's a non-existing ref or an invalid one.
                // In any case, we don't want the following algorithms to try dereffing it and assume they deal with
                // broken refs gracefully.
                edit.deref = false;
                if let Some(Target::Symbolic(referent)) = find(edit.name.as_ref().as_partial_name()) {
                    new_edits.push(make_entry(
                        eid,
                        match &mut edit.change {
                            Change::Delete {
                                expected: previous,
                                log: mode,
                            } => {
                                let current_mode = *mode;
                                *mode = RefLog::Only;
                                RefEdit {
                                    change: Change::Delete {
                                        expected: previous.clone(),
                                        log: current_mode,
                                    },
                                    name: referent,
                                    deref: true,
                                }
                            }
                            Change::Update { log, expected, new } => {
                                let current = std::mem::replace(
                                    log,
                                    LogChange {
                                        message: log.message.clone(),
                                        mode: RefLog::Only,
                                        force_create_reflog: log.force_create_reflog,
                                    },
                                );
                                let next = std::mem::replace(expected, PreviousValue::Any);
                                RefEdit {
                                    change: Change::Update {
                                        expected: next,
                                        new: new.clone(),
                                        log: current,
                                    },
                                    name: referent,
                                    deref: true,
                                }
                            }
                        },
                    ));
                }
            }
            if new_edits.is_empty() {
                break Ok(());
            }
            if round == 5 {
                break Err(std::io::Error::new(
                    std::io::ErrorKind::WouldBlock,
                    format!("Could not follow all splits after {round} rounds, assuming reference cycle"),
                ));
            }
            round += 1;
            first = self.len();
            self.append(&mut new_edits);
        }
    }
}
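
// Sketch (not part of the original source): how a ref store might drive
// `pre_process(…)` over a vector of edits. `lookup_target` is a hypothetical
// closure resolving a partial name to its current `Target`; the second closure
// decides how a split-off edit is materialized, here by passing it through
// unchanged.
//
//     let mut edits: Vec<RefEdit> = collected_edits;
//     edits.pre_process(
//         &mut |name| lookup_target(name),
//         &mut |_index_of_parent_edit, split_edit| split_edit,
//     )?;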

gix-ref-0.43.0/src/transaction/mod.rs
//! **Transactions** are the only way to make changes to the ref store in order to increase the chance of consistency in a multi-threaded
//! environment.
//!
//! Transactions currently allow to…
//!
//! * create or update references
//! * delete references
//!
//! The following guarantees are made:
//!
//! * transactions are prepared, which is when other writers are prevented from changing them
//!   - errors during preparation will cause a perfect rollback
//! * prepared transactions are committed to finalize the change
//!   - errors when committing will leave the ref store in an inconsistent, but operational state.
use gix_object::bstr::BString;

use crate::{FullName, Target};

/// A change to the reflog.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub struct LogChange {
    /// How to treat the reference log.
    pub mode: RefLog,
    /// If set, create a reflog even though it would otherwise not be created, as prohibited by general rules.
    /// Note that reflog writing might be prohibited in the entire repository, in which case this flag has no effect either.
    pub force_create_reflog: bool,
    /// The message to put into the reference log. It must be a single line, hence newlines are forbidden.
    /// The string can be empty to indicate there should be no message at all.
    pub message: BString,
}

impl Default for LogChange {
    fn default() -> Self {
        LogChange {
            mode: RefLog::AndReference,
            force_create_reflog: false,
            message: Default::default(),
        }
    }
}

/// The expected previous value of a reference, used to guard an update or deletion.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub enum PreviousValue {
    /// No requirements are made towards the current value, and the new value is set unconditionally.
    Any,
    /// The reference must exist and may have any value.
    MustExist,
    /// Create the ref only, hence the reference must not exist.
    MustNotExist,
    /// The ref _must_ exist and have the given value.
    MustExistAndMatch(Target),
    /// The ref _may_ exist and have the given value, or may not exist at all.
    ExistingMustMatch(Target),
}

/// A description of an edit to perform.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub enum Change {
    /// If `expected` demands a previous value, the ref must exist and its `oid` must agree with it, and
    /// the edit functions like `update`.
    /// Otherwise it functions as `create-or-update`.
    Update {
        /// The desired change to the reference log.
        log: LogChange,
        /// The expected value already present in the reference.
        /// If a ref was existing previously, this field will be overwritten with `MustExistAndMatch(actual_value)` for use after
        /// the transaction was committed successfully.
        expected: PreviousValue,
        /// The new state of the reference, either for updating an existing one or creating a new one.
        new: Target,
    },
    /// Delete a reference and optionally check if `expected` is its content.
    Delete {
        /// The expected value of the reference, with the `MustNotExist` variant being invalid.
        ///
        /// If a previous ref existed, this value will be filled in automatically as `MustExistAndMatch(actual_value)` and
        /// can be accessed if the transaction was committed successfully.
        expected: PreviousValue,
        /// How to treat the reference log during deletion.
        log: RefLog,
    },
}
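
// Sketch (not part of the original source): a typical 'create only' edit as it
// would be handed to a transaction. `new_id` is a placeholder for an object id,
// and the reflog message is just an example.
//
//     let edit = RefEdit {
//         change: Change::Update {
//             log: LogChange {
//                 mode: RefLog::AndReference,
//                 force_create_reflog: false,
//                 message: "commit (initial): init".into(),
//             },
//             expected: PreviousValue::MustNotExist,
//             new: Target::Peeled(new_id),
//         },
//         name: "refs/heads/main".try_into()?,
//         deref: false,
//     };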

impl Change {
    /// Return references to values that are the new value after the change is applied, if this is an update.
    pub fn new_value(&self) -> Option<crate::TargetRef<'_>> {
        match self {
            Change::Update { new, .. } => new.to_ref().into(),
            Change::Delete { .. } => None,
        }
    }

    /// Return references to values that are in common between all variants and denote the previous observed value.
    pub fn previous_value(&self) -> Option<crate::TargetRef<'_>> {
        match self {
            Change::Update {
                expected: PreviousValue::MustExistAndMatch(previous) | PreviousValue::ExistingMustMatch(previous),
                ..
            }
            | Change::Delete {
                expected: PreviousValue::MustExistAndMatch(previous) | PreviousValue::ExistingMustMatch(previous),
                ..
            } => previous,
            _ => return None,
        }
        .to_ref()
        .into()
    }
}

/// A reference that is to be changed
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
pub struct RefEdit {
    /// The change itself
    pub change: Change,
    /// The name of the reference to apply the change to
    pub name: FullName,
    /// If set, symbolic references identified by `name` will be dereferenced to have the `change` applied to their target.
    /// This flag has no effect if the reference isn't symbolic.
    pub deref: bool,
}

/// The way to deal with the reflog in deletions and updates.
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
pub enum RefLog {
    /// Delete or update the reference and the log
    AndReference,
    /// Delete or update only the reflog
    Only,
}

mod ext;
pub use ext::RefEditsExt;
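
// Sketch (not part of the original source) of the two-phase flow described in
// the module documentation, using this crate's file store. `store`, `edits` and
// `committer` are assumed to exist in the caller's scope; the exact `prepare(…)`
// parameters are an assumption and may differ between versions.
//
//     let tx = store.transaction().prepare(
//         edits,
//         gix_lock::acquire::Fail::Immediately, // lock mode for ref files
//         gix_lock::acquire::Fail::Immediately, // lock mode for the packed-refs file
//     )?;
//     let applied_edits = tx.commit(committer)?;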