gix-worktree-0.38.0/.cargo_vcs_info.json0000644000000001520000000000100135510ustar { "git": { "sha1": "4000197ecc8cf1a5d79361620e4c114f86476703" }, "path_in_vcs": "gix-worktree" }gix-worktree-0.38.0/Cargo.toml0000644000000101200000000000100115430ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.65" name = "gix-worktree" version = "0.38.0" authors = ["Sebastian Thiel "] build = false include = [ "src/**/*", "LICENSE-*", ] autobins = false autoexamples = false autotests = false autobenches = false description = "A crate of the gitoxide project for shared worktree related types and utilities." readme = false license = "MIT OR Apache-2.0" repository = "https://github.com/GitoxideLabs/gitoxide" [package.metadata.docs.rs] features = [ "document-features", "serde", ] [lib] name = "gix_worktree" path = "src/lib.rs" doctest = false [dependencies.bstr] version = "1.3.0" default-features = false [dependencies.document-features] version = "0.2.0" optional = true [dependencies.gix-attributes] version = "^0.23.1" optional = true [dependencies.gix-features] version = "^0.39.1" [dependencies.gix-fs] version = "^0.12.0" [dependencies.gix-glob] version = "^0.17.1" [dependencies.gix-hash] version = "^0.15.1" [dependencies.gix-ignore] version = "^0.12.1" [dependencies.gix-index] version = "^0.37.0" [dependencies.gix-object] version = "^0.46.0" [dependencies.gix-path] version = "^0.10.13" [dependencies.gix-validate] version = "^0.9.2" optional = true [dependencies.serde] version = "1.0.114" features = ["derive"] optional = true default-features = false [features] attributes = [ "dep:gix-attributes", "dep:gix-validate", ] default = ["attributes"] serde = [ "dep:serde", "bstr/serde", "gix-index/serde", "gix-hash/serde", "gix-object/serde", "gix-attributes?/serde", "gix-ignore/serde", ] [lints.clippy] bool_to_int_with_if = "allow" borrow_as_ptr = "allow" cast_lossless = "allow" cast_possible_truncation = "allow" cast_possible_wrap = "allow" cast_precision_loss = "allow" cast_sign_loss = "allow" checked_conversions = "allow" copy_iterator = "allow" default_trait_access = "allow" doc_markdown = "allow" empty_docs = "allow" enum_glob_use = "allow" explicit_deref_methods = "allow" explicit_into_iter_loop = "allow" explicit_iter_loop = "allow" filter_map_next = "allow" fn_params_excessive_bools = "allow" from_iter_instead_of_collect = "allow" if_not_else = "allow" ignored_unit_patterns = "allow" implicit_clone = "allow" inconsistent_struct_constructor = "allow" inefficient_to_string = "allow" inline_always = "allow" items_after_statements = "allow" iter_not_returning_iterator = "allow" iter_without_into_iter = "allow" manual_assert = "allow" manual_is_variant_and = "allow" manual_let_else = "allow" manual_string_new = "allow" many_single_char_names = "allow" match_bool = "allow" match_same_arms = "allow" match_wild_err_arm = "allow" match_wildcard_for_single_variants = "allow" missing_errors_doc = "allow" missing_panics_doc = "allow" module_name_repetitions = "allow" must_use_candidate = "allow" mut_mut = "allow" naive_bytecount = "allow" needless_for_each = 
"allow" needless_pass_by_value = "allow" needless_raw_string_hashes = "allow" no_effect_underscore_binding = "allow" option_option = "allow" range_plus_one = "allow" redundant_else = "allow" return_self_not_must_use = "allow" should_panic_without_expect = "allow" similar_names = "allow" single_match_else = "allow" stable_sort_primitive = "allow" struct_excessive_bools = "allow" struct_field_names = "allow" too_long_first_doc_paragraph = "allow" too_many_lines = "allow" transmute_ptr_to_ptr = "allow" trivially_copy_pass_by_ref = "allow" unnecessary_join = "allow" unnecessary_wraps = "allow" unreadable_literal = "allow" unused_self = "allow" used_underscore_binding = "allow" wildcard_imports = "allow" [lints.clippy.pedantic] level = "warn" priority = -1 [lints.rust] gix-worktree-0.38.0/Cargo.toml.orig000064400000000000000000000033201046102023000152300ustar 00000000000000lints.workspace = true [package] name = "gix-worktree" version = "0.38.0" repository = "https://github.com/GitoxideLabs/gitoxide" license = "MIT OR Apache-2.0" description = "A crate of the gitoxide project for shared worktree related types and utilities." authors = ["Sebastian Thiel "] edition = "2021" include = ["src/**/*", "LICENSE-*"] rust-version = "1.65" autotests = false [lib] doctest = false [features] default = ["attributes"] ## Instantiate stacks that can access `.gitattributes` information. attributes = ["dep:gix-attributes", "dep:gix-validate"] ## Data structures implement `serde::Serialize` and `serde::Deserialize`. serde = ["dep:serde", "bstr/serde", "gix-index/serde", "gix-hash/serde", "gix-object/serde", "gix-attributes?/serde", "gix-ignore/serde"] [dependencies] gix-index = { version = "^0.37.0", path = "../gix-index" } gix-fs = { version = "^0.12.0", path = "../gix-fs" } gix-hash = { version = "^0.15.1", path = "../gix-hash" } gix-object = { version = "^0.46.0", path = "../gix-object" } gix-glob = { version = "^0.17.1", path = "../gix-glob" } gix-path = { version = "^0.10.13", path = "../gix-path" } gix-attributes = { version = "^0.23.1", path = "../gix-attributes", optional = true } gix-validate = { version = "^0.9.2", path = "../gix-validate", optional = true } gix-ignore = { version = "^0.12.1", path = "../gix-ignore" } gix-features = { version = "^0.39.1", path = "../gix-features" } serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"] } bstr = { version = "1.3.0", default-features = false } document-features = { version = "0.2.0", optional = true } [package.metadata.docs.rs] features = ["document-features", "serde"] gix-worktree-0.38.0/LICENSE-APACHE000064400000000000000000000247461046102023000143040ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. gix-worktree-0.38.0/LICENSE-MIT000064400000000000000000000017771046102023000140130ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. gix-worktree-0.38.0/src/lib.rs000064400000000000000000000060711046102023000142520ustar 00000000000000//! A crate with utility types for use by other crates that implement specifics. //! //! Unless specified differently, all operations need an index file (e.g. `.git/index`) as driver. //! //! 
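//! ## Usage sketch
//!
//! A minimal, hypothetical example of driving the [`Stack`] for exclusion checks. It assumes default ignore
//! state and an empty object store (the `object::find::Never` placeholder); real callers would typically
//! configure [`stack::State`] from repository data, e.g. via [`Stack::from_state_and_ignore_case()`].
//!
//! ```ignore
//! use gix_worktree::{stack, Stack};
//!
//! // Only `.gitignore` information is queried here; everything else is left at its defaults.
//! let state = stack::State::IgnoreStack(stack::state::Ignore::new(
//!     Default::default(), // overrides
//!     Default::default(), // globals
//!     None,               // use the default `.gitignore` name for per-directory files
//!     Default::default(), // prefer the worktree, then the id-mapping
//! ));
//! let mut stack = Stack::new(
//!     "/path/to/worktree",
//!     state,
//!     gix_worktree::glob::pattern::Case::Sensitive,
//!     Vec::new(),
//!     Vec::new(),
//! );
//!
//! // No blobs need to be read from an object database for this query.
//! let platform = stack.at_entry("target/some.o", None, &gix_worktree::object::find::Never)?;
//! let _ignored = platform.is_excluded();
//! ```
//!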
//! ## Feature Flags
#![cfg_attr(
    all(doc, feature = "document-features"),
    doc = ::document_features::document_features!()
)]
#![cfg_attr(all(doc, feature = "document-features"), feature(doc_cfg, doc_auto_cfg))]
#![deny(missing_docs, rust_2018_idioms, unsafe_code)]

use bstr::BString;

/// Provides types needed for using [`stack::Platform::matching_attributes()`].
#[cfg(feature = "attributes")]
pub use gix_attributes as attributes;
/// A way to access the [`Case`](glob::pattern::Case) enum which is used throughout this API.
pub use gix_glob as glob;
/// Provides types needed for using [`stack::Platform::excluded_kind()`].
pub use gix_ignore as ignore;
/// Provides types needed for using [`Stack::at_path()`] and [`Stack::at_entry()`].
pub use gix_index as index;
/// Provides types needed for using [`Stack::at_path()`] and [`Stack::at_entry()`].
pub use gix_object as object;
/// Provides types needed for using [`stack::State::for_checkout()`].
#[cfg(feature = "attributes")]
pub use gix_validate as validate;

/// A cache for efficiently executing operations on directories and files which are encountered in sorted order.
/// That way, these operations can be re-used for subsequent invocations in the same directory.
///
/// This cache can be configured to create directories efficiently, read git-ignore files and git-attribute files,
/// in any combination.
///
/// A cache for directory creation to reduce the amount of stat calls when creating
/// directories safely, that is without following symlinks that might be on the way.
///
/// As a special case, it offers a 'prefix' which (by itself) is assumed to exist and may contain symlinks.
/// Everything past that prefix boundary must not contain a symlink. We do this by allowing any input path.
///
/// Another added benefit is its ability to store the full path of the entry to which leading directories
/// are to be created, to avoid allocating memory.
///
/// For this to work, it remembers the last 'good' path to a directory and assumes that all components of it
/// are still valid, too.
/// As directories are created, the cache will be adjusted to reflect the latest seen directory.
///
/// The caching is only useful if consecutive calls to create a directory are using a sorted list of entries.
#[derive(Clone)]
pub struct Stack {
    stack: gix_fs::Stack,
    /// tells us what to do as we change paths.
    state: stack::State,
    /// A buffer used when reading attribute or ignore files or their respective objects from the object database.
    buf: Vec<u8>,
    /// If case folding should happen when looking up attributes or exclusions.
    case: gix_glob::pattern::Case,
    /// A lookup table for object ids to read from in some situations when looking up attributes or exclusions.
    id_mappings: Vec<PathIdMapping>,
    statistics: stack::Statistics,
}

pub(crate) type PathIdMapping = (BString, gix_hash::ObjectId);

///
pub mod stack;

gix-worktree-0.38.0/src/stack/delegate.rs

use crate::{stack::State, PathIdMapping};

/// Various aggregate numbers related to the stack delegate itself.
#[derive(Default, Clone, Copy, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Statistics {
    /// The amount of `std::fs::create_dir` calls.
    ///
    /// This only happens if we are in the respective mode to create leading directories efficiently.
    pub num_mkdir_calls: usize,
    /// Amount of calls to push a path element.
    pub push_element: usize,
    /// Amount of calls to push a directory.
pub push_directory: usize, /// Amount of calls to pop a directory. pub pop_directory: usize, } pub(crate) struct StackDelegate<'a, 'find> { pub state: &'a mut State, pub buf: &'a mut Vec, #[cfg_attr(not(feature = "attributes"), allow(dead_code))] pub mode: Option, pub id_mappings: &'a Vec, pub objects: &'find dyn gix_object::Find, pub case: gix_glob::pattern::Case, pub statistics: &'a mut super::Statistics, } impl gix_fs::stack::Delegate for StackDelegate<'_, '_> { fn push_directory(&mut self, stack: &gix_fs::Stack) -> std::io::Result<()> { self.statistics.delegate.push_directory += 1; let rela_dir_bstr = gix_path::into_bstr(stack.current_relative()); let rela_dir = gix_path::to_unix_separators_on_windows(rela_dir_bstr); match &mut self.state { #[cfg(feature = "attributes")] State::CreateDirectoryAndAttributesStack { attributes, .. } | State::AttributesStack(attributes) => { attributes.push_directory( stack.root(), stack.current(), &rela_dir, self.buf, self.id_mappings, self.objects, &mut self.statistics.attributes, )?; } #[cfg(feature = "attributes")] State::AttributesAndIgnoreStack { ignore, attributes } => { attributes.push_directory( stack.root(), stack.current(), &rela_dir, self.buf, self.id_mappings, self.objects, &mut self.statistics.attributes, )?; ignore.push_directory( stack.root(), stack.current(), &rela_dir, self.buf, self.id_mappings, self.objects, self.case, &mut self.statistics.ignore, )?; } State::IgnoreStack(ignore) => ignore.push_directory( stack.root(), stack.current(), &rela_dir, self.buf, self.id_mappings, self.objects, self.case, &mut self.statistics.ignore, )?, } Ok(()) } #[cfg_attr(not(feature = "attributes"), allow(unused_variables))] fn push(&mut self, is_last_component: bool, stack: &gix_fs::Stack) -> std::io::Result<()> { self.statistics.delegate.push_element += 1; match &mut self.state { #[cfg(feature = "attributes")] State::CreateDirectoryAndAttributesStack { unlink_on_collision, validate, attributes: _, } => { validate_last_component(stack, self.mode, *validate)?; create_leading_directory( is_last_component, stack, self.mode, &mut self.statistics.delegate.num_mkdir_calls, *unlink_on_collision, )?; } #[cfg(feature = "attributes")] State::AttributesAndIgnoreStack { .. } | State::AttributesStack(_) => {} State::IgnoreStack(_) => {} } Ok(()) } fn pop_directory(&mut self) { self.statistics.delegate.pop_directory += 1; match &mut self.state { #[cfg(feature = "attributes")] State::CreateDirectoryAndAttributesStack { attributes, .. 
} | State::AttributesStack(attributes) => { attributes.pop_directory(); } #[cfg(feature = "attributes")] State::AttributesAndIgnoreStack { attributes, ignore } => { attributes.pop_directory(); ignore.pop_directory(); } State::IgnoreStack(ignore) => { ignore.pop_directory(); } } } } #[cfg(feature = "attributes")] fn validate_last_component( stack: &gix_fs::Stack, mode: Option, opts: gix_validate::path::component::Options, ) -> std::io::Result<()> { let Some(last_component) = stack.current_relative().components().next_back() else { return Ok(()); }; let last_component = gix_path::try_into_bstr(std::borrow::Cow::Borrowed(last_component.as_os_str().as_ref())).map_err(|_err| { std::io::Error::new( std::io::ErrorKind::Other, format!( "Path component {last_component:?} of path \"{}\" contained invalid UTF-8 and could not be validated", stack.current_relative().display() ), ) })?; if let Err(err) = gix_validate::path::component( last_component.as_ref(), mode.and_then(|m| { (m == gix_index::entry::Mode::SYMLINK).then_some(gix_validate::path::component::Mode::Symlink) }), opts, ) { return Err(std::io::Error::new(std::io::ErrorKind::Other, err)); } Ok(()) } #[cfg(feature = "attributes")] fn create_leading_directory( is_last_component: bool, stack: &gix_fs::Stack, mode: Option, mkdir_calls: &mut usize, unlink_on_collision: bool, ) -> std::io::Result<()> { if is_last_component && !crate::stack::mode_is_dir(mode).unwrap_or(false) { return Ok(()); } *mkdir_calls += 1; match std::fs::create_dir(stack.current()) { Ok(()) => Ok(()), Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => { let meta = stack.current().symlink_metadata()?; if meta.is_dir() { Ok(()) } else if unlink_on_collision { if meta.file_type().is_symlink() { gix_fs::symlink::remove(stack.current())?; } else { std::fs::remove_file(stack.current())?; } *mkdir_calls += 1; std::fs::create_dir(stack.current()) } else { Err(err) } } Err(err) => Err(err), } } gix-worktree-0.38.0/src/stack/mod.rs000064400000000000000000000172601046102023000153720ustar 00000000000000#![allow(missing_docs)] use std::path::{Path, PathBuf}; use bstr::{BStr, ByteSlice}; use super::Stack; use crate::PathIdMapping; /// Various aggregate numbers collected from when the corresponding [`Stack`] was instantiated. #[derive(Default, Clone, Copy, Debug)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Statistics { /// The amount of platforms created to do further matching. pub platforms: usize, /// Information about the stack delegate. pub delegate: delegate::Statistics, /// Information about attributes #[cfg(feature = "attributes")] pub attributes: state::attributes::Statistics, /// Information about the ignore stack pub ignore: state::ignore::Statistics, } #[derive(Clone)] pub enum State { /// Useful for checkout where directories need creation, but we need to access attributes as well. #[cfg(feature = "attributes")] CreateDirectoryAndAttributesStack { /// If there is a symlink or a file in our path, try to unlink it before creating the directory. unlink_on_collision: bool, /// Options to control how newly created path components should be validated. validate: gix_validate::path::component::Options, /// State to handle attribute information attributes: state::Attributes, }, /// Used when adding files, requiring access to both attributes and ignore information, for example during add operations. 
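    /// Typically created via [`State::for_add()`].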
    #[cfg(feature = "attributes")]
    AttributesAndIgnoreStack {
        /// State to handle attribute information
        attributes: state::Attributes,
        /// State to handle exclusion information
        ignore: state::Ignore,
    },
    /// Used when only attributes are required, typically with fully virtual worktrees.
    #[cfg(feature = "attributes")]
    AttributesStack(state::Attributes),
    /// Used when providing worktree status information.
    IgnoreStack(state::Ignore),
}

#[must_use]
pub struct Platform<'a> {
    parent: &'a Stack,
    is_dir: Option<bool>,
}

/// Initialization
impl Stack {
    /// Create a new instance with `worktree_root` being the base for all future paths we match.
    /// `state` defines the capabilities of the cache.
    /// The `case` configures attribute and exclusion case sensitivity at *query time*, which should match the case that
    /// `state` might be configured with.
    /// `buf` is used when reading files, and `id_mappings` should have been created with [`State::id_mappings_from_index()`].
    pub fn new(
        worktree_root: impl Into<PathBuf>,
        state: State,
        case: gix_glob::pattern::Case,
        buf: Vec<u8>,
        id_mappings: Vec<PathIdMapping>,
    ) -> Self {
        let root = worktree_root.into();
        Stack {
            stack: gix_fs::Stack::new(root),
            state,
            case,
            buf,
            id_mappings,
            statistics: Statistics::default(),
        }
    }

    /// Create a new stack that takes into consideration the `ignore_case` result of a filesystem probe in `root`. It takes a configured
    /// `state` to control what it can do, while initializing attribute or ignore files that are to be queried from the ODB using
    /// `index` and `path_backing`.
    ///
    /// This is the easiest way to correctly set up a stack.
    pub fn from_state_and_ignore_case(
        root: impl Into<PathBuf>,
        ignore_case: bool,
        state: State,
        index: &gix_index::State,
        path_backing: &gix_index::PathStorageRef,
    ) -> Self {
        let case = if ignore_case {
            gix_glob::pattern::Case::Fold
        } else {
            gix_glob::pattern::Case::Sensitive
        };
        let attribute_files = state.id_mappings_from_index(index, path_backing, case);
        Stack::new(root, state, case, Vec::with_capacity(512), attribute_files)
    }
}

/// Entry points for attribute query
impl Stack {
    /// Append the `relative` path to the root directory of the cache and efficiently create leading directories, while assuring that no
    /// symlinks are in that path.
    /// If `mode` is known to be `Some(gix_index::entry::Mode::DIR|COMMIT)`, then `relative` points to a directory itself,
    /// in which case the entire resulting path is created as a directory.
    /// If it's not known, it is assumed to be a file.
    /// `objects` may be used to look up objects from an [id mapping][crate::stack::State::id_mappings_from_index()].
    ///
    /// Provide access to cached information for that `relative` path via the returned platform.
    pub fn at_path(
        &mut self,
        relative: impl AsRef<Path>,
        mode: Option<gix_index::entry::Mode>,
        objects: &dyn gix_object::Find,
    ) -> std::io::Result<Platform<'_>> {
        self.statistics.platforms += 1;
        let mut delegate = StackDelegate {
            state: &mut self.state,
            buf: &mut self.buf,
            mode,
            id_mappings: &self.id_mappings,
            objects,
            case: self.case,
            statistics: &mut self.statistics,
        };
        self.stack
            .make_relative_path_current(relative.as_ref(), &mut delegate)?;
        Ok(Platform {
            parent: self,
            is_dir: mode_is_dir(mode),
        })
    }

    /// Obtain a platform for lookups from a repo-`relative` path, typically obtained from an index entry. `mode` should reflect
    /// the kind of item set here, or left at `None` if unknown.
    /// `objects` may be used to look up objects from an [id mapping][crate::stack::State::id_mappings_from_index()].
    /// All effects are similar to [`at_path()`][Self::at_path()].
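    ///
    /// ### Example (sketch)
    ///
    /// A hypothetical lookup for an index entry's path, assuming `stack` was set up with ignore information
    /// and `odb` implements [`gix_object::Find`]:
    ///
    /// ```ignore
    /// let platform = stack.at_entry("src/lib.rs", Some(gix_index::entry::Mode::FILE), &odb)?;
    /// dbg!(platform.excluded_kind());
    /// ```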
    ///
    /// If `relative` ends with `/` and `mode` is `None`, it is automatically assumed to be a directory.
    pub fn at_entry<'r>(
        &mut self,
        relative: impl Into<&'r BStr>,
        mode: Option<gix_index::entry::Mode>,
        objects: &dyn gix_object::Find,
    ) -> std::io::Result<Platform<'_>> {
        let relative = relative.into();
        let relative_path = gix_path::try_from_bstr(relative).map_err(|_err| {
            std::io::Error::new(
                std::io::ErrorKind::Other,
                format!("The path \"{relative}\" contained invalid UTF-8 and could not be turned into a path"),
            )
        })?;

        self.at_path(
            relative_path,
            mode.or_else(|| relative.ends_with_str("/").then_some(gix_index::entry::Mode::DIR)),
            objects,
        )
    }
}

fn mode_is_dir(mode: Option<gix_index::entry::Mode>) -> Option<bool> {
    mode.map(|m|
        // This applies to directories and commits (submodules are directories on disk)
        m.is_sparse() || m.is_submodule())
}

/// Mutation
impl Stack {
    /// Reset the statistics after returning them.
    pub fn take_statistics(&mut self) -> Statistics {
        std::mem::take(&mut self.statistics)
    }

    /// Return our state for applying changes.
    pub fn state_mut(&mut self) -> &mut State {
        &mut self.state
    }

    /// Change the `case` of the next match to the given one.
    pub fn set_case(&mut self, case: gix_glob::pattern::Case) -> &mut Self {
        self.case = case;
        self
    }
}

/// Access
impl Stack {
    /// Return the statistics we gathered thus far.
    pub fn statistics(&self) -> &Statistics {
        &self.statistics
    }

    /// Return the state for introspection.
    pub fn state(&self) -> &State {
        &self.state
    }

    /// Return the base path that all entries or paths should be relative to when querying.
    ///
    /// Note that this path _may_ not be canonicalized.
    pub fn base(&self) -> &Path {
        self.stack.root()
    }
}

///
pub mod delegate;
use delegate::StackDelegate;

mod platform;
///
pub mod state;

gix-worktree-0.38.0/src/stack/platform.rs

use std::path::Path;

use bstr::ByteSlice;

use crate::stack::Platform;

/// Access
impl<'a> Platform<'a> {
    /// The full path to `relative` will be returned for use on the file system.
    pub fn path(&self) -> &'a Path {
        self.parent.stack.current()
    }

    /// See if the currently set entry is excluded as per exclude and git-ignore files.
    ///
    /// Note that this treats both classes, [*trashable*](gix_ignore::Kind::Expendable) and [*precious*](gix_ignore::Kind::Precious),
    /// as equal. If you need to differentiate, use [`matching_exclude_pattern()`](Self::matching_exclude_pattern)
    /// or [`excluded_kind()`](Self::excluded_kind).
    ///
    /// # Panics
    ///
    /// If the cache was configured without exclude patterns.
    #[doc(alias = "is_path_ignored", alias = "git2")]
    pub fn is_excluded(&self) -> bool {
        self.matching_exclude_pattern()
            .map_or(false, |m| !m.pattern.is_negative())
    }

    /// See if a non-negative ignore-pattern matches and obtain the kind of exclude, or return `None`
    /// if the path isn't excluded.
    ///
    /// This is similar to [`is_excluded()`](Self::is_excluded), but provides details that are useful to
    /// decide what to do with the excluded item.
    pub fn excluded_kind(&self) -> Option<gix_ignore::Kind> {
        self.matching_exclude_pattern()
            .and_then(|m| (!m.pattern.is_negative()).then_some(m.kind))
    }

    /// Check all exclude patterns to see if the currently set path matches any of them.
    ///
    /// Note that this pattern might be negated, which means this path is included.
    ///
    /// # Panics
    ///
    /// If the cache was configured without exclude patterns.
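    ///
    /// ### Example (sketch)
    ///
    /// Assuming a `platform` obtained from [`Stack::at_entry()`](crate::Stack::at_entry):
    ///
    /// ```ignore
    /// if let Some(m) = platform.matching_exclude_pattern() {
    ///     println!("{:?} matched {:?} (kind: {:?})", platform.path(), m.pattern, m.kind);
    /// }
    /// ```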
    pub fn matching_exclude_pattern(&self) -> Option<gix_ignore::search::Match<'_>> {
        let ignore = self.parent.state.ignore_or_panic();
        let relative_path =
            gix_path::to_unix_separators_on_windows(gix_path::into_bstr(self.parent.stack.current_relative()));
        ignore.matching_exclude_pattern(relative_path.as_bstr(), self.is_dir, self.parent.case)
    }

    /// Match all attributes at the current path and store the result in `out`, returning `true` if at least one attribute was found.
    ///
    /// # Panics
    ///
    /// If the cache was configured without attributes.
    #[cfg(feature = "attributes")]
    pub fn matching_attributes(&self, out: &mut gix_attributes::search::Outcome) -> bool {
        let attrs = self.parent.state.attributes_or_panic();
        let relative_path =
            gix_path::to_unix_separators_on_windows(gix_path::into_bstr(self.parent.stack.current_relative()));
        attrs.matching_attributes(relative_path.as_bstr(), self.parent.case, self.is_dir, out)
    }
}

impl std::fmt::Debug for Platform<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.path(), f)
    }
}

gix-worktree-0.38.0/src/stack/state/attributes.rs

use std::path::{Path, PathBuf};

use bstr::{BStr, ByteSlice};
use gix_glob::pattern::Case;
use gix_object::FindExt;

use crate::{
    stack::state::{AttributeMatchGroup, Attributes},
    PathIdMapping, Stack,
};

/// Various aggregate numbers related to [`Attributes`].
#[derive(Default, Clone, Copy, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Statistics {
    /// Amount of pattern buffers read from the index.
    pub patterns_buffers: usize,
    /// Amount of pattern files read from disk.
    pub pattern_files: usize,
    /// Amount of pattern files we tried to find on disk.
    pub tried_pattern_files: usize,
}

/// Decide where to read `.gitattributes` files from.
///
/// To retrieve attribute files from id mappings, see
/// [State::id_mappings_from_index()][crate::stack::State::id_mappings_from_index()].
///
/// These mappings are typically produced from an index.
/// If a tree should be the source, build an attribute list from a tree instead, or convert a tree to an index.
///
#[derive(Default, Debug, Clone, Copy)]
pub enum Source {
    /// Use this when no worktree checkout is available, like in bare repositories, during clones, or when accessing blobs from
    /// other parts of the history which aren't checked out.
    #[default]
    IdMapping,
    /// Read from the id mappings and, if not present, read from the worktree.
    ///
    /// This is typically used when *checking out* files.
    IdMappingThenWorktree,
    /// Read from the worktree and, if not present, read from the id mappings.
    ///
    /// This is typically used when *checking in* files, and it's possible for sparse worktrees not to have a `.gitattributes` file
    /// checked out even though it's available in the index.
    WorktreeThenIdMapping,
}

impl Source {
    /// Returns non-worktree variants of `self` if `is_bare` is true.
    pub fn adjust_for_bare(self, is_bare: bool) -> Self {
        if is_bare {
            Source::IdMapping
        } else {
            self
        }
    }
}

/// Initialization
impl Attributes {
    /// Create a new instance from an attribute match group that represents `globals`. It can more easily be created with
    /// [`AttributeMatchGroup::new_globals()`].
    ///
    /// * `globals` contribute first and consist of all globally available, static files.
    /// * `info_attributes` is a path that should refer to `.git/info/attributes`, and it's not an error if the file doesn't exist.
    /// * `case` is used to control case-sensitivity during matching.
/// * `source` specifies from where the directory-based attribute files should be loaded from. pub fn new( globals: AttributeMatchGroup, info_attributes: Option, source: Source, collection: gix_attributes::search::MetadataCollection, ) -> Self { Attributes { globals, stack: Default::default(), info_attributes, source, collection, } } } impl Attributes { pub(crate) fn pop_directory(&mut self) { self.stack.pop_pattern_list().expect("something to pop"); } #[allow(clippy::too_many_arguments)] pub(crate) fn push_directory( &mut self, root: &Path, dir: &Path, rela_dir: &BStr, buf: &mut Vec, id_mappings: &[PathIdMapping], objects: &dyn gix_object::Find, stats: &mut Statistics, ) -> std::io::Result<()> { let attr_path_relative = gix_path::join_bstr_unix_pathsep(rela_dir, ".gitattributes"); let attr_file_in_index = id_mappings.binary_search_by(|t| t.0.as_bstr().cmp(attr_path_relative.as_ref())); // Git does not follow symbolic links as per documentation. let no_follow_symlinks = false; let read_macros_as_dir_is_root = root == dir; let mut added = false; match self.source { Source::IdMapping | Source::IdMappingThenWorktree => { if let Ok(idx) = attr_file_in_index { let blob = objects .find_blob(&id_mappings[idx].1, buf) .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?; let attr_path = gix_path::from_bstring(attr_path_relative.into_owned()); self.stack.add_patterns_buffer( blob.data, attr_path, Some(Path::new("")), &mut self.collection, read_macros_as_dir_is_root, ); added = true; stats.patterns_buffers += 1; } if !added && matches!(self.source, Source::IdMappingThenWorktree) { added = self.stack.add_patterns_file( dir.join(".gitattributes"), no_follow_symlinks, Some(root), buf, &mut self.collection, read_macros_as_dir_is_root, )?; stats.pattern_files += usize::from(added); stats.tried_pattern_files += 1; } } Source::WorktreeThenIdMapping => { added = self.stack.add_patterns_file( dir.join(".gitattributes"), no_follow_symlinks, Some(root), buf, &mut self.collection, read_macros_as_dir_is_root, )?; stats.pattern_files += usize::from(added); stats.tried_pattern_files += 1; if let Some(idx) = attr_file_in_index.ok().filter(|_| !added) { let blob = objects .find_blob(&id_mappings[idx].1, buf) .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?; let attr_path = gix_path::from_bstring(attr_path_relative.into_owned()); self.stack.add_patterns_buffer( blob.data, attr_path, Some(Path::new("")), &mut self.collection, read_macros_as_dir_is_root, ); added = true; stats.patterns_buffers += 1; } } } // Need one stack level per component so push and pop matches, but only if this isn't the root level which is never popped. if !added && self.info_attributes.is_none() { self.stack .add_patterns_buffer(&[], "".into(), None, &mut self.collection, true); } // When reading the root, always the first call, we can try to also read the `.git/info/attributes` file which is // by nature never popped, and follows the root, as global. 
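        // `take()` replaces the stored path with `None`, so this branch runs at most once, for the root directory.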
if let Some(info_attr) = self.info_attributes.take() { let added = self.stack.add_patterns_file( info_attr, true, None, buf, &mut self.collection, true, /* read macros */ )?; stats.pattern_files += usize::from(added); stats.tried_pattern_files += 1; } Ok(()) } pub(crate) fn matching_attributes( &self, relative_path: &BStr, case: Case, is_dir: Option, out: &mut gix_attributes::search::Outcome, ) -> bool { // assure `out` is ready to deal with possibly changed collections (append-only) out.initialize(&self.collection); let groups = [&self.globals, &self.stack]; let mut has_match = false; groups.iter().rev().any(|group| { has_match |= group.pattern_matching_relative_path(relative_path, case, is_dir, out); out.is_done() }); has_match } } /// Attribute matching specific methods impl Stack { /// Creates a new container to store match outcomes for all attribute matches. /// /// ### Panics /// /// If attributes aren't configured. pub fn attribute_matches(&self) -> gix_attributes::search::Outcome { let mut out = gix_attributes::search::Outcome::default(); out.initialize(&self.state.attributes_or_panic().collection); out } /// Creates a new container to store match outcomes for the given attributes. /// /// ### Panics /// /// If attributes aren't configured. pub fn selected_attribute_matches<'a>( &self, given: impl IntoIterator>, ) -> gix_attributes::search::Outcome { let mut out = gix_attributes::search::Outcome::default(); out.initialize_with_selection( &self.state.attributes_or_panic().collection, given.into_iter().map(Into::into), ); out } /// Return the metadata collection that enables initializing attribute match outcomes as done in /// [`attribute_matches()`][Stack::attribute_matches()] or [`selected_attribute_matches()`][Stack::selected_attribute_matches()] /// /// ### Panics /// /// If attributes aren't configured. pub fn attributes_collection(&self) -> &gix_attributes::search::MetadataCollection { &self.state.attributes_or_panic().collection } } gix-worktree-0.38.0/src/stack/state/ignore.rs000064400000000000000000000215461046102023000172200ustar 00000000000000use std::path::Path; use bstr::{BStr, ByteSlice}; use gix_glob::pattern::Case; use gix_object::FindExt; use crate::{ stack::state::{Ignore, IgnoreMatchGroup}, PathIdMapping, }; /// Decide where to read `.gitignore` files from. #[derive(Default, Debug, Clone, Copy)] pub enum Source { /// Retrieve ignore files from id mappings, see /// [State::id_mappings_from_index()][crate::stack::State::id_mappings_from_index()]. /// /// These mappings are typically produced from an index. /// If a tree should be the source, build an attribute list from a tree instead, or convert a tree to an index. /// /// Use this when no worktree checkout is available, like in bare repositories or when accessing blobs from other parts /// of the history which aren't checked out. IdMapping, /// Read from the worktree and if not present, read them from the id mappings *if* these don't have the skip-worktree bit set. #[default] WorktreeThenIdMappingIfNotSkipped, } impl Source { /// Returns non-worktree variants of `self` if `is_bare` is true. pub fn adjust_for_bare(self, is_bare: bool) -> Self { if is_bare { Source::IdMapping } else { self } } } /// Various aggregate numbers related [`Ignore`]. #[derive(Default, Clone, Copy, Debug)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Statistics { /// Amount of patterns buffers read from the index. pub patterns_buffers: usize, /// Amount of pattern files read from disk. 
pub pattern_files: usize, /// Amount of pattern files we tried to find on disk. pub tried_pattern_files: usize, } impl Ignore { /// Configure gitignore file matching by providing the immutable groups being `overrides` and `globals`, while letting the directory /// stack be dynamic. /// /// The `exclude_file_name_for_directories` is an optional override for the filename to use when checking per-directory /// ignore files within the repository, defaults to`.gitignore`. pub fn new( overrides: IgnoreMatchGroup, globals: IgnoreMatchGroup, exclude_file_name_for_directories: Option<&BStr>, source: Source, ) -> Self { Ignore { overrides, globals, stack: Default::default(), matched_directory_patterns_stack: Vec::with_capacity(6), exclude_file_name_for_directories: exclude_file_name_for_directories .map_or_else(|| ".gitignore".into(), ToOwned::to_owned), source, } } } impl Ignore { pub(crate) fn pop_directory(&mut self) { self.matched_directory_patterns_stack.pop().expect("something to pop"); self.stack.patterns.pop().expect("something to pop"); } /// The match groups from lowest priority to highest. pub(crate) fn match_groups(&self) -> [&IgnoreMatchGroup; 3] { [&self.globals, &self.stack, &self.overrides] } pub(crate) fn matching_exclude_pattern( &self, relative_path: &BStr, is_dir: Option, case: Case, ) -> Option> { let groups = self.match_groups(); let mut dir_match = None; if let Some((source, mapping)) = self .matched_directory_patterns_stack .iter() .rev() .filter_map(|v| *v) .map(|(gidx, plidx, pidx)| { let list = &groups[gidx].patterns[plidx]; (list.source.as_deref(), &list.patterns[pidx]) }) .next() { let match_ = gix_ignore::search::Match { pattern: &mapping.pattern, sequence_number: mapping.sequence_number, kind: mapping.value, source, }; if mapping.pattern.is_negative() { dir_match = Some(match_); } else { // Note that returning here is wrong if this pattern _was_ preceded by a negative pattern that // didn't match the directory, but would match now. // Git does it similarly so we do too even though it's incorrect. // To fix this, one would probably keep track of whether there was a preceding negative pattern, and // if so we check the path in full and only use the dir match if there was no match, similar to the negative // case above whose fix fortunately won't change the overall result. return match_.into(); } } groups .iter() .rev() .find_map(|group| group.pattern_matching_relative_path(relative_path, is_dir, case)) .or(dir_match) } /// Like `matching_exclude_pattern()` but without checking if the current directory is excluded. /// It returns a triple-index into our data structure from which a match can be reconstructed. 
pub(crate) fn matching_exclude_pattern_no_dir( &self, relative_path: &BStr, is_dir: Option, case: Case, ) -> Option<(usize, usize, usize)> { let groups = self.match_groups(); groups.iter().enumerate().rev().find_map(|(gidx, group)| { let basename_pos = relative_path.rfind(b"/").map(|p| p + 1); group .patterns .iter() .enumerate() .rev() .find_map(|(plidx, pl)| { gix_ignore::search::pattern_idx_matching_relative_path( pl, relative_path, basename_pos, is_dir, case, ) .map(|idx| (plidx, idx)) }) .map(|(plidx, pidx)| (gidx, plidx, pidx)) }) } #[allow(clippy::too_many_arguments)] pub(crate) fn push_directory( &mut self, root: &Path, dir: &Path, rela_dir: &BStr, buf: &mut Vec, id_mappings: &[PathIdMapping], objects: &dyn gix_object::Find, case: Case, stats: &mut Statistics, ) -> std::io::Result<()> { self.matched_directory_patterns_stack .push(self.matching_exclude_pattern_no_dir(rela_dir, Some(true), case)); let ignore_path_relative = gix_path::join_bstr_unix_pathsep(rela_dir, ".gitignore"); let ignore_file_in_index = id_mappings.binary_search_by(|t| t.0.as_bstr().cmp(ignore_path_relative.as_ref())); match self.source { Source::IdMapping => { match ignore_file_in_index { Ok(idx) => { let ignore_blob = objects .find_blob(&id_mappings[idx].1, buf) .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?; let ignore_path = gix_path::from_bstring(ignore_path_relative.into_owned()); self.stack .add_patterns_buffer(ignore_blob.data, ignore_path, Some(Path::new(""))); stats.patterns_buffers += 1; } Err(_) => { // Need one stack level per component so push and pop matches. self.stack.patterns.push(Default::default()); } } } Source::WorktreeThenIdMappingIfNotSkipped => { let follow_symlinks = ignore_file_in_index.is_err(); let added = gix_glob::search::add_patterns_file( &mut self.stack.patterns, dir.join(".gitignore"), follow_symlinks, Some(root), buf, )?; stats.pattern_files += usize::from(added); stats.tried_pattern_files += 1; if !added { match ignore_file_in_index { Ok(idx) => { let ignore_blob = objects .find_blob(&id_mappings[idx].1, buf) .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?; let ignore_path = gix_path::from_bstring(ignore_path_relative.into_owned()); self.stack .add_patterns_buffer(ignore_blob.data, ignore_path, Some(Path::new(""))); stats.patterns_buffers += 1; } Err(_) => { // Need one stack level per component so push and pop matches. self.stack.patterns.push(Default::default()); } } } } } Ok(()) } } gix-worktree-0.38.0/src/stack/state/mod.rs000064400000000000000000000175771046102023000165250ustar 00000000000000use bstr::{BString, ByteSlice}; use gix_glob::pattern::Case; use crate::{stack::State, PathIdMapping}; #[cfg(feature = "attributes")] type AttributeMatchGroup = gix_attributes::Search; type IgnoreMatchGroup = gix_ignore::Search; /// State related to attributes associated with files in the repository. #[derive(Default, Clone)] #[cfg(feature = "attributes")] pub struct Attributes { /// Attribute patterns which aren't tied to the repository root, hence are global, they contribute first. globals: AttributeMatchGroup, /// Attribute patterns that match the currently set directory (in the stack). /// /// Note that the root-level file is always loaded, if present, followed by, the `$GIT_DIR/info/attributes`, if present, based /// on the location of the `info_attributes` file. 
stack: AttributeMatchGroup, /// The first time we push the root, we have to load additional information from this file if it exists along with the root attributes /// file if possible, and keep them there throughout. info_attributes: Option, /// A lookup table to accelerate searches. collection: gix_attributes::search::MetadataCollection, /// Where to read `.gitattributes` data from. source: attributes::Source, } /// State related to the exclusion of files, supporting static overrides and globals, along with a stack of dynamically read /// ignore files from disk or from the index each time the directory changes. #[derive(Default, Clone)] #[allow(unused)] pub struct Ignore { /// Ignore patterns passed as overrides to everything else, typically passed on the command-line and the first patterns to /// be consulted. overrides: IgnoreMatchGroup, /// Ignore patterns that match the currently set director (in the stack), which is pushed and popped as needed. stack: IgnoreMatchGroup, /// Ignore patterns which aren't tied to the repository root, hence are global. They are consulted last. globals: IgnoreMatchGroup, /// A matching stack of pattern indices which is empty if we have just been initialized to indicate that the /// currently set directory had a pattern matched. Note that this one could be negated. /// (index into match groups, index into list of pattern lists, index into pattern list) matched_directory_patterns_stack: Vec>, /// The name of the file to look for in directories. pub(crate) exclude_file_name_for_directories: BString, /// Where to read ignore files from source: ignore::Source, } /// #[cfg(feature = "attributes")] pub mod attributes; /// pub mod ignore; /// Initialization impl State { /// Configure a state to be suitable for checking out files, which only needs access to attribute files read from the index. #[cfg(feature = "attributes")] pub fn for_checkout( unlink_on_collision: bool, validate: gix_validate::path::component::Options, attributes: Attributes, ) -> Self { State::CreateDirectoryAndAttributesStack { unlink_on_collision, validate, attributes, } } /// Configure a state for adding files, with support for ignore files and attribute files. #[cfg(feature = "attributes")] pub fn for_add(attributes: Attributes, ignore: Ignore) -> Self { State::AttributesAndIgnoreStack { attributes, ignore } } } /// Utilities impl State { /// Returns a vec of tuples of relative index paths along with the best usable blob OID for /// either *ignore* or *attribute* files or both. This allows files to be accessed directly from /// the object database without the need for a worktree checkout. /// /// Note that this method… /// - ignores entries which aren't blobs. /// - ignores ignore entries which are not skip-worktree. /// - within merges, picks 'our' stage both for *ignore* and *attribute* files. /// /// * `index` is where we look for suitable files by path in order to obtain their blob hash. /// * `paths` is the indices storage backend for paths. /// * `case` determines if the search for files should be case-sensitive or not. pub fn id_mappings_from_index( &self, index: &gix_index::State, paths: &gix_index::PathStorageRef, case: Case, ) -> Vec { let a1_backing; #[cfg(feature = "attributes")] let a2_backing; let names = match self { State::IgnoreStack(ignore) => { a1_backing = [( ignore.exclude_file_name_for_directories.as_bytes().as_bstr(), Some(ignore.source), )]; a1_backing.as_ref() } #[cfg(feature = "attributes")] State::AttributesAndIgnoreStack { ignore, .. 
} => { a2_backing = [ ( ignore.exclude_file_name_for_directories.as_bytes().as_bstr(), Some(ignore.source), ), (".gitattributes".into(), None), ]; a2_backing.as_ref() } #[cfg(feature = "attributes")] State::CreateDirectoryAndAttributesStack { .. } | State::AttributesStack(_) => { a1_backing = [(".gitattributes".into(), None)]; a1_backing.as_ref() } }; index .entries() .iter() .filter_map(move |entry| { let path = entry.path_in(paths); // Stage 0 means there is no merge going on, stage 2 means it's 'our' side of the merge, but then // there won't be a stage 0. if entry.mode == gix_index::entry::Mode::FILE && (entry.stage_raw() == 0 || entry.stage_raw() == 2) { let basename = path.rfind_byte(b'/').map_or(path, |pos| path[pos + 1..].as_bstr()); let ignore_source = names.iter().find_map(|t| { match case { Case::Sensitive => basename == t.0, Case::Fold => basename.eq_ignore_ascii_case(t.0), } .then_some(t.1) })?; if let Some(source) = ignore_source { match source { ignore::Source::IdMapping => {} ignore::Source::WorktreeThenIdMappingIfNotSkipped => { // See https://github.com/git/git/blob/master/dir.c#L912:L912 if !entry.flags.contains(gix_index::entry::Flags::SKIP_WORKTREE) { return None; } } }; } Some((path.to_owned(), entry.id)) } else { None } }) .collect() } pub(crate) fn ignore_or_panic(&self) -> &Ignore { match self { State::IgnoreStack(v) => v, #[cfg(feature = "attributes")] State::AttributesAndIgnoreStack { ignore, .. } => ignore, #[cfg(feature = "attributes")] State::AttributesStack(_) | State::CreateDirectoryAndAttributesStack { .. } => { unreachable!("BUG: must not try to check excludes without it being setup") } } } #[cfg(feature = "attributes")] pub(crate) fn attributes_or_panic(&self) -> &Attributes { match self { State::AttributesStack(attributes) | State::AttributesAndIgnoreStack { attributes, .. } | State::CreateDirectoryAndAttributesStack { attributes, .. } => attributes, State::IgnoreStack(_) => { unreachable!("BUG: must not try to check excludes without it being setup") } } } }