gix-0.69.1/.cargo_vcs_info.json0000644000000001410000000000100117140ustar { "git": { "sha1": "3f725122aa173880aaafd4abc43b60bef3b8ca4a" }, "path_in_vcs": "gix" }gix-0.69.1/Cargo.lock0000644000002750140000000000100077040ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "addr2line" version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", "once_cell", "version_check", "zerocopy", ] [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" [[package]] name = "anyhow" version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" [[package]] name = "arc-swap" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "async-attributes" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" dependencies = [ "quote", "syn 1.0.109", ] [[package]] name = "async-channel" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" dependencies = [ "concurrent-queue", "event-listener 2.5.3", "futures-core", ] [[package]] name = "async-channel" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" dependencies = [ "concurrent-queue", "event-listener-strategy", "futures-core", "pin-project-lite", ] [[package]] name = "async-executor" version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" dependencies = [ "async-task", "concurrent-queue", "fastrand", "futures-lite", "slab", ] [[package]] name = "async-global-executor" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ "async-channel 2.3.1", "async-executor", "async-io", "async-lock", "blocking", "futures-lite", "once_cell", ] [[package]] name = "async-io" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" dependencies = [ "async-lock", "cfg-if", "concurrent-queue", "futures-io", "futures-lite", "parking", "polling", "rustix", "slab", "tracing", "windows-sys 0.59.0", ] [[package]] name = "async-lock" version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ "event-listener 5.3.1", "event-listener-strategy", "pin-project-lite", ] [[package]] name = "async-std" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" dependencies = [ "async-attributes", "async-channel 1.9.0", "async-global-executor", "async-io", "async-lock", "crossbeam-utils", "futures-channel", "futures-core", "futures-io", "futures-lite", "gloo-timers", "kv-log-macro", "log", "memchr", "once_cell", "pin-project-lite", "pin-utils", "slab", "wasm-bindgen-futures", ] [[package]] name = "async-task" version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "atomic-waker" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "aws-lc-rs" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f47bb8cc16b669d267eeccf585aea077d0882f4777b1c1f740217885d6e6e5a3" dependencies = [ "aws-lc-sys", "paste", "zeroize", ] [[package]] name = "aws-lc-sys" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2101df3813227bbaaaa0b04cd61c534c7954b22bd68d399b440be937dc63ff7" dependencies = [ "bindgen", "cc", "cmake", "dunce", 
"fs_extra", "libc", "paste", ] [[package]] name = "backtrace" version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", "windows-targets", ] [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "bindgen" version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ "bitflags", "cexpr", "clang-sys", "itertools", "lazy_static", "lazycell", "log", "prettyplease", "proc-macro2", "quote", "regex", "rustc-hash 1.1.0", "shlex", "syn 2.0.89", "which", ] [[package]] name = "bitflags" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" dependencies = [ "serde", ] [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "blocking" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" dependencies = [ "async-channel 2.3.1", "async-task", "futures-io", "futures-lite", "piper", ] [[package]] name = "bstr" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" dependencies = [ "memchr", "regex-automata", "serde", ] [[package]] name = "bumpalo" version = "3.16.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" [[package]] name = "bytesize" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc" [[package]] name = "cc" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" dependencies = [ "jobserver", "libc", "shlex", ] [[package]] name = "cesu8" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" [[package]] name = "cexpr" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ "nom", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "cfg_aliases" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "clang-sys" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" dependencies = [ "glob", "libc", "libloading", ] [[package]] name = "clru" version 
= "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbd0f76e066e64fdc5631e3bb46381254deab9ef1158292f27c8c57e3bf3fe59" [[package]] name = "cmake" version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" dependencies = [ "cc", ] [[package]] name = "combine" version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", "memchr", ] [[package]] name = "concurrent-queue" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ "crossbeam-utils", ] [[package]] name = "console" version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode", "lazy_static", "libc", "windows-sys 0.52.0", ] [[package]] name = "core-foundation" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] 
[[package]] name = "crossbeam" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-epoch", "crossbeam-queue", "crossbeam-utils", ] [[package]] name = "crossbeam-channel" version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-queue" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "curl" version = "0.4.47" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9fb4d13a1be2b58f14d60adba57c9834b78c62fd86c3e76a148f732686e9265" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", 
"windows-sys 0.52.0", ] [[package]] name = "curl-sys" version = "0.4.78+curl-8.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eec768341c5c7789611ae51cf6c459099f22e64a5d5d0ce4892434e33821eaf" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", "rustls-ffi", "vcpkg", "windows-sys 0.52.0", ] [[package]] name = "dashmap" version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ "cfg-if", "crossbeam-utils", "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", ] [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "document-features" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb6969eaabd2421f8a2775cfd2471a2b634372b4a25d41e3bd647b79912850a0" dependencies = [ "litrs", ] [[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "encode_unicode" version = "0.3.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "event-listener" version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" dependencies = [ "concurrent-queue", "parking", "pin-project-lite", ] [[package]] name = "event-listener-strategy" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ "event-listener 5.3.1", "pin-project-lite", ] [[package]] name = "faster-hex" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" dependencies = [ "serde", ] [[package]] name = "fastrand" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "filetime" version = "0.2.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.59.0", ] [[package]] name = "flate2" version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "libz-ng-sys", "libz-sys", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foreign-types" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" dependencies = [ "foreign-types-shared", ] [[package]] name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "fs_extra" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" [[package]] name = "futures-channel" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = 
"futures-io" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" dependencies = [ "fastrand", "futures-core", "futures-io", "parking", "pin-project-lite", ] [[package]] name = "futures-sink" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", "futures-io", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", "libc", "wasi", "wasm-bindgen", ] [[package]] name = "gimli" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "gix" version = "0.69.1" dependencies = [ "anyhow", "async-std", "document-features", "gix-actor", 
"gix-archive", "gix-attributes", "gix-command", "gix-commitgraph", "gix-config", "gix-credentials", "gix-date", "gix-diff", "gix-dir", "gix-discover", "gix-features", "gix-filter", "gix-fs", "gix-glob", "gix-hash", "gix-hashtable", "gix-ignore", "gix-index", "gix-lock", "gix-mailmap", "gix-merge", "gix-negotiate", "gix-object", "gix-odb", "gix-pack", "gix-path", "gix-pathspec", "gix-prompt", "gix-protocol", "gix-ref", "gix-refspec", "gix-revision", "gix-revwalk", "gix-sec", "gix-shallow", "gix-status", "gix-submodule", "gix-tempfile", "gix-trace", "gix-transport", "gix-traverse", "gix-url", "gix-utils", "gix-validate", "gix-worktree", "gix-worktree-state", "gix-worktree-stream", "insta", "is_ci", "once_cell", "parking_lot", "pretty_assertions", "prodash", "regex", "serde", "serial_test", "signal-hook", "smallvec", "termtree", "thiserror 2.0.3", "walkdir", ] [[package]] name = "gix-actor" version = "0.33.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32b24171f514cef7bb4dfb72a0b06dacf609b33ba8ad2489d4c4559a03b7afb3" dependencies = [ "bstr", "gix-date", "gix-utils", "itoa", "serde", "thiserror 2.0.3", "winnow", ] [[package]] name = "gix-archive" version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b63ef086543dce4f2cf9cb1ded1216bbd40332d3abcdd8d876e97f7812d9a26" dependencies = [ "bstr", "gix-date", "gix-object", "gix-worktree-stream", "jiff", "thiserror 2.0.3", ] [[package]] name = "gix-attributes" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddf9bf852194c0edfe699a2d36422d2c1f28f73b7c6d446c3f0ccd3ba232cadc" dependencies = [ "bstr", "gix-glob", "gix-path", "gix-quote", "gix-trace", "kstring", "serde", "smallvec", "thiserror 2.0.3", "unicode-bom", ] [[package]] name = "gix-bitmap" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d48b897b4bbc881aea994b4a5bbb340a04979d7be9089791304e04a9fbc66b53" 
dependencies = [ "thiserror 2.0.3", ] [[package]] name = "gix-chunk" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6ffbeb3a5c0b8b84c3fe4133a6f8c82fa962f4caefe8d0762eced025d3eb4f7" dependencies = [ "thiserror 2.0.3", ] [[package]] name = "gix-command" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9405c0a56e17f8365a46870cd2c7db71323ecc8bda04b50cb746ea37bd091e90" dependencies = [ "bstr", "gix-path", "gix-trace", "shell-words", ] [[package]] name = "gix-commitgraph" version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8da6591a7868fb2b6dabddea6b09988b0b05e0213f938dbaa11a03dd7a48d85" dependencies = [ "bstr", "gix-chunk", "gix-features", "gix-hash", "memmap2", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-config" version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6649b406ca1f99cb148959cf00468b231f07950f8ec438cc0903cda563606f19" dependencies = [ "bstr", "gix-config-value", "gix-features", "gix-glob", "gix-path", "gix-ref", "gix-sec", "memchr", "once_cell", "smallvec", "thiserror 2.0.3", "unicode-bom", "winnow", ] [[package]] name = "gix-config-value" version = "0.14.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49aaeef5d98390a3bcf9dbc6440b520b793d1bf3ed99317dc407b02be995b28e" dependencies = [ "bitflags", "bstr", "gix-path", "libc", "thiserror 2.0.3", ] [[package]] name = "gix-credentials" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82a50c56b785c29a151ab4ccf74a83fe4e21d2feda0d30549504b4baed353e0a" dependencies = [ "bstr", "gix-command", "gix-config-value", "gix-path", "gix-prompt", "gix-sec", "gix-trace", "gix-url", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-date" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c57c477b645ee248b173bb1176b52dd528872f12c50375801a58aaf5ae91113f" dependencies = [ "bstr", "itoa", "jiff", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-diff" version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8e92566eccbca205a0a0f96ffb0327c061e85bc5c95abbcddfe177498aa04f6" dependencies = [ "bstr", "gix-command", "gix-filter", "gix-fs", "gix-hash", "gix-object", "gix-path", "gix-tempfile", "gix-trace", "gix-traverse", "gix-worktree", "imara-diff", "thiserror 2.0.3", ] [[package]] name = "gix-dir" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fba2ffbcf4bd34438e8a8367ccbc94870549903d1f193a14f47eb6b0967e1293" dependencies = [ "bstr", "gix-discover", "gix-fs", "gix-ignore", "gix-index", "gix-object", "gix-path", "gix-pathspec", "gix-trace", "gix-utils", "gix-worktree", "thiserror 2.0.3", ] [[package]] name = "gix-discover" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83bf6dfa4e266a4a9becb4d18fc801f92c3f7cc6c433dd86fdadbcf315ffb6ef" dependencies = [ "bstr", "dunce", "gix-fs", "gix-hash", "gix-path", "gix-ref", "gix-sec", "thiserror 2.0.3", ] [[package]] name = "gix-features" version = "0.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d85d673f2e022a340dba4713bed77ef2cf4cd737d2f3e0f159d45e0935fd81f" dependencies = [ "bytes", "bytesize", "crc32fast", "crossbeam-channel", "flate2", "gix-hash", "gix-trace", "gix-utils", "jwalk", "libc", "once_cell", "parking_lot", "prodash", "sha1", "sha1_smol", "thiserror 2.0.3", "walkdir", ] [[package]] name = "gix-filter" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0ecdee5667f840ba20c7fe56d63f8e1dc1e6b3bfd296151fe5ef07c874790a" dependencies = [ "bstr", "encoding_rs", "gix-attributes", "gix-command", "gix-hash", "gix-object", "gix-packetline-blocking", "gix-path", "gix-quote", "gix-trace", 
"gix-utils", "smallvec", "thiserror 2.0.3", ] [[package]] name = "gix-fs" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3d4fac505a621f97e5ce2c69fdc425742af00c0920363ca4074f0eb48b1db9" dependencies = [ "fastrand", "gix-features", "gix-utils", ] [[package]] name = "gix-glob" version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaf69a6bec0a3581567484bf99a4003afcaf6c469fd4214352517ea355cf3435" dependencies = [ "bitflags", "bstr", "gix-features", "gix-path", "serde", ] [[package]] name = "gix-hash" version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b5eccc17194ed0e67d49285e4853307e4147e95407f91c1c3e4a13ba9f4e4ce" dependencies = [ "faster-hex", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-hashtable" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ef65b256631078ef733bc5530c4e6b1c2e7d5c2830b75d4e9034ab3997d18fe" dependencies = [ "gix-hash", "hashbrown 0.14.5", "parking_lot", ] [[package]] name = "gix-ignore" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6b1fb24d2a4af0aa7438e2771d60c14a80cf2c9bd55c29cf1712b841f05bb8a" dependencies = [ "bstr", "gix-glob", "gix-path", "gix-trace", "serde", "unicode-bom", ] [[package]] name = "gix-index" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "270645fd20556b64c8ffa1540d921b281e6994413a0ca068596f97e9367a257a" dependencies = [ "bitflags", "bstr", "filetime", "fnv", "gix-bitmap", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-traverse", "gix-utils", "gix-validate", "hashbrown 0.14.5", "itoa", "libc", "memmap2", "rustix", "serde", "smallvec", "thiserror 2.0.3", ] [[package]] name = "gix-lock" version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1cd3ab68a452db63d9f3ebdacb10f30dba1fa0d31ac64f4203d395ed1102d940" dependencies = [ "gix-tempfile", "gix-utils", "thiserror 2.0.3", ] [[package]] name = "gix-mailmap" version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6a108b866e00b8a59b8746906cccf2648ffc3e393dc9cca97254dd75c2ddf8c" dependencies = [ "bstr", "gix-actor", "gix-date", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-merge" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48bad205f8e55fc6def4e3d08981076ed29fb03bd7eba17f9c77b5bf4c16c249" dependencies = [ "bstr", "gix-command", "gix-diff", "gix-filter", "gix-fs", "gix-hash", "gix-index", "gix-object", "gix-path", "gix-quote", "gix-revision", "gix-revwalk", "gix-tempfile", "gix-trace", "gix-worktree", "imara-diff", "thiserror 2.0.3", ] [[package]] name = "gix-negotiate" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d27f830a16405386e9c83b9d5be8261fe32bbd6b3caf15bd1b284c6b2b7ef1a8" dependencies = [ "bitflags", "gix-commitgraph", "gix-date", "gix-hash", "gix-object", "gix-revwalk", "smallvec", "thiserror 2.0.3", ] [[package]] name = "gix-object" version = "0.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e42d58010183ef033f31088479b4eb92b44fe341b35b62d39eb8b185573d77ea" dependencies = [ "bstr", "gix-actor", "gix-date", "gix-features", "gix-hash", "gix-hashtable", "gix-path", "gix-utils", "gix-validate", "itoa", "serde", "smallvec", "thiserror 2.0.3", "winnow", ] [[package]] name = "gix-odb" version = "0.66.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb780eceb3372ee204469478de02eaa34f6ba98247df0186337e0333de97d0ae" dependencies = [ "arc-swap", "gix-date", "gix-features", "gix-fs", "gix-hash", "gix-hashtable", "gix-object", "gix-pack", "gix-path", "gix-quote", "parking_lot", "serde", "tempfile", "thiserror 2.0.3", ] [[package]] name = "gix-pack" 
version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4158928929be29cae7ab97afc8e820a932071a7f39d8ba388eed2380c12c566c" dependencies = [ "clru", "gix-chunk", "gix-features", "gix-hash", "gix-hashtable", "gix-object", "gix-path", "gix-tempfile", "memmap2", "parking_lot", "serde", "smallvec", "thiserror 2.0.3", "uluru", ] [[package]] name = "gix-packetline" version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "911aeea8b2dabeed2f775af9906152a1f0109787074daf9e64224e3892dde453" dependencies = [ "bstr", "faster-hex", "futures-io", "futures-lite", "gix-trace", "pin-project-lite", "thiserror 2.0.3", ] [[package]] name = "gix-packetline-blocking" version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce9004ce1bc00fd538b11c1ec8141a1558fb3af3d2b7ac1ac5c41881f9e42d2a" dependencies = [ "bstr", "faster-hex", "gix-trace", "thiserror 2.0.3", ] [[package]] name = "gix-path" version = "0.10.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afc292ef1a51e340aeb0e720800338c805975724c1dfbd243185452efd8645b7" dependencies = [ "bstr", "gix-trace", "home", "once_cell", "thiserror 2.0.3", ] [[package]] name = "gix-pathspec" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c472dfbe4a4e96fcf7efddcd4771c9037bb4fdea2faaabf2f4888210c75b81e" dependencies = [ "bitflags", "bstr", "gix-attributes", "gix-config-value", "gix-glob", "gix-path", "thiserror 2.0.3", ] [[package]] name = "gix-prompt" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82433a19aa44688e3bde05c692870eda50b5db053df53ed5ae6d8ea594a6babd" dependencies = [ "gix-command", "gix-config-value", "parking_lot", "rustix", "thiserror 2.0.3", ] [[package]] name = "gix-protocol" version = "0.47.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c84642e8b6fed7035ce9cc449593019c55b0ec1af7a5dce1ab8a0636eaaeb067" dependencies = [ "async-trait", "bstr", "futures-io", "futures-lite", "gix-credentials", "gix-date", "gix-features", "gix-hash", "gix-lock", "gix-negotiate", "gix-object", "gix-ref", "gix-refspec", "gix-revwalk", "gix-shallow", "gix-trace", "gix-transport", "gix-utils", "maybe-async", "serde", "thiserror 2.0.3", "winnow", ] [[package]] name = "gix-quote" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64a1e282216ec2ab2816cd57e6ed88f8009e634aec47562883c05ac8a7009a63" dependencies = [ "bstr", "gix-utils", "thiserror 2.0.3", ] [[package]] name = "gix-ref" version = "0.49.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a91b61776c839d0f1b7114901179afb0947aa7f4d30793ca1c56d335dfef485f" dependencies = [ "gix-actor", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-path", "gix-tempfile", "gix-utils", "gix-validate", "memmap2", "serde", "thiserror 2.0.3", "winnow", ] [[package]] name = "gix-refspec" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00c056bb747868c7eb0aeb352c9f9181ab8ca3d0a2550f16470803500c6c413d" dependencies = [ "bstr", "gix-hash", "gix-revision", "gix-validate", "smallvec", "thiserror 2.0.3", ] [[package]] name = "gix-revision" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61e1ddc474405a68d2ce8485705dd72fe6ce959f2f5fe718601ead5da2c8f9e7" dependencies = [ "bitflags", "bstr", "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "gix-revwalk", "gix-trace", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-revwalk" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "510026fc32f456f8f067d8f37c34088b97a36b2229d88a6a5023ef179fcb109d" dependencies = [ "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "smallvec", 
"thiserror 2.0.3", ] [[package]] name = "gix-sec" version = "0.10.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8b876ef997a955397809a2ec398d6a45b7a55b4918f2446344330f778d14fd6" dependencies = [ "bitflags", "gix-path", "libc", "serde", "windows-sys 0.52.0", ] [[package]] name = "gix-shallow" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d2673242e87492cb6ff671f0c01f689061ca306c4020f137197f3abc84ce01" dependencies = [ "bstr", "gix-hash", "gix-lock", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-status" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1665770e277608bd6b0eaf86adbf6cb3ffc6fb97749e7bc6f9318ac5f37564df" dependencies = [ "bstr", "filetime", "gix-diff", "gix-dir", "gix-features", "gix-filter", "gix-fs", "gix-hash", "gix-index", "gix-object", "gix-path", "gix-pathspec", "gix-worktree", "portable-atomic", "thiserror 2.0.3", ] [[package]] name = "gix-submodule" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2455f8c0fcb6ebe2a6e83c8f522d30615d763eb2ef7a23c7d929f9476e89f5c" dependencies = [ "bstr", "gix-config", "gix-path", "gix-pathspec", "gix-refspec", "gix-url", "thiserror 2.0.3", ] [[package]] name = "gix-tempfile" version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2feb86ef094cc77a4a9a5afbfe5de626897351bbbd0de3cb9314baf3049adb82" dependencies = [ "dashmap", "gix-fs", "libc", "once_cell", "parking_lot", "signal-hook", "signal-hook-registry", "tempfile", ] [[package]] name = "gix-trace" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04bdde120c29f1fc23a24d3e115aeeea3d60d8e65bab92cc5f9d90d9302eb952" dependencies = [ "tracing-core", ] [[package]] name = "gix-transport" version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dd04d91e507a8713cfa2318d5a85d75b36e53a40379cc7eb7634ce400ecacbaf" dependencies = [ "async-std", "async-trait", "base64", "bstr", "curl", "futures-io", "futures-lite", "gix-command", "gix-credentials", "gix-features", "gix-packetline", "gix-quote", "gix-sec", "gix-url", "pin-project-lite", "reqwest", "serde", "thiserror 2.0.3", ] [[package]] name = "gix-traverse" version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed47d648619e23e93f971d2bba0d10c1100e54ef95d2981d609907a8cabac89" dependencies = [ "bitflags", "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "gix-revwalk", "smallvec", "thiserror 2.0.3", ] [[package]] name = "gix-url" version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d096fb733ba6bd3f5403dba8bd72bdd8809fe2b347b57844040b8f49c93492d9" dependencies = [ "bstr", "gix-features", "gix-path", "percent-encoding", "serde", "thiserror 2.0.3", "url", ] [[package]] name = "gix-utils" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba427e3e9599508ed98a6ddf8ed05493db114564e338e41f6a996d2e4790335f" dependencies = [ "bstr", "fastrand", "unicode-normalization", ] [[package]] name = "gix-validate" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd520d09f9f585b34b32aba1d0b36ada89ab7fefb54a8ca3fe37fc482a750937" dependencies = [ "bstr", "thiserror 2.0.3", ] [[package]] name = "gix-worktree" version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "756dbbe15188fa22540d5eab941f8f9cf511a5364d5aec34c88083c09f4bea13" dependencies = [ "bstr", "gix-attributes", "gix-features", "gix-fs", "gix-glob", "gix-hash", "gix-ignore", "gix-index", "gix-object", "gix-path", "gix-validate", "serde", ] [[package]] name = "gix-worktree-state" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"672a5416fae50538a0af0374bf67e0c97a932fd9e9b72f7d4bfd25355967cbe1" dependencies = [ "bstr", "gix-features", "gix-filter", "gix-fs", "gix-glob", "gix-hash", "gix-index", "gix-object", "gix-path", "gix-worktree", "io-close", "thiserror 2.0.3", ] [[package]] name = "gix-worktree-stream" version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34005eae2c0482eeb840e67bdd317ffe6e34057ea4bd8c910fecaee521db69cf" dependencies = [ "gix-attributes", "gix-features", "gix-filter", "gix-fs", "gix-hash", "gix-object", "gix-path", "gix-traverse", "parking_lot", "thiserror 2.0.3", ] [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "gloo-timers" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" dependencies = [ "futures-channel", "futures-core", "js-sys", "wasm-bindgen", ] [[package]] name = "h2" version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", "http", "indexmap", "slab", "tokio", "tokio-util", "tracing", ] [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", ] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hermit-abi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hermit-abi" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" [[package]] name = "home" version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "http" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", "itoa", ] [[package]] name = "http-body" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http", ] [[package]] name = "http-body-util" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", "http", "http-body", "pin-project-lite", ] [[package]] name = "httparse" version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "human_format" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c3b1f728c459d27b12448862017b96ad4767b1ec2ec5e6434e99f1577f085b8" [[package]] name = "hyper" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", "futures-util", "h2", "http", "http-body", "httparse", "itoa", "pin-project-lite", "smallvec", "tokio", "want", ] [[package]] name = "hyper-rustls" 
version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", "http", "hyper", "hyper-util", "rustls", "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", "webpki-roots", ] [[package]] name = "hyper-tls" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", "hyper", "hyper-util", "native-tls", "tokio", "tokio-native-tls", "tower-service", ] [[package]] name = "hyper-util" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", "http", "http-body", "hyper", "pin-project-lite", "socket2", "tokio", "tower-service", "tracing", ] [[package]] name = "icu_collections" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ "displaydoc", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locid" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_locid_transform" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ "displaydoc", "icu_locid", "icu_locid_transform_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_locid_transform_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" [[package]] name = "icu_normalizer" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "utf16_iter", "utf8_iter", "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" [[package]] name = "icu_properties" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ "displaydoc", "icu_collections", "icu_locid_transform", "icu_properties_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_properties_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" [[package]] name = "icu_provider" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_provider_macros" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "idna" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ "idna_adapter", 
"smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "imara-diff" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc9da1a252bd44cd341657203722352efc9bc0c847d06ea6d2dc1cd1135e0a01" dependencies = [ "ahash", "hashbrown 0.14.5", ] [[package]] name = "indexmap" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", "hashbrown 0.15.2", ] [[package]] name = "insta" version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e9ffc4d4892617c50a928c52b2961cb5174b6fc6ebf252b2fac9d21955c48b8" dependencies = [ "console", "lazy_static", "linked-hash-map", "similar", ] [[package]] name = "io-close" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cadcf447f06744f8ce713d2d6239bb5bde2c357a452397a9ed90c625da390bc" dependencies = [ "libc", "winapi", ] [[package]] name = "ipnet" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "is_ci" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" [[package]] name = "itertools" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2" [[package]] name = "jiff" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d9d414fc817d3e3d62b2598616733f76c4cc74fbac96069674739b881295c8" dependencies = [ "jiff-tzdb-platform", "windows-sys 0.59.0", ] [[package]] name = "jiff-tzdb" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91335e575850c5c4c673b9bd467b0e025f164ca59d0564f69d0c2ee0ffad4653" [[package]] name = "jiff-tzdb-platform" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9835f0060a626fe59f160437bc725491a6af23133ea906500027d1bd2f8f4329" dependencies = [ "jiff-tzdb", ] [[package]] name = "jni" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ "cesu8", "combine", "jni-sys", "log", "thiserror 1.0.69", "walkdir", ] [[package]] name = "jni-sys" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] [[package]] name = "jwalk" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2735847566356cd2179a2a38264839308f7079fa96e6bd5a42d740460e003c56" dependencies = [ "crossbeam", "rayon", ] [[package]] name = "kstring" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ "serde", "static_assertions", ] [[package]] name = "kv-log-macro" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" dependencies = [ "log", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lazycell" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" version = "0.2.164" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" [[package]] name = "libloading" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", "windows-targets", ] [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags", "libc", "redox_syscall", ] [[package]] name = "libz-ng-sys" version = "1.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f0f7295a34685977acb2e8cc8b08ee4a8dffd6cf278eeccddbe1ed55ba815d5" dependencies = [ "cmake", "libc", ] [[package]] name = "libz-sys" version = "1.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" dependencies = [ "cc", "cmake", "libc", "pkg-config", "vcpkg", ] [[package]] name = "linked-hash-map" version = "0.5.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "litemap" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "litrs" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5" [[package]] name = "lock_api" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" dependencies = [ "value-bag", ] [[package]] name = "maybe-async" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" dependencies = [ "libc", ] [[package]] name = "mime" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "minimal-lexical" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ "adler2", ] [[package]] name = "mio" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ "hermit-abi 0.3.9", "libc", "wasi", "windows-sys 0.52.0", ] [[package]] name = "native-tls" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ "libc", "log", "openssl", "openssl-probe", "openssl-sys", "schannel", "security-framework", "security-framework-sys", "tempfile", ] [[package]] name = "nom" version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", ] [[package]] name = "num-bigint" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", ] [[package]] name = "num-integer" version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ "num-traits", ] [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" 
dependencies = [ "autocfg", ] [[package]] name = "object" version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "openssl" version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags", "cfg-if", "foreign-types", "libc", "once_cell", "openssl-macros", "openssl-sys", ] [[package]] name = "openssl-macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "parking" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-targets", ] [[package]] name = "paste" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" dependencies = [ "atomic-waker", "fastrand", "futures-io", ] [[package]] name = "pkg-config" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "polling" version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" dependencies = [ "cfg-if", "concurrent-queue", "hermit-abi 0.4.0", "pin-project-lite", "rustix", "tracing", "windows-sys 0.59.0", ] [[package]] name = "portable-atomic" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "ppv-lite86" 
version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" dependencies = [ "zerocopy", ] [[package]] name = "pretty_assertions" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", ] [[package]] name = "prettyplease" version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", "syn 2.0.89", ] [[package]] name = "proc-macro2" version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" version = "29.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a266d8d6020c61a437be704c5e618037588e1985c7dbb7bf8d265db84cffe325" dependencies = [ "bytesize", "human_format", "log", "parking_lot", ] [[package]] name = "quinn" version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" dependencies = [ "bytes", "pin-project-lite", "quinn-proto", "quinn-udp", "rustc-hash 2.0.0", "rustls", "socket2", "thiserror 2.0.3", "tokio", "tracing", ] [[package]] name = "quinn-proto" version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" dependencies = [ "bytes", "getrandom", "rand", "ring", "rustc-hash 2.0.0", "rustls", "rustls-pki-types", "slab", "thiserror 2.0.3", "tinyvec", "tracing", "web-time", ] [[package]] name = "quinn-udp" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7d5a626c6807713b15cac82a6acaccd6043c9a5408c24baae07611fec3f243da" dependencies = [ "cfg_aliases", "libc", "once_cell", "socket2", "tracing", "windows-sys 0.59.0", ] [[package]] name = "quote" version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom", ] [[package]] name = "rayon" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", ] [[package]] name = "rayon-core" version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", ] [[package]] name = "redox_syscall" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" 
dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64", "bytes", "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", "http", "http-body", "http-body-util", "hyper", "hyper-rustls", "hyper-tls", "hyper-util", "ipnet", "js-sys", "log", "mime", "native-tls", "once_cell", "percent-encoding", "pin-project-lite", "quinn", "rustls", "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "system-configuration", "tokio", "tokio-native-tls", "tokio-rustls", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "webpki-roots", "windows-registry", ] [[package]] name = "ring" version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", "getrandom", "libc", "spin", "untrusted", "windows-sys 0.52.0", ] [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" 
[[package]] name = "rustc-hash" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" [[package]] name = "rustix" version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys 0.52.0", ] [[package]] name = "rustls" version = "0.23.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c9cc1d47e243d655ace55ed38201c19ae02c148ae56412ab8750e8f0166ab7f" dependencies = [ "aws-lc-rs", "once_cell", "ring", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] [[package]] name = "rustls-ffi" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c916a3be537e52de0f3e311048dd1cfbdb2972048b1417d6088826d7d1477ec2" dependencies = [ "libc", "log", "rustls", "rustls-pemfile", "rustls-pki-types", "rustls-platform-verifier", "rustls-webpki", ] [[package]] name = "rustls-native-certs" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" dependencies = [ "openssl-probe", "rustls-pemfile", "rustls-pki-types", "schannel", "security-framework", ] [[package]] name = "rustls-pemfile" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ "rustls-pki-types", ] [[package]] name = "rustls-pki-types" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" dependencies = [ "web-time", ] [[package]] name = "rustls-platform-verifier" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" dependencies = [ "core-foundation", "core-foundation-sys", "jni", "log", "once_cell", "rustls", "rustls-native-certs", "rustls-platform-verifier-android", "rustls-webpki", "security-framework", "security-framework-sys", "webpki-roots", "winapi", ] [[package]] name = "rustls-platform-verifier-android" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" [[package]] name = "rustls-webpki" version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", ] [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "scc" version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" dependencies = [ "sdd", ] [[package]] name = "schannel" version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sdd" version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" [[package]] name = "security-framework" version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags", "core-foundation", "core-foundation-sys", "libc", "num-bigint", "security-framework-sys", ] [[package]] name = "security-framework-sys" version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "serde" version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "serde_json" version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_urlencoded" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", "itoa", "ryu", "serde", ] [[package]] name = "serial_test" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b258109f244e1d6891bf1053a55d63a5cd4f8f4c30cf9a1280989f80e7a1fa9" dependencies = [ "once_cell", "parking_lot", "scc", "serial_test_derive", ] [[package]] name = "serial_test_derive" version = "3.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", "sha1-asm", ] [[package]] name = "sha1-asm" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "286acebaf8b67c1130aedffad26f594eff0c1292389158135327d2e23aed582b" dependencies = [ "cc", ] [[package]] name = "sha1_smol" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "shell-words" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" dependencies = [ "libc", "signal-hook-registry", ] [[package]] name = "signal-hook-registry" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] [[package]] name = "similar" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" [[package]] name = "slab" version = "0.4.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" dependencies = [ "serde", ] [[package]] name = "socket2" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "syn" version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "sync_wrapper" version = "1.0.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] [[package]] name = "synstructure" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "system-configuration" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags", "core-foundation", "system-configuration-sys", ] [[package]] name = "system-configuration-sys" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "tempfile" version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] name = "termtree" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" dependencies = [ "thiserror-impl 2.0.3", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "thiserror-impl" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "tinystr" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "tinyvec" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", "libc", "mio", "pin-project-lite", "socket2", "windows-sys 0.52.0", ] [[package]] name = "tokio-native-tls" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", "tokio", ] [[package]] name = "tokio-rustls" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ "rustls", "rustls-pki-types", "tokio", ] [[package]] name = "tokio-util" version = "0.7.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", ] [[package]] name = "tower-service" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "pin-project-lite", "tracing-core", ] [[package]] name = "tracing-core" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", ] [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "uluru" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c8a2469e56e6e5095c82ccd3afb98dad95f7af7929aab6d8ba8d6e0f73657da" dependencies = [ "arrayvec", ] [[package]] name = "unicode-bom" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" [[package]] name = "unicode-ident" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" version = "0.1.24" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] [[package]] name = "utf16_iter" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "valuable" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] 
[[package]] name = "want" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ "try-lock", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn 2.0.89", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" dependencies = [ "cfg-if", "js-sys", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "web-sys" version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "web-time" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "webpki-roots" version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] [[package]] name = "which" version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", "home", "once_cell", "rustix", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-registry" version = 
"0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" dependencies = [ "windows-result", "windows-strings", "windows-targets", ] [[package]] name = "windows-result" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" dependencies = [ "windows-targets", ] [[package]] name = "windows-strings" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ "windows-result", "windows-targets", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" 
[[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] [[package]] name = "write16" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" [[package]] name = "writeable" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yoke" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", "synstructure", ] [[package]] name = "zerocopy" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] [[package]] name = "zerofrom" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", "synstructure", ] [[package]] name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] name = "zerovec" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" dependencies = [ 
"yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", "syn 2.0.89", ] gix-0.69.1/Cargo.toml0000644000000264410000000000100077250ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.65" name = "gix" version = "0.69.1" authors = ["Sebastian Thiel "] build = false include = [ "src/**/*", "LICENSE-*", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Interact with git repositories just like git would" readme = false license = "MIT OR Apache-2.0" repository = "https://github.com/GitoxideLabs/gitoxide" [package.metadata.docs.rs] features = [ "document-features", "max-performance", "blocking-network-client", "blocking-http-transport-curl", "need-more-recent-msrv", "serde", ] [lib] name = "gix" path = "src/lib.rs" test = true doctest = false [dependencies.async-std] version = "1.12.0" optional = true [dependencies.document-features] version = "0.2.0" optional = true [dependencies.gix-actor] version = "^0.33.1" [dependencies.gix-archive] version = "^0.18.0" optional = true default-features = false [dependencies.gix-attributes] version = "^0.23.1" optional = true [dependencies.gix-command] version = "^0.4.0" optional = true [dependencies.gix-commitgraph] version = "^0.25.1" [dependencies.gix-config] version = "^0.42.0" 
[dependencies.gix-credentials] version = "^0.26.0" optional = true [dependencies.gix-date] version = "^0.9.3" [dependencies.gix-diff] version = "^0.49.0" default-features = false [dependencies.gix-dir] version = "^0.11.0" optional = true [dependencies.gix-discover] version = "^0.37.0" [dependencies.gix-features] version = "^0.39.1" features = [ "progress", "once_cell", ] [dependencies.gix-filter] version = "^0.16.0" optional = true [dependencies.gix-fs] version = "^0.12.1" [dependencies.gix-glob] version = "^0.17.1" [dependencies.gix-hash] version = "^0.15.1" [dependencies.gix-hashtable] version = "^0.6.0" [dependencies.gix-ignore] version = "^0.12.1" optional = true [dependencies.gix-index] version = "^0.37.0" optional = true [dependencies.gix-lock] version = "^15.0.0" [dependencies.gix-mailmap] version = "^0.25.1" optional = true [dependencies.gix-merge] version = "^0.2.0" optional = true default-features = false [dependencies.gix-negotiate] version = "^0.17.0" optional = true [dependencies.gix-object] version = "^0.46.1" [dependencies.gix-odb] version = "^0.66.0" [dependencies.gix-pack] version = "^0.56.0" features = ["object-cache-dynamic"] default-features = false [dependencies.gix-path] version = "^0.10.13" [dependencies.gix-pathspec] version = "^0.8.1" optional = true [dependencies.gix-prompt] version = "^0.9.0" optional = true [dependencies.gix-protocol] version = "^0.47.0" [dependencies.gix-ref] version = "^0.49.1" [dependencies.gix-refspec] version = "^0.27.0" [dependencies.gix-revision] version = "^0.31.1" default-features = false [dependencies.gix-revwalk] version = "^0.17.0" [dependencies.gix-sec] version = "^0.10.10" [dependencies.gix-shallow] version = "^0.1.0" [dependencies.gix-status] version = "^0.16.0" features = ["worktree-rewrites"] optional = true [dependencies.gix-submodule] version = "^0.16.0" optional = true [dependencies.gix-tempfile] version = "^15.0.0" default-features = false [dependencies.gix-trace] version = "^0.1.11" 
[dependencies.gix-transport] version = "^0.44.0" optional = true [dependencies.gix-traverse] version = "^0.43.1" [dependencies.gix-url] version = "^0.28.2" [dependencies.gix-utils] version = "^0.1.13" [dependencies.gix-validate] version = "^0.9.2" [dependencies.gix-worktree] version = "^0.38.0" optional = true default-features = false [dependencies.gix-worktree-state] version = "^0.16.0" optional = true [dependencies.gix-worktree-stream] version = "^0.18.0" optional = true [dependencies.once_cell] version = "1.14.0" [dependencies.parking_lot] version = "0.12.1" optional = true [dependencies.prodash] version = "29.0.0" features = ["progress-tree"] optional = true [dependencies.regex] version = "1.6.0" features = ["std"] optional = true default-features = false [dependencies.serde] version = "1.0.114" features = ["derive"] optional = true default-features = false [dependencies.signal-hook] version = "0.3.9" optional = true default-features = false [dependencies.smallvec] version = "1.9.0" [dependencies.thiserror] version = "2.0.0" [dev-dependencies.anyhow] version = "1" [dev-dependencies.async-std] version = "1.12.0" features = ["attributes"] [dev-dependencies.insta] version = "1.40.0" [dev-dependencies.is_ci] version = "1.1.1" [dev-dependencies.pretty_assertions] version = "1.4.0" [dev-dependencies.serial_test] version = "3.1.0" default-features = false [dev-dependencies.termtree] version = "0.5.1" [dev-dependencies.walkdir] version = "2.3.2" [features] async-network-client = [ "gix-protocol/async-client", "gix-pack/streaming-input", "dep:gix-transport", "attributes", "credentials", ] async-network-client-async-std = [ "async-std", "async-network-client", "gix-transport/async-std", ] attributes = [ "excludes", "dep:gix-filter", "dep:gix-pathspec", "dep:gix-attributes", "dep:gix-submodule", "gix-worktree?/attributes", "command", ] basic = [ "blob-diff", "revision", "index", ] blob-diff = [ "gix-diff/blob", "attributes", ] blocking-http-transport-curl = [ 
"blocking-network-client", "gix-transport/http-client-curl", ] blocking-http-transport-curl-rustls = [ "blocking-http-transport-curl", "gix-transport/http-client-curl-rust-tls", ] blocking-http-transport-reqwest = [ "blocking-network-client", "gix-transport/http-client-reqwest", ] blocking-http-transport-reqwest-native-tls = [ "blocking-http-transport-reqwest", "gix-transport/http-client-reqwest-native-tls", ] blocking-http-transport-reqwest-rust-tls = [ "blocking-http-transport-reqwest", "gix-transport/http-client-reqwest-rust-tls", ] blocking-http-transport-reqwest-rust-tls-trust-dns = [ "blocking-http-transport-reqwest", "gix-transport/http-client-reqwest-rust-tls-trust-dns", ] blocking-network-client = [ "gix-protocol/blocking-client", "gix-pack/streaming-input", "dep:gix-transport", "attributes", "credentials", ] cache-efficiency-debug = ["gix-features/cache-efficiency-debug"] comfort = [ "gix-features/progress-unit-bytes", "gix-features/progress-unit-human-numbers", ] command = ["dep:gix-command"] credentials = [ "dep:gix-credentials", "dep:gix-prompt", "dep:gix-negotiate", ] default = [ "max-performance-safe", "comfort", "basic", "extras", ] dirwalk = [ "dep:gix-dir", "attributes", "excludes", ] excludes = [ "dep:gix-ignore", "dep:gix-worktree", "index", ] extras = [ "worktree-stream", "worktree-archive", "revparse-regex", "mailmap", "excludes", "attributes", "worktree-mutation", "credentials", "interrupt", "status", "dirwalk", ] fast-sha1 = ["gix-features/fast-sha1"] hp-tempfile-registry = ["gix-tempfile/hp-hashmap"] index = ["dep:gix-index"] interrupt = [ "dep:signal-hook", "gix-tempfile/signals", "dep:parking_lot", ] mailmap = [ "dep:gix-mailmap", "revision", ] max-control = [ "parallel", "pack-cache-lru-static", "pack-cache-lru-dynamic", ] max-performance = [ "max-performance-safe", "zlib-ng", "fast-sha1", ] max-performance-safe = ["max-control"] merge = [ "tree-editor", "blob-diff", "dep:gix-merge", "attributes", ] need-more-recent-msrv = [ "merge", 
"tree-editor", ] pack-cache-lru-dynamic = ["gix-pack/pack-cache-lru-dynamic"] pack-cache-lru-static = ["gix-pack/pack-cache-lru-static"] parallel = ["gix-features/parallel"] parallel-walkdir = ["gix-features/fs-walkdir-parallel"] progress-tree = ["prodash/progress-tree"] revision = [ "gix-revision/describe", "gix-revision/merge_base", "index", ] revparse-regex = [ "regex", "revision", ] serde = [ "dep:serde", "gix-pack/serde", "gix-object/serde", "gix-protocol/serde", "gix-transport?/serde", "gix-ref/serde", "gix-odb/serde", "gix-index?/serde", "gix-mailmap?/serde", "gix-url/serde", "gix-attributes?/serde", "gix-ignore?/serde", "gix-revision/serde", "gix-worktree?/serde", "gix-commitgraph/serde", "gix-credentials?/serde", ] status = [ "gix-status", "dirwalk", "index", "blob-diff", ] tracing = ["gix-features/tracing"] tracing-detail = [ "gix-features/tracing-detail", "tracing", ] tree-editor = [] verbose-object-parsing-errors = ["gix-object/verbose-object-parsing-errors"] worktree-archive = [ "gix-archive", "worktree-stream", "attributes", ] worktree-mutation = [ "attributes", "dep:gix-worktree-state", ] worktree-stream = [ "gix-worktree-stream", "attributes", ] zlib-ng = ["gix-features/zlib-ng"] zlib-ng-compat = ["gix-features/zlib-ng-compat"] zlib-stock = ["gix-features/zlib-stock"] [lints.clippy] bool_to_int_with_if = "allow" borrow_as_ptr = "allow" cast_lossless = "allow" cast_possible_truncation = "allow" cast_possible_wrap = "allow" cast_precision_loss = "allow" cast_sign_loss = "allow" checked_conversions = "allow" copy_iterator = "allow" default_trait_access = "allow" doc_markdown = "allow" empty_docs = "allow" enum_glob_use = "allow" explicit_deref_methods = "allow" explicit_into_iter_loop = "allow" explicit_iter_loop = "allow" filter_map_next = "allow" fn_params_excessive_bools = "allow" from_iter_instead_of_collect = "allow" if_not_else = "allow" ignored_unit_patterns = "allow" implicit_clone = "allow" inconsistent_struct_constructor = "allow" 
inefficient_to_string = "allow" inline_always = "allow" items_after_statements = "allow" iter_not_returning_iterator = "allow" iter_without_into_iter = "allow" manual_assert = "allow" manual_is_variant_and = "allow" manual_let_else = "allow" manual_string_new = "allow" many_single_char_names = "allow" match_bool = "allow" match_same_arms = "allow" match_wild_err_arm = "allow" match_wildcard_for_single_variants = "allow" missing_errors_doc = "allow" missing_panics_doc = "allow" module_name_repetitions = "allow" must_use_candidate = "allow" mut_mut = "allow" naive_bytecount = "allow" needless_for_each = "allow" needless_pass_by_value = "allow" needless_raw_string_hashes = "allow" no_effect_underscore_binding = "allow" option_option = "allow" range_plus_one = "allow" redundant_else = "allow" return_self_not_must_use = "allow" should_panic_without_expect = "allow" similar_names = "allow" single_match_else = "allow" stable_sort_primitive = "allow" struct_excessive_bools = "allow" struct_field_names = "allow" too_long_first_doc_paragraph = "allow" too_many_lines = "allow" transmute_ptr_to_ptr = "allow" trivially_copy_pass_by_ref = "allow" unnecessary_join = "allow" unnecessary_wraps = "allow" unreadable_literal = "allow" unused_self = "allow" used_underscore_binding = "allow" wildcard_imports = "allow" [lints.clippy.pedantic] level = "warn" priority = -1 [lints.rust] gix-0.69.1/Cargo.toml.orig000064400000000000000000000463561046102023000134150ustar 00000000000000lints.workspace = true [package] name = "gix" repository = "https://github.com/GitoxideLabs/gitoxide" description = "Interact with git repositories just like git would" license = "MIT OR Apache-2.0" version = "0.69.1" authors = ["Sebastian Thiel "] edition = "2021" include = ["src/**/*", "LICENSE-*"] rust-version = "1.65" [lib] doctest = false test = true [[example]] name = "clone" path = "examples/clone.rs" required-features = ["blocking-network-client"] [features] default = ["max-performance-safe", "comfort", 
"basic", "extras"] #! There are various categories of features which help to optimize performance and build times. `gix` comes with 'batteries included' and everything is #! enabled as long as it doesn't sacrifice compatibility. Most users will be fine with that but will pay with higher compile times than necessary as they #! probably don't use all of these features. #! #! **Thus it's recommended to take a moment and optimize build times by choosing only those 'Components' that you require.** *'Performance' relevant features should #! be chosen next to maximize efficiency.* #! #! #### Application Developers #! #! These are considered the end-users, all they need to tune is `Performance` features to optimize the efficiency of their app, assuming they don't use `gix` #! directly. Otherwise, see the `Library Developers` paragraph. #! #! In order to configure a crate that isn't a direct dependency, one has to make it a direct dependency. We recommend #! `gix-for-configuration = { package = "gix", version = "X.Y.Z", features = […] }` to make clear this dependency isn't used in code. #! #! #### Library Developers #! #! As a developer of a library, you should start out with `gix = { version = "X.Y.Z", default-features = false }` and add components as you see fit. #! For best compatibility, **do not activate `max-performance-safe`** or any other performance options. #! #! #### Bundles #! #! A bundle is a set of related feature toggles which can be activated with a single name that acts as a group. #! Bundles are for convenience only and bear no further meaning beyond the cargo manifest file. ## More fundamental components that most will be able to make good use of. basic = ["blob-diff", "revision", "index"] ## Various additional features and capabilities that are not necessarily part of what most users would need. 
extras = [ "worktree-stream", "worktree-archive", "revparse-regex", "mailmap", "excludes", "attributes", "worktree-mutation", "credentials", "interrupt", "status", "dirwalk", ] ## A collection of features that need a larger MSRV, and thus are disabled by default. ## * `blob-merge` should be in extras, but needs `tree-editor` for convenience. need-more-recent-msrv = ["merge", "tree-editor"] ## Various progress-related features that improve the look of progress message units. comfort = [ "gix-features/progress-unit-bytes", "gix-features/progress-unit-human-numbers", ] #! #### Components #! #! A component is a distinct feature which may be comprised of one or more methods around a particular topic. #! Providers of libraries should only activate the components they need. ## Provide a top-level `command` module that helps with spawning commands similarly to `git`. command = ["dep:gix-command"] ## Obtain information similar to `git status`. status = ["gix-status", "dirwalk", "index", "blob-diff"] ## Utilities for interrupting computations and cleaning up tempfiles. interrupt = ["dep:signal-hook", "gix-tempfile/signals", "dep:parking_lot"] ## Access to `.git/index` files. index = ["dep:gix-index"] ## Support directory walks with Git-style annoations. dirwalk = ["dep:gix-dir", "attributes", "excludes"] ## Access to credential helpers, which provide credentials for URLs. # Note that `gix-negotiate` just piggibacks here, as 'credentials' is equivalent to 'fetch & push' right now. credentials = ["dep:gix-credentials", "dep:gix-prompt", "dep:gix-negotiate"] ## Various ways to alter the worktree makeup by checkout and reset. worktree-mutation = ["attributes", "dep:gix-worktree-state"] ## Retrieve a worktree stack for querying exclude information excludes = ["dep:gix-ignore", "dep:gix-worktree", "index"] ## Provide facilities to edit trees conveniently. ## ## Not that currently, this requires [Rust 1.75](https://caniuse.rs/features/return_position_impl_trait_in_trait). 
## This feature toggle is likely going away then. tree-editor = [] ## Query attributes and excludes. Enables access to pathspecs, worktree checkouts, filter-pipelines and submodules. attributes = [ "excludes", "dep:gix-filter", "dep:gix-pathspec", "dep:gix-attributes", "dep:gix-submodule", "gix-worktree?/attributes", "command", ] ## Add support for mailmaps, as way of determining the final name of commmiters and authors. mailmap = ["dep:gix-mailmap", "revision"] ## Make revspec parsing possible, as well describing revision. revision = ["gix-revision/describe", "gix-revision/merge_base", "index"] ## If enabled, revspecs now support the regex syntax like `@^{/^.*x}`. Otherwise, only substring search is supported. ## This feature does increase compile time for niche-benefit, but is required for fully git-compatible revspec parsing. revparse-regex = ["regex", "revision"] ## Make it possible to diff blobs line by line. Note that this feature is integral for implementing tree-diffs as well due to the handling of rename-tracking, ## which relies on line-by-line diffs in some cases. blob-diff = ["gix-diff/blob", "attributes"] ## Add functions to specifically merge files, using the standard three-way merge that git offers. merge = ["tree-editor", "blob-diff", "dep:gix-merge", "attributes"] ## Make it possible to turn a tree into a stream of bytes, which can be decoded to entries and turned into various other formats. worktree-stream = ["gix-worktree-stream", "attributes"] ## Create archives from a tree in the repository, similar to what `git archive` does. ## ## Note that we disable all default features which strips it off all container support, like `tar` and `zip`. ## Your application should add it as dependency and re-activate the desired features. worktree-archive = ["gix-archive", "worktree-stream", "attributes"] #! #### Mutually Exclusive Network Client #! #! Either `async-*` or `blocking-*` versions of these toggles may be enabled at a time. #! 
For this reason, these must be chosen by the user of the library and can't be pre-selected. #! Making a choice here also affects which crypto-library ends up being used. ## Make `gix-protocol` available along with an async client. async-network-client = [ "gix-protocol/async-client", "gix-pack/streaming-input", "dep:gix-transport", "attributes", "credentials", ] ## Use this if your crate uses `async-std` as runtime, and enable basic runtime integration when connecting to remote servers via the `git://` protocol. async-network-client-async-std = [ "async-std", "async-network-client", "gix-transport/async-std", ] ## Make `gix-protocol` available along with a blocking client, providing access to the `file://`, `git://` and `ssh://` transports. blocking-network-client = [ "gix-protocol/blocking-client", "gix-pack/streaming-input", "dep:gix-transport", "attributes", "credentials", ] ## Stacks with `blocking-network-client` to provide support for HTTP/S using **curl**, and implies blocking networking as a whole, making the `https://` transport available. blocking-http-transport-curl = [ "blocking-network-client", "gix-transport/http-client-curl", ] ## Stacks with `blocking-http-transport-curl` and also enables the `rustls` backend to avoid `openssl`. blocking-http-transport-curl-rustls = [ "blocking-http-transport-curl", "gix-transport/http-client-curl-rust-tls", ] ## Stacks with `blocking-network-client` to provide support for HTTP/S using **reqwest**, and implies blocking networking as a whole, making the `https://` transport available. blocking-http-transport-reqwest = [ "blocking-network-client", "gix-transport/http-client-reqwest", ] ## Stacks with `blocking-http-transport-reqwest` and enables `https://` via the `rustls` crate. blocking-http-transport-reqwest-rust-tls = [ "blocking-http-transport-reqwest", "gix-transport/http-client-reqwest-rust-tls", ] ## Stacks with `blocking-http-transport-reqwest` and enables `https://` via the `rustls` crate. 
## This also makes use of `trust-dns` to avoid `getaddrinfo`, but note it comes with its own problems. blocking-http-transport-reqwest-rust-tls-trust-dns = [ "blocking-http-transport-reqwest", "gix-transport/http-client-reqwest-rust-tls-trust-dns", ] ## Stacks with `blocking-http-transport-reqwest` and enables `https://` via the `native-tls` crate. blocking-http-transport-reqwest-native-tls = [ "blocking-http-transport-reqwest", "gix-transport/http-client-reqwest-native-tls", ] #! #### Performance #! #! The reason these features exist is to allow optimization for compile time and optimize for compatibility by default. This means that some performance options around #! SHA1 and ZIP might not compile on all platforms, so it depends on the end-user who compiles the application to chose these based on their needs. ## Activate features that maximize performance, like using threads, but leave everything else that might affect compatibility out to allow users more fine-grained ## control over performance features like which `zlib*` implementation to use. ## No C toolchain is involved. max-control = ["parallel", "pack-cache-lru-static", "pack-cache-lru-dynamic"] ## Activate features that maximize performance, like usage of threads, `and access to caching in object databases, skipping the ones known to cause compile failures ## on some platforms. ## Note that this configuration still uses a pure Rust zlib implementation which isn't the fastest compared to its C-alternatives. ## No C toolchain is involved. max-performance-safe = ["max-control"] ## If set, walkdir iterators will be multi-threaded which affects the listing of loose objects and references. ## Note, however, that this will use `rayon` under the hood and spawn threads for each traversal to avoid a global rayon thread pool. ## Thus this option is more interesting to one-off client applications, rather than the server. 
parallel-walkdir = ["gix-features/fs-walkdir-parallel"] ## The tempfile registry uses a better implementation of a thread-safe hashmap, relying on an external crate. ## This may be useful when tempfiles are created and accessed in a massively parallel fashion and you know that this is indeed faster than ## the simpler implementation that is the default. hp-tempfile-registry = ["gix-tempfile/hp-hashmap"] ## Make certain data structure threadsafe (or `Sync`) to facilitate multithreading. Further, many algorithms will now use multiple threads by default. ## ## If unset, most of `gix` can only be used in a single thread as data structures won't be `Send` anymore. parallel = ["gix-features/parallel"] ## Provide a fixed-size allocation-free LRU cache for packs. It's useful if caching is desired while keeping the memory footprint ## for the LRU-cache itself low. pack-cache-lru-static = ["gix-pack/pack-cache-lru-static"] ## Provide a hash-map based LRU cache whose eviction is based a memory cap calculated from object data. pack-cache-lru-dynamic = ["gix-pack/pack-cache-lru-dynamic"] ## Activate other features that maximize performance, like usage of threads, `zlib-ng` and access to caching in object databases. ## Note that some platforms might suffer from compile failures, which is when `max-performance-safe` should be used. max-performance = ["max-performance-safe", "zlib-ng", "fast-sha1"] ## If enabled, use assembly versions of sha1 on supported platforms. ## This might cause compile failures as well which is why it can be turned off separately. fast-sha1 = ["gix-features/fast-sha1"] ## Use the C-based zlib-ng backend, which can compress and decompress significantly faster. ## Note that this will cause duplicate symbol errors if the application also depends on `zlib` - use `zlib-ng-compat` in that case. zlib-ng = ["gix-features/zlib-ng"] ## Use zlib-ng via its zlib-compat API. Useful if you already need zlib for C ## code elsewhere in your dependencies. 
Otherwise, use `zlib-ng`. zlib-ng-compat = ["gix-features/zlib-ng-compat"] ## Use a slower C-based backend which can compress and decompress significantly faster than the rust version. ## Unlike `zlib-ng-compat`, this allows using dynamic linking with system `zlib` libraries and doesn't require cmake. zlib-stock = ["gix-features/zlib-stock"] #! #### Other #! #! The catch-all of feature toggles. ## Enable tracing using the `tracing` crate for coarse tracing. tracing = ["gix-features/tracing"] ## Enable tracing using the `tracing` crate for detailed tracing. Also enables coarse tracing. tracing-detail = ["gix-features/tracing-detail", "tracing"] ## When parsing objects by default errors will only be available on the granularity of success or failure, and with the above flag enabled ## details information about the error location will be collected. ## Use it in applications which expect broken or invalid objects or for debugging purposes. ## Incorrectly formatted objects aren't very common otherwise. verbose-object-parsing-errors = ["gix-object/verbose-object-parsing-errors"] ## Data structures implement `serde::Serialize` and `serde::Deserialize`. serde = [ "dep:serde", "gix-pack/serde", "gix-object/serde", "gix-protocol/serde", "gix-transport?/serde", "gix-ref/serde", "gix-odb/serde", "gix-index?/serde", "gix-mailmap?/serde", "gix-url/serde", "gix-attributes?/serde", "gix-ignore?/serde", "gix-revision/serde", "gix-worktree?/serde", "gix-commitgraph/serde", "gix-credentials?/serde", ] ## Re-export the progress tree root which allows to obtain progress from various functions which take `impl gix::Progress`. ## Applications which want to display progress will probably need this implementation. progress-tree = ["prodash/progress-tree"] ## Print debugging information about usage of object database caches, useful for tuning cache sizes. 
cache-efficiency-debug = ["gix-features/cache-efficiency-debug"] [dependencies] gix-utils = { version = "^0.1.13", path = "../gix-utils" } gix-fs = { version = "^0.12.1", path = "../gix-fs" } gix-ref = { version = "^0.49.1", path = "../gix-ref" } gix-discover = { version = "^0.37.0", path = "../gix-discover" } gix-tempfile = { version = "^15.0.0", path = "../gix-tempfile", default-features = false } gix-lock = { version = "^15.0.0", path = "../gix-lock" } gix-validate = { version = "^0.9.2", path = "../gix-validate" } gix-sec = { version = "^0.10.10", path = "../gix-sec" } gix-date = { version = "^0.9.3", path = "../gix-date" } gix-refspec = { version = "^0.27.0", path = "../gix-refspec" } gix-filter = { version = "^0.16.0", path = "../gix-filter", optional = true } gix-dir = { version = "^0.11.0", path = "../gix-dir", optional = true } gix-config = { version = "^0.42.0", path = "../gix-config" } gix-odb = { version = "^0.66.0", path = "../gix-odb" } gix-hash = { version = "^0.15.1", path = "../gix-hash" } gix-shallow = { version = "^0.1.0", path = "../gix-shallow" } gix-object = { version = "^0.46.1", path = "../gix-object" } gix-actor = { version = "^0.33.1", path = "../gix-actor" } gix-pack = { version = "^0.56.0", path = "../gix-pack", default-features = false, features = [ "object-cache-dynamic", ] } gix-revision = { version = "^0.31.1", path = "../gix-revision", default-features = false } gix-revwalk = { version = "^0.17.0", path = "../gix-revwalk" } gix-negotiate = { version = "^0.17.0", path = "../gix-negotiate", optional = true } gix-path = { version = "^0.10.13", path = "../gix-path" } gix-url = { version = "^0.28.2", path = "../gix-url" } gix-traverse = { version = "^0.43.1", path = "../gix-traverse" } gix-diff = { version = "^0.49.0", path = "../gix-diff", default-features = false } gix-merge = { version = "^0.2.0", path = "../gix-merge", default-features = false, optional = true } gix-mailmap = { version = "^0.25.1", path = "../gix-mailmap", optional = 
true } gix-features = { version = "^0.39.1", path = "../gix-features", features = [ "progress", "once_cell", ] } gix-trace = { version = "^0.1.11", path = "../gix-trace" } gix-glob = { version = "^0.17.1", path = "../gix-glob" } gix-credentials = { version = "^0.26.0", path = "../gix-credentials", optional = true } gix-prompt = { version = "^0.9.0", path = "../gix-prompt", optional = true } gix-index = { version = "^0.37.0", path = "../gix-index", optional = true } gix-attributes = { version = "^0.23.1", path = "../gix-attributes", optional = true } gix-ignore = { version = "^0.12.1", path = "../gix-ignore", optional = true } gix-worktree = { version = "^0.38.0", path = "../gix-worktree", optional = true, default-features = false } gix-worktree-state = { version = "^0.16.0", path = "../gix-worktree-state", optional = true } gix-hashtable = { version = "^0.6.0", path = "../gix-hashtable" } gix-commitgraph = { version = "^0.25.1", path = "../gix-commitgraph" } gix-pathspec = { version = "^0.8.1", path = "../gix-pathspec", optional = true } gix-submodule = { version = "^0.16.0", path = "../gix-submodule", optional = true } gix-status = { version = "^0.16.0", path = "../gix-status", optional = true, features = [ "worktree-rewrites", ] } gix-command = { version = "^0.4.0", path = "../gix-command", optional = true } gix-worktree-stream = { version = "^0.18.0", path = "../gix-worktree-stream", optional = true } gix-archive = { version = "^0.18.0", path = "../gix-archive", default-features = false, optional = true } # For communication with remotes gix-protocol = { version = "^0.47.0", path = "../gix-protocol" } gix-transport = { version = "^0.44.0", path = "../gix-transport", optional = true } # Just to get the progress-tree feature prodash = { version = "29.0.0", optional = true, features = ["progress-tree"] } once_cell = "1.14.0" signal-hook = { version = "0.3.9", default-features = false, optional = true } thiserror = "2.0.0" serde = { version = "1.0.114", optional = 
true, default-features = false, features = [ "derive", ] } smallvec = "1.9.0" async-std = { version = "1.12.0", optional = true } ## For use in rev-parse, which provides searching commits by running a regex on their message. ## ## If disabled, the text will be search verbatim in any portion of the commit message, similar to ## how a simple unanchored regex of only 'normal' characters would work. regex = { version = "1.6.0", optional = true, default-features = false, features = [ "std", ] } # for `interrupt` feature parking_lot = { version = "0.12.1", optional = true } document-features = { version = "0.2.0", optional = true } [dev-dependencies] # For additional features that aren't enabled by default due to MSRV gix = { path = ".", default-features = false, features = ["need-more-recent-msrv"] } pretty_assertions = "1.4.0" gix-testtools = { path = "../tests/tools" } is_ci = "1.1.1" anyhow = "1" walkdir = "2.3.2" serial_test = { version = "3.1.0", default-features = false } async-std = { version = "1.12.0", features = ["attributes"] } termtree = "0.5.1" insta = "1.40.0" [package.metadata.docs.rs] features = [ "document-features", "max-performance", "blocking-network-client", "blocking-http-transport-curl", "need-more-recent-msrv", "serde", ] gix-0.69.1/LICENSE-APACHE000064400000000000000000000247461046102023000124510ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
gix-0.69.1/LICENSE-MIT000064400000000000000000000017771046102023000121600ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. gix-0.69.1/src/assets/init/HEAD000064400000000000000000000000251046102023000141650ustar 00000000000000ref: refs/heads/main gix-0.69.1/src/assets/init/description000064400000000000000000000001151046102023000160070ustar 00000000000000Unnamed repository; everything before the `;` is the name of the repository. gix-0.69.1/src/assets/init/hooks/applypatch-msg.sample000075500000000000000000000022021046102023000210220ustar 00000000000000#!/bin/sh # A sample hook to check commit messages created by `git am` ########################################################### # # When you receive a patch via email, the `git am` command is commonly used to apply # that patch. During the `git am` process, the `applypatch-msg` hook is executed before # creating the commit. 
Its purpose is to validate and modify the commit log message # before the patch is applied as a commit in your Git repository. # # This script serves as an example to validate that the commit message introduced by # the patch from an email would pass the `commit-msg` hook, which would be executed # if you had created the commit yourself. # # This hook is the first and followed up by `pre-applypatch` and `post-applypatch`. # # To enable this hook remove the `.sample` suffix from this file entirely. # Retrieve the path of the commit-msg hook script. commitmsg="$(git rev-parse --git-path hooks/commit-msg)" # If the commit-msg hook script is executable, execute it and pass any command-line arguments to it. test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} # Be sure to exit without error if `exec` isn't called. : gix-0.69.1/src/assets/init/hooks/commit-msg.sample000075500000000000000000000023721046102023000201550ustar 00000000000000#!/bin/sh # A sample hook to check commit messages created by `git commit` ################################################################ # # This example script checks commit messages for duplicate `Signed-off-by` # lines and rejects the commit if these are present. # # It is called by "git commit" with a single argument: the name of the file # that contains the final commit message, which would be used in the commit. # A a non-zero exit status after issuing an appropriate message stops the operation. # The hook is allowed to edit the commit message file by rewriting the file # containing it. # # To enable this hook remove the `.sample` suffix from this file entirely. # Check for duplicate Signed-off-by lines in the commit message. # The following command uses grep to find lines starting with "Signed-off-by: " # in the commit message file specified by the first argument `$1`. # It then sorts the lines, counts the number of occurrences of each line, # and removes any lines that occur only once. 
# If there are any remaining lines, it means there are duplicate Signed-off-by lines. test "$(grep '^Signed-off-by: ' "$1" | sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" = "" || { echo "Remove duplicate Signed-off-by lines and repeat the commit." 1>&2 exit 1 } gix-0.69.1/src/assets/init/hooks/docs.url000064400000000000000000000000421046102023000163370ustar 00000000000000https://git-scm.com/docs/githooks gix-0.69.1/src/assets/init/hooks/fsmonitor-watchman.sample000075500000000000000000000014131046102023000217140ustar 00000000000000#!/usr/bin/sh # How to use hook-based fs-monitor integrations ############################################### # This script is meant as a placeholder for integrating filesystem monitors with git # using hooks in order to speed up commands like `git-status`. # # To setup the fs-monitor for use with watchman, run # `git config core.fsmonitor .git/hooks/fsmonitor-watchman` and paste the content of # the example script over at https://github.com/git/git/blob/aa9166bcc0ba654fc21f198a30647ec087f733ed/templates/hooks--fsmonitor-watchman.sample # into `.git/hooks/fsmonitor-watchman`. # # Note that by now and as of this writing on MacOS and Windows and starting from git 2.35.1 # one can use the built-in fs-monitor implementation using `git config core.fsmonitor true` exit 42 gix-0.69.1/src/assets/init/hooks/post-update.sample000075500000000000000000000010721046102023000203420ustar 00000000000000#!/bin/sh # A sample hook that runs after receiving a pack on a remote ############################################################ # This hook is called after a pack was received on the remote, i.e. after a successful `git push` operation. # It's useful on the server side only. # # There many more receive hooks which are documented in the official documentation: https://git-scm.com/docs/githooks. # # To enable this hook remove the `.sample` suffix from this file entirely. # Update static files to support the 'dumb' git HTTP protocol. 
exec git update-server-info gix-0.69.1/src/assets/init/hooks/pre-applypatch.sample000075500000000000000000000025131046102023000210270ustar 00000000000000#!/bin/sh # A sample hook to check commit messages created by `git am` ########################################################### # This hook script is triggered by `git am` without any context just before creating a commit, # which is useful to inspect the current tree or run scripts for further verification. # # If it exits with a non-zero exit code, the commit will not be created. Everything printed # to the output or error channels will be visible to the user. # # Note that there is a sibling hook called `post-applypatch` (also without further context) # which is run after the commit was created. It is useful to use the commit hash for further # processing, like sending information to the involved parties. # Finally, the `applypatch-msg` hook is called at the very beginning of the `git am` operation # to provide access to the commit-message. # # To enable this hook remove the `.sample` suffix from this file entirely. # Retrieve the path to the pre-commit hook script using the "git rev-parse" command. precommit="$(git rev-parse --git-path hooks/pre-commit)" # Check if the pre-commit hook script exists and is executable. # If it does, execute it passing the arguments from this script (if any) using the "exec" command. test -x "$precommit" && exec "$precommit" ${1+"$@"} # Be sure to exit without error if `exec` isn't called. : gix-0.69.1/src/assets/init/hooks/pre-commit.sample000075500000000000000000000014371046102023000201560ustar 00000000000000#!/bin/sh # A sample hook to prevent commits with merge-markers ##################################################### # This example hook rejects changes that are about to be committed with merge markers, # as that would be a clear indication of a failed merge. 
It is triggered by `git commit` # and returning with non-zero exit status prevents the commit from being created. # # To enable this hook remove the `.sample` suffix from this file entirely. # Check for merge markers in modified files for file in $(git diff --cached --name-only); do if grep -q -E '^(<<<<<<<|=======|>>>>>>>|\|\|\|\|\|\|\|)$' "$file"; then echo "Error: File '$file' contains merge markers. Please remove them before committing." exit 1 fi done # Exit with success if there are no errors exit 0 gix-0.69.1/src/assets/init/hooks/pre-merge-commit.sample000075500000000000000000000013141046102023000212450ustar 00000000000000#!/bin/sh # A sample hook to check commits created by `git merge` ####################################################### # # This hook is invoked by `git merge` without further context right before creating a commit. # It should be used to validate the current state that is supposed to be committed, or exit # with a non-zero status to prevent the commit. # All output will be visible to the user. # # To enable this hook remove the `.sample` suffix from this file entirely. # Check if the pre-commit hook exists and is executable. If it is, it executes the pre-commit hook script. test -x "$GIT_DIR/hooks/pre-commit" && exec "$GIT_DIR/hooks/pre-commit" # Be sure to exit without error if `exec` isn't called. : gix-0.69.1/src/assets/init/hooks/pre-push.sample000075500000000000000000000037221046102023000176440ustar 00000000000000#!/bin/sh # Check for "DELME" in commit messages of about-to-be-pushed commits #################################################################### # This hook script is triggered by `git push` right after a connection to the remote # was established and its initial response was received, and right before generating # and pushing a pack-file. # The operation will be aborted when exiting with a non-zero status. 
# # The following arguments are provided: # # $1 - The symbolic name of the remote to push to, like "origin" or the URL like "https://github.com/GitoxideLabs/gitoxide" if there is no such name. # $2 - The URL of the remote to push to, like "https://github.com/GitoxideLabs/gitoxide". # # The hook should then read from standard input in a line-by-line fashion and split the following space-separated fields: # # * local ref - the left side of a ref-spec, i.e. "local" of the "local:refs/heads/remote" ref-spec # * local hash - the hash of the commit pointed to by `local ref` # * remote ref - the right side of a ref-spec, i.e. "refs/heads/remote" of the "local:refs/heads/remote" ref-spec # * remote hash - the hash of the commit pointed to by `remote ref` # # In this example, we abort the push if any of the about-to-be-pushed commits have "DELME" in their commit message. # # To enable this hook remove the `.sample` suffix from this file entirely. remote="$1" url="$2" # Check each commit being pushed while read _local_ref local_hash _remote_ref _remote_hash; do # Skip if the local hash is all zeroes (deletion) zero_sha=$(printf "%0${#local_hash}d" 0) if [ "$local_hash" = "$zero_sha" ]; then continue fi # Get the commit message commit_msg=$(git log --format=%s -n 1 "$local_hash") # Check if the commit message contains "DELME" if echo "$commit_msg" | grep -iq "DELME"; then echo "Error: Found commit with 'DELME' in message. Push aborted to $remote ($url) aborted." 1>&2 exit 1 fi done # If no commit with "DELME" found, allow the push exit 0 gix-0.69.1/src/assets/init/hooks/pre-rebase.sample000075500000000000000000000031041046102023000201200ustar 00000000000000#!/bin/sh # A sample hook to validate the branches involved in a rebase operation ####################################################################### # # This hook is invoked right before `git rebase` starts its work and # prevents anything else to happen by returning a non-zero exit code. 
# # The following arguments are provided: # # $1 - the branch that contains the commit from which $2 was forked. # $2 - the branch being rebased or no second argument at all if the rebase applies to `HEAD`. # # This example hook aborts the rebase operation if the branch being rebased is not up to date # with the latest changes from the upstream branch, or if there are any uncommitted changes. # # To enable this hook remove the `.sample` suffix from this file entirely. upstream_branch=$1 if [ "$#" -eq 2 ]; then branch_being_rebased=$2 else branch_being_rebased=$(git symbolic-ref --quiet --short HEAD) || exit 0 # ignore rebases on detached heads fi # Check if the branch being rebased is behind the upstream branch if git log --oneline ${upstream_branch}..${branch_being_rebased} > /dev/null; then echo "Warning: The branch being rebased (${branch_being_rebased}) is behind the upstream branch (${upstream_branch})." 1>&2 echo "Please update your branch before rebasing." 1>&2 exit 1 fi # Check if there are any uncommitted changes if ! git diff-index --quiet HEAD --; then echo "Warning: There are uncommitted changes in your branch ${branch_being_rebased}." 1>&2 echo "Please commit or stash your changes before rebasing." 1>&2 exit 2 fi # All good, let the rebase proceed. exit 0 gix-0.69.1/src/assets/init/hooks/prepare-commit-msg.sample000075500000000000000000000035761046102023000216200ustar 00000000000000#!/bin/sh # A hook called by `git commit` to adjust the commit message right before the user sees it ########################################################################################## # # This script is called by `git commit` after commit message was initialized and right before # an editor is launched. # # It receives one to three arguments: # # $1 - the path to the file containing the commit message. It can be edited to change the message. # $2 - the kind of source of the message contained in $1. 
Possible values are # "message" - a message was provided via `-m` or `-F` # "commit" - `-c`, `-C` or `--amend` was given # "squash" - the `.git/SQUASH_MSG` file exists # "merge" - this is a merge or the `.git/MERGE` file exists # "template" - `-t` was provided or `commit.template` was set # $3 - If $2 is "commit" then this is the hash of the commit. # It can also take other values, best understood by studying the source code at # https://github.com/git/git/blob/aa9166bcc0ba654fc21f198a30647ec087f733ed/builtin/commit.c#L745 # # The following example # # To enable this hook remove the `.sample` suffix from this file entirely. COMMIT_MSG_FILE=$1 # Check if the commit message file is empty or already contains a message if [ -s "$COMMIT_MSG_FILE" ]; then # If the commit message is already provided, exit without making any changes. # This can happen if the user provided a message via `-m` or a template. exit 0 fi # Retrieve the branch name from the current HEAD commit BRANCH_NAME=$(git symbolic-ref --short HEAD) # Generate a default commit message based on the branch name DEFAULT_MSG="" case "$BRANCH_NAME" in "feature/*") DEFAULT_MSG="feat: " ;; "bugfix/*") DEFAULT_MSG="fix: " ;; *) DEFAULT_MSG="chore: " ;; esac # Set the commit message that will be presented to the user. echo "$DEFAULT_MSG" > "$COMMIT_MSG_FILE" gix-0.69.1/src/assets/init/info/exclude000064400000000000000000000004151046102023000160530ustar 00000000000000# This file contains repository-wide exclude patterns that git will ignore. # They are local and will not be shared when pushing or pulling. # When using Rust the following would be typical exclude patterns. # Remove the '# ' prefix to let them take effect. 
# /target/ gix-0.69.1/src/attribute_stack.rs000064400000000000000000000044001046102023000150330ustar 00000000000000use std::ops::{Deref, DerefMut}; use crate::{bstr::BStr, types::AttributeStack, Repository}; /// Lifecycle impl<'repo> AttributeStack<'repo> { /// Create a new instance from a `repo` and the underlying pre-configured `stack`. /// /// Note that this type is typically created by [`Repository::attributes()`] or [`Repository::attributes_only()`]. pub fn new(stack: gix_worktree::Stack, repo: &'repo Repository) -> Self { AttributeStack { repo, inner: stack } } /// Detach the repository and return the underlying plumbing datatype. pub fn detach(self) -> gix_worktree::Stack { self.inner } } impl Deref for AttributeStack<'_> { type Target = gix_worktree::Stack; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for AttributeStack<'_> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } /// Platform retrieval impl AttributeStack<'_> { /// Append the `relative` path to the root directory of the cache and load all attribute or ignore files on the way as needed. /// Use `mode` to specify what kind of item lives at `relative` - directories may match against rules specifically. /// If `mode` is `None`, the item at `relative` is assumed to be a file. /// /// The returned platform may be used to access the actual attribute or ignore information. #[doc(alias = "is_path_ignored", alias = "git2")] pub fn at_path( &mut self, relative: impl AsRef, mode: Option, ) -> std::io::Result> { self.inner.at_path(relative, mode, &self.repo.objects) } /// Obtain a platform for attribute or ignore lookups from a repo-`relative` path, typically obtained from an index entry. /// `mode` should reflect whether it's a directory or not, or left at `None` if unknown. /// /// If `relative` ends with `/` and `mode` is `None`, it is automatically assumed to be a directory. 
pub fn at_entry<'r>( &mut self, relative: impl Into<&'r BStr>, mode: Option, ) -> std::io::Result> { self.inner.at_entry(relative, mode, &self.repo.objects) } } gix-0.69.1/src/clone/access.rs000064400000000000000000000067501046102023000142160ustar 00000000000000use crate::{bstr::BString, clone::PrepareFetch, Repository}; /// Builder impl PrepareFetch { /// Use `f` to apply arbitrary changes to the remote that is about to be used to fetch a pack. /// /// The passed in `remote` will be un-named and pre-configured to be a default remote as we know it from git-clone. /// It is not yet present in the configuration of the repository, /// but each change it will eventually be written to the configuration prior to performing a the fetch operation, /// _all changes done in `f()` will be persisted_. /// /// It can also be used to configure additional options, like those for fetching tags. Note that /// [`with_fetch_tags()`](crate::Remote::with_fetch_tags()) should be called here to configure the clone as desired. /// Otherwise, a clone is configured to be complete and fetches all tags, not only those reachable from all branches. pub fn configure_remote( mut self, f: impl FnMut(crate::Remote<'_>) -> Result, Box> + 'static, ) -> Self { self.configure_remote = Some(Box::new(f)); self } /// Set the remote's name to the given value after it was configured using the function provided via /// [`configure_remote()`](Self::configure_remote()). /// /// If not set here, it defaults to `origin` or the value of `clone.defaultRemoteName`. pub fn with_remote_name(mut self, name: impl Into) -> Result { self.remote_name = Some(crate::remote::name::validated(name)?); Ok(self) } /// Make this clone a shallow one with the respective choice of shallow-ness. pub fn with_shallow(mut self, shallow: crate::remote::fetch::Shallow) -> Self { self.shallow = shallow; self } /// Apply the given configuration `values` right before readying the actual fetch from the remote. 
/// The configuration is marked with [source API](gix_config::Source::Api), and will not be written back, it's /// retained only in memory. pub fn with_in_memory_config_overrides(mut self, values: impl IntoIterator>) -> Self { self.config_overrides = values.into_iter().map(Into::into).collect(); self } /// Set the `name` of the reference to check out, instead of the remote `HEAD`. /// If `None`, the `HEAD` will be used, which is the default. /// /// Note that `name` should be a partial name like `main` or `feat/one`, but can be a full ref name. /// If a branch on the remote matches, it will automatically be retrieved even without a refspec. pub fn with_ref_name<'a, Name, E>(mut self, name: Option) -> Result where Name: TryInto<&'a gix_ref::PartialNameRef, Error = E>, { self.ref_name = name.map(TryInto::try_into).transpose()?.map(ToOwned::to_owned); Ok(self) } } /// Consumption impl PrepareFetch { /// Persist the contained repository as is even if an error may have occurred when fetching from the remote. pub fn persist(mut self) -> Repository { self.repo.take().expect("present and consumed once") } } impl Drop for PrepareFetch { fn drop(&mut self) { if let Some(repo) = self.repo.take() { std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok(); } } } impl From for Repository { fn from(prep: PrepareFetch) -> Self { prep.persist() } } gix-0.69.1/src/clone/checkout.rs000064400000000000000000000164451046102023000145640ustar 00000000000000use crate::{clone::PrepareCheckout, Repository}; /// pub mod main_worktree { use std::{path::PathBuf, sync::atomic::AtomicBool}; use crate::{clone::PrepareCheckout, Progress, Repository}; /// The error returned by [`PrepareCheckout::main_worktree()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Repository at \"{}\" is a bare repository and cannot have a main worktree checkout", git_dir.display())] BareRepository { git_dir: PathBuf }, #[error("The object pointed to by HEAD is not a treeish")] NoHeadTree(#[from] crate::object::peel::to_kind::Error), #[error("Could not create index from tree at {id}")] IndexFromTree { id: gix_hash::ObjectId, source: gix_index::init::from_tree::Error, }, #[error("Couldn't obtain configuration for core.protect*")] BooleanConfig(#[from] crate::config::boolean::Error), #[error(transparent)] WriteIndex(#[from] gix_index::file::write::Error), #[error(transparent)] CheckoutOptions(#[from] crate::config::checkout_options::Error), #[error(transparent)] IndexCheckout(#[from] gix_worktree_state::checkout::Error), #[error(transparent)] Peel(#[from] crate::reference::peel::Error), #[error("Failed to reopen object database as Arc (only if thread-safety wasn't compiled in)")] OpenArcOdb(#[from] std::io::Error), #[error("The HEAD reference could not be located")] FindHead(#[from] crate::reference::find::existing::Error), #[error("The HEAD reference could not be located")] PeelHeadToId(#[from] crate::head::peel::Error), } /// The progress ids used in [`PrepareCheckout::main_worktree()`]. /// /// Use this information to selectively extract the progress of interest in case the parent application has custom visualization. #[derive(Debug, Copy, Clone)] pub enum ProgressId { /// The amount of files checked out thus far. CheckoutFiles, /// The amount of bytes written in total, the aggregate of the size of the content of all files thus far. 
BytesWritten, } impl From for gix_features::progress::Id { fn from(v: ProgressId) -> Self { match v { ProgressId::CheckoutFiles => *b"CLCF", ProgressId::BytesWritten => *b"CLCB", } } } /// Modification impl PrepareCheckout { /// Checkout the main worktree, determining how many threads to use by looking at `checkout.workers`, defaulting to using /// on thread per logical core. /// /// Note that this is a no-op if the remote was empty, leaving this repository empty as well. This can be validated by checking /// if the `head()` of the returned repository is *not* unborn. /// /// # Panics /// /// If called after it was successful. The reason here is that it auto-deletes the contained repository, /// and keeps track of this by means of keeping just one repository instance, which is passed to the user /// after success. pub fn main_worktree

( &mut self, mut progress: P, should_interrupt: &AtomicBool, ) -> Result<(Repository, gix_worktree_state::checkout::Outcome), Error> where P: gix_features::progress::NestedProgress, P::SubProgress: gix_features::progress::NestedProgress + 'static, { self.main_worktree_inner(&mut progress, should_interrupt) } fn main_worktree_inner( &mut self, progress: &mut dyn gix_features::progress::DynNestedProgress, should_interrupt: &AtomicBool, ) -> Result<(Repository, gix_worktree_state::checkout::Outcome), Error> { let _span = gix_trace::coarse!("gix::clone::PrepareCheckout::main_worktree()"); let repo = self .repo .as_ref() .expect("BUG: this method may only be called until it is successful"); let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository { git_dir: repo.git_dir().to_owned(), })?; let root_tree_id = match &self.ref_name { Some(reference_val) => Some(repo.find_reference(reference_val)?.peel_to_id_in_place()?), None => repo.head()?.try_peel_to_id_in_place()?, }; let root_tree = match root_tree_id { Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id, None => { return Ok(( self.repo.take().expect("still present"), gix_worktree_state::checkout::Outcome::default(), )); } }; let index = gix_index::State::from_tree(&root_tree, &repo.objects, repo.config.protect_options()?) 
.map_err(|err| Error::IndexFromTree { id: root_tree, source: err, })?; let mut index = gix_index::File::from_state(index, repo.index_path()); let mut opts = repo .config .checkout_options(repo, gix_worktree::stack::state::attributes::Source::IdMapping)?; opts.destination_is_initially_empty = true; let mut files = progress.add_child_with_id("checkout".to_string(), ProgressId::CheckoutFiles.into()); let mut bytes = progress.add_child_with_id("writing".to_string(), ProgressId::BytesWritten.into()); files.init(Some(index.entries().len()), crate::progress::count("files")); bytes.init(None, crate::progress::bytes()); let start = std::time::Instant::now(); let outcome = gix_worktree_state::checkout( &mut index, workdir, repo.objects.clone().into_arc()?, &files, &bytes, should_interrupt, opts, )?; files.show_throughput(start); bytes.show_throughput(start); index.write(Default::default())?; Ok((self.repo.take().expect("still present").clone(), outcome)) } } } /// Access impl PrepareCheckout { /// Get access to the repository while the checkout isn't yet completed. /// /// # Panics /// /// If the checkout is completed and the [`Repository`] was already passed on to the caller. pub fn repo(&self) -> &Repository { self.repo .as_ref() .expect("present as checkout operation isn't complete") } } /// Consumption impl PrepareCheckout { /// Persist the contained repository as is even if an error may have occurred when checking out the main working tree. 
pub fn persist(mut self) -> Repository { self.repo.take().expect("present and consumed once") } } impl Drop for PrepareCheckout { fn drop(&mut self) { if let Some(repo) = self.repo.take() { std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok(); } } } impl From for Repository { fn from(prep: PrepareCheckout) -> Self { prep.persist() } } gix-0.69.1/src/clone/fetch/mod.rs000064400000000000000000000246601046102023000146250ustar 00000000000000use crate::bstr::BString; use crate::bstr::ByteSlice; use crate::clone::PrepareFetch; /// The error returned by [`PrepareFetch::fetch_only()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Connect(#[from] crate::remote::connect::Error), #[error(transparent)] PrepareFetch(#[from] crate::remote::fetch::prepare::Error), #[error(transparent)] Fetch(#[from] crate::remote::fetch::Error), #[error(transparent)] RemoteInit(#[from] crate::remote::init::Error), #[error("Custom configuration of remote to clone from failed")] RemoteConfiguration(#[source] Box), #[error("Custom configuration of connection to use when cloning failed")] RemoteConnection(#[source] Box), #[error(transparent)] RemoteName(#[from] crate::config::remote::symbolic_name::Error), #[error(transparent)] ParseConfig(#[from] crate::config::overrides::Error), #[error(transparent)] ApplyConfig(#[from] crate::config::Error), #[error("Failed to load repo-local git configuration before writing")] LoadConfig(#[from] gix_config::file::init::from_paths::Error), #[error("Failed to store configured remote in memory")] SaveConfig(#[from] crate::remote::save::AsError), #[error("Failed to write repository configuration to disk")] SaveConfigIo(#[from] std::io::Error), #[error("The remote HEAD points to a reference named {head_ref_name:?} which is invalid.")] InvalidHeadRef { source: gix_validate::reference::name::Error, head_ref_name: crate::bstr::BString, }, #[error("Failed to update HEAD with values from remote")] 
HeadUpdate(#[from] crate::reference::edit::Error), #[error("The remote didn't have any ref that matched '{}'", wanted.as_ref().as_bstr())] RefNameMissing { wanted: gix_ref::PartialName }, #[error("The remote has {} refs for '{}', try to use a specific name: {}", candidates.len(), wanted.as_ref().as_bstr(), candidates.iter().filter_map(|n| n.to_str().ok()).collect::>().join(", "))] RefNameAmbiguous { wanted: gix_ref::PartialName, candidates: Vec, }, } /// Modification impl PrepareFetch { /// Fetch a pack and update local branches according to refspecs, providing `progress` and checking `should_interrupt` to stop /// the operation. /// On success, the persisted repository is returned, and this method must not be called again to avoid a **panic**. /// On error, the method may be called again to retry as often as needed. /// /// If the remote repository was empty, that is newly initialized, the returned repository will also be empty and like /// it was newly initialized. /// /// Note that all data we created will be removed once this instance drops if the operation wasn't successful. /// /// ### Note for users of `async` /// /// Even though `async` is technically supported, it will still be blocking in nature as it uses a lot of non-async writes /// and computation under the hood. Thus it should be spawned into a runtime which can handle blocking futures. #[gix_protocol::maybe_async::maybe_async] pub async fn fetch_only

( &mut self, mut progress: P, should_interrupt: &std::sync::atomic::AtomicBool, ) -> Result<(crate::Repository, crate::remote::fetch::Outcome), Error> where P: crate::NestedProgress, P::SubProgress: 'static, { use crate::{bstr::ByteVec, remote, remote::fetch::RefLogMessage}; let repo = self .repo .as_mut() .expect("user error: multiple calls are allowed only until it succeeds"); if !self.config_overrides.is_empty() { let mut snapshot = repo.config_snapshot_mut(); snapshot.append_config(&self.config_overrides, gix_config::Source::Api)?; snapshot.commit()?; } let remote_name = match self.remote_name.as_ref() { Some(name) => name.to_owned(), None => repo .config .resolved .string(crate::config::tree::Clone::DEFAULT_REMOTE_NAME) .map(|n| crate::config::tree::Clone::DEFAULT_REMOTE_NAME.try_into_symbolic_name(n)) .transpose()? .unwrap_or_else(|| "origin".into()), }; let mut remote = repo.remote_at(self.url.clone())?; if remote.fetch_specs.is_empty() { remote = remote .with_refspecs( Some(format!("+refs/heads/*:refs/remotes/{remote_name}/*").as_str()), remote::Direction::Fetch, ) .expect("valid static spec"); } let mut clone_fetch_tags = None; if let Some(f) = self.configure_remote.as_mut() { remote = f(remote).map_err(Error::RemoteConfiguration)?; } else { clone_fetch_tags = remote::fetch::Tags::All.into(); } let config = util::write_remote_to_local_config_file(&mut remote, remote_name.clone())?; // Now we are free to apply remote configuration we don't want to be written to disk. if let Some(fetch_tags) = clone_fetch_tags { remote = remote.with_fetch_tags(fetch_tags); } // Add HEAD after the remote was written to config, we need it to know what to check out later, and assure // the ref that HEAD points to is present no matter what. 
let head_local_tracking_branch = format!("refs/remotes/{remote_name}/HEAD"); let head_refspec = gix_refspec::parse( format!("HEAD:{head_local_tracking_branch}").as_str().into(), gix_refspec::parse::Operation::Fetch, ) .expect("valid") .to_owned(); let pending_pack: remote::fetch::Prepare<'_, '_, _> = { let mut connection = remote.connect(remote::Direction::Fetch).await?; if let Some(f) = self.configure_connection.as_mut() { f(&mut connection).map_err(Error::RemoteConnection)?; } let mut fetch_opts = { let mut opts = self.fetch_options.clone(); if !opts.extra_refspecs.contains(&head_refspec) { opts.extra_refspecs.push(head_refspec.clone()); } if let Some(ref_name) = &self.ref_name { opts.extra_refspecs.push( gix_refspec::parse(ref_name.as_ref().as_bstr(), gix_refspec::parse::Operation::Fetch) .expect("partial names are valid refspecs") .to_owned(), ); } opts }; match connection.prepare_fetch(&mut progress, fetch_opts.clone()).await { Ok(prepare) => prepare, Err(remote::fetch::prepare::Error::RefMap(remote::ref_map::Error::InitRefMap( gix_protocol::fetch::refmap::init::Error::MappingValidation(err), ))) if err.issues.len() == 1 && fetch_opts.extra_refspecs.contains(&head_refspec) && matches!( err.issues.first(), Some(gix_refspec::match_group::validate::Issue::Conflict { destination_full_ref_name, .. }) if *destination_full_ref_name == head_local_tracking_branch ) => { let head_refspec_idx = fetch_opts .extra_refspecs .iter() .enumerate() .find_map(|(idx, spec)| (*spec == head_refspec).then_some(idx)) .expect("it's contained"); // On the very special occasion that we fail as there is a remote `refs/heads/HEAD` reference that clashes // with our implicit refspec, retry without it. Maybe this tells us that we shouldn't have that implicit // refspec, as git can do this without connecting twice. let connection = remote.connect(remote::Direction::Fetch).await?; fetch_opts.extra_refspecs.remove(head_refspec_idx); connection.prepare_fetch(&mut progress, fetch_opts).await? 
} Err(err) => return Err(err.into()), } }; // Assure problems with custom branch names fail early, not after getting the pack or during negotiation. if let Some(ref_name) = &self.ref_name { util::find_custom_refname(pending_pack.ref_map(), ref_name)?; } if pending_pack.ref_map().object_hash != repo.object_hash() { unimplemented!("configure repository to expect a different object hash as advertised by the server") } let reflog_message = { let mut b = self.url.to_bstring(); b.insert_str(0, "clone: from "); b }; let outcome = pending_pack .with_write_packed_refs_only(true) .with_reflog_message(RefLogMessage::Override { message: reflog_message.clone(), }) .with_shallow(self.shallow.clone()) .receive(&mut progress, should_interrupt) .await?; util::append_config_to_repo_config(repo, config); util::update_head( repo, &outcome.ref_map, reflog_message.as_ref(), remote_name.as_ref(), self.ref_name.as_ref(), )?; Ok((self.repo.take().expect("still present"), outcome)) } /// Similar to [`fetch_only()`][Self::fetch_only()`], but passes ownership to a utility type to configure a checkout operation. #[cfg(all(feature = "worktree-mutation", feature = "blocking-network-client"))] pub fn fetch_then_checkout

( &mut self, progress: P, should_interrupt: &std::sync::atomic::AtomicBool, ) -> Result<(crate::clone::PrepareCheckout, crate::remote::fetch::Outcome), Error> where P: crate::NestedProgress, P::SubProgress: 'static, { let (repo, fetch_outcome) = self.fetch_only(progress, should_interrupt)?; Ok(( crate::clone::PrepareCheckout { repo: repo.into(), ref_name: self.ref_name.clone(), }, fetch_outcome, )) } } mod util; gix-0.69.1/src/clone/fetch/util.rs000064400000000000000000000245571046102023000150300ustar 00000000000000use std::{borrow::Cow, io::Write}; use gix_ref::{ transaction::{LogChange, RefLog}, FullNameRef, PartialName, }; use super::Error; use crate::{ bstr::{BStr, BString, ByteSlice}, Repository, }; enum WriteMode { Overwrite, Append, } #[allow(clippy::result_large_err)] pub fn write_remote_to_local_config_file( remote: &mut crate::Remote<'_>, remote_name: BString, ) -> Result, Error> { let mut config = gix_config::File::new(local_config_meta(remote.repo)); remote.save_as_to(remote_name, &mut config)?; write_to_local_config(&config, WriteMode::Append)?; Ok(config) } fn local_config_meta(repo: &Repository) -> gix_config::file::Metadata { let meta = repo.config.resolved.meta().clone(); assert_eq!( meta.source, gix_config::Source::Local, "local path is the default for new sections" ); meta } fn write_to_local_config(config: &gix_config::File<'static>, mode: WriteMode) -> std::io::Result<()> { assert_eq!( config.meta().source, gix_config::Source::Local, "made for appending to local configuration file" ); let mut local_config = std::fs::OpenOptions::new() .create(false) .write(matches!(mode, WriteMode::Overwrite)) .append(matches!(mode, WriteMode::Append)) .open(config.meta().path.as_deref().expect("local config with path set"))?; local_config.write_all(config.detect_newline_style())?; config.write_to_filter(&mut local_config, |s| s.meta().source == gix_config::Source::Local) } pub fn append_config_to_repo_config(repo: &mut Repository, config: 
gix_config::File<'static>) { let repo_config = gix_features::threading::OwnShared::make_mut(&mut repo.config.resolved); repo_config.append(config); } /// HEAD cannot be written by means of refspec by design, so we have to do it manually here. Also create the pointed-to ref /// if we have to, as it might not have been naturally included in the ref-specs. /// Lastly, use `ref_name` if it was provided instead, and let `HEAD` point to it. pub fn update_head( repo: &mut Repository, ref_map: &crate::remote::fetch::RefMap, reflog_message: &BStr, remote_name: &BStr, ref_name: Option<&PartialName>, ) -> Result<(), Error> { use gix_ref::{ transaction::{PreviousValue, RefEdit}, Target, }; let head_info = match ref_name { Some(ref_name) => Some(find_custom_refname(ref_map, ref_name)?), None => ref_map.remote_refs.iter().find_map(|r| { Some(match r { gix_protocol::handshake::Ref::Symbolic { full_ref_name, target, tag: _, object, } if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target.as_bstr())), gix_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => { (Some(object.as_ref()), None) } gix_protocol::handshake::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => { (None, Some(target.as_bstr())) } _ => return None, }) }), }; let Some((head_peeled_id, head_ref)) = head_info else { return Ok(()); }; let head: gix_ref::FullName = "HEAD".try_into().expect("valid"); let reflog_message = || LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: reflog_message.to_owned(), }; match head_ref { Some(referent) => { let referent: gix_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef { head_ref_name: referent.to_owned(), source: err, })?; repo.refs .transaction() .packed_refs(gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates( Box::new(&repo.objects), )) .prepare( { let mut edits = vec![RefEdit { change: gix_ref::transaction::Change::Update { log: reflog_message(), 
expected: PreviousValue::Any, new: Target::Symbolic(referent.clone()), }, name: head.clone(), deref: false, }]; if let Some(head_peeled_id) = head_peeled_id { edits.push(RefEdit { change: gix_ref::transaction::Change::Update { log: reflog_message(), expected: PreviousValue::Any, new: Target::Object(head_peeled_id.to_owned()), }, name: referent.clone(), deref: false, }); }; edits }, gix_lock::acquire::Fail::Immediately, gix_lock::acquire::Fail::Immediately, ) .map_err(crate::reference::edit::Error::from)? .commit( repo.committer() .transpose() .map_err(|err| Error::HeadUpdate(crate::reference::edit::Error::ParseCommitterTime(err)))?, ) .map_err(crate::reference::edit::Error::from)?; if let Some(head_peeled_id) = head_peeled_id { let mut log = reflog_message(); log.mode = RefLog::Only; repo.edit_reference(RefEdit { change: gix_ref::transaction::Change::Update { log, expected: PreviousValue::Any, new: Target::Object(head_peeled_id.to_owned()), }, name: head, deref: false, })?; } setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?; } None => { repo.edit_reference(RefEdit { change: gix_ref::transaction::Change::Update { log: reflog_message(), expected: PreviousValue::Any, new: Target::Object( head_peeled_id .expect("detached heads always point to something") .to_owned(), ), }, name: head, deref: false, })?; } }; Ok(()) } pub(super) fn find_custom_refname<'a>( ref_map: &'a crate::remote::fetch::RefMap, ref_name: &PartialName, ) -> Result<(Option<&'a gix_hash::oid>, Option<&'a BStr>), Error> { let group = gix_refspec::MatchGroup::from_fetch_specs(Some( gix_refspec::parse(ref_name.as_ref().as_bstr(), gix_refspec::parse::Operation::Fetch) .expect("partial names are valid refs"), )); // TODO: to fix ambiguity, implement priority system let filtered_items: Vec<_> = ref_map .mappings .iter() .filter_map(|m| { m.remote .as_name() .and_then(|name| m.remote.as_id().map(|id| (name, id))) }) .map(|(full_ref_name, target)| gix_refspec::match_group::Item { 
full_ref_name, target, object: None, }) .collect(); let res = group.match_remotes(filtered_items.iter().copied()); match res.mappings.len() { 0 => Err(Error::RefNameMissing { wanted: ref_name.clone(), }), 1 => { let item = filtered_items[res.mappings[0] .item_index .expect("we map by name only and have no object-id in refspec")]; Ok((Some(item.target), Some(item.full_ref_name))) } _ => Err(Error::RefNameAmbiguous { wanted: ref_name.clone(), candidates: res .mappings .iter() .filter_map(|m| match m.lhs { gix_refspec::match_group::SourceRef::FullName(name) => Some(name.to_owned()), gix_refspec::match_group::SourceRef::ObjectId(_) => None, }) .collect(), }), } } /// Set up the remote configuration for `branch` so that it points to itself, but on the remote, if and only if currently /// saved refspecs are able to match it. /// For that we reload the remote of `remote_name` and use its `ref_specs` for match. fn setup_branch_config( repo: &mut Repository, branch: &FullNameRef, branch_id: Option<&gix_hash::oid>, remote_name: &BStr, ) -> Result<(), Error> { let short_name = match branch.category_and_short_name() { Some((gix_ref::Category::LocalBranch, shortened)) => match shortened.to_str() { Ok(s) => s, Err(_) => return Ok(()), }, _ => return Ok(()), }; let remote = repo .find_remote(remote_name) .expect("remote was just created and must be visible in config"); let group = gix_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(gix_refspec::RefSpec::to_ref)); let null = gix_hash::ObjectId::null(repo.object_hash()); let res = group.match_remotes( Some(gix_refspec::match_group::Item { full_ref_name: branch.as_bstr(), target: branch_id.unwrap_or(&null), object: None, }) .into_iter(), ); if !res.mappings.is_empty() { let mut config = repo.config_snapshot_mut(); let mut section = config .new_section("branch", Some(Cow::Owned(short_name.into()))) .expect("section header name is always valid per naming rules, our input branch name is valid"); 
section.push("remote".try_into().expect("valid at compile time"), Some(remote_name)); section.push( "merge".try_into().expect("valid at compile time"), Some(branch.as_bstr()), ); write_to_local_config(&config, WriteMode::Overwrite)?; config.commit().expect("configuration we set is valid"); } Ok(()) } gix-0.69.1/src/clone/mod.rs000064400000000000000000000172361046102023000135350ustar 00000000000000#![allow(clippy::result_large_err)] use crate::{bstr::BString, config::tree::gitoxide, remote}; type ConfigureRemoteFn = Box) -> Result, Box>>; #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] type ConfigureConnectionFn = Box< dyn FnMut( &mut remote::Connection<'_, '_, Box>, ) -> Result<(), Box>, >; /// A utility to collect configuration on how to fetch from a remote and initiate a fetch operation. It will delete the newly /// created repository on when dropped without successfully finishing a fetch. #[must_use] pub struct PrepareFetch { /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user repo: Option, /// The name of the remote, which defaults to `origin` if not overridden. remote_name: Option, /// Additional config `values` that are applied in-memory before starting the fetch process. config_overrides: Vec, /// A function to configure a remote prior to fetching a pack. configure_remote: Option, /// A function to configure a connection before using it. #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] configure_connection: Option, /// Options for preparing a fetch operation. 
#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] fetch_options: remote::ref_map::Options, /// The url to clone from #[cfg_attr(not(feature = "blocking-network-client"), allow(dead_code))] url: gix_url::Url, /// How to handle shallow clones #[cfg_attr(not(feature = "blocking-network-client"), allow(dead_code))] shallow: remote::fetch::Shallow, /// The name of the reference to fetch. If `None`, the reference pointed to by `HEAD` will be checked out. #[cfg_attr(not(feature = "blocking-network-client"), allow(dead_code))] ref_name: Option, } /// The error returned by [`PrepareFetch::new()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Init(#[from] crate::init::Error), #[error(transparent)] UrlParse(#[from] gix_url::parse::Error), #[error("Failed to turn a the relative file url \"{}\" into an absolute one", url.to_bstring())] CanonicalizeUrl { url: gix_url::Url, source: gix_path::realpath::Error, }, } /// Instantiation impl PrepareFetch { /// Create a new repository at `path` with `create_opts` which is ready to clone from `url`, possibly after making additional adjustments to /// configuration and settings. /// /// Note that this is merely a handle to perform the actual connection to the remote, and if any of it fails the freshly initialized repository /// will be removed automatically as soon as this instance drops. /// /// # Deviation /// /// Similar to `git`, a missing user name and email configuration is not terminal and we will fill it in with dummy values. However, /// instead of deriving values from the system, ours are hardcoded to indicate what happened. 
#[allow(clippy::result_large_err)] pub fn new( url: Url, path: impl AsRef, kind: crate::create::Kind, create_opts: crate::create::Options, open_opts: crate::open::Options, ) -> Result where Url: TryInto, gix_url::parse::Error: From, { Self::new_inner( url.try_into().map_err(gix_url::parse::Error::from)?, path.as_ref(), kind, create_opts, open_opts, ) } #[allow(clippy::result_large_err)] fn new_inner( mut url: gix_url::Url, path: &std::path::Path, kind: crate::create::Kind, mut create_opts: crate::create::Options, open_opts: crate::open::Options, ) -> Result { create_opts.destination_must_be_empty = true; let mut repo = crate::ThreadSafeRepository::init_opts(path, kind, create_opts, open_opts)?.to_thread_local(); url.canonicalize(repo.options.current_dir_or_empty()) .map_err(|err| Error::CanonicalizeUrl { url: url.clone(), source: err, })?; if repo.committer().is_none() { let mut config = gix_config::File::new(gix_config::file::Metadata::api()); config .set_raw_value(&gitoxide::Committer::NAME_FALLBACK, "no name configured during clone") .expect("works - statically known"); config .set_raw_value(&gitoxide::Committer::EMAIL_FALLBACK, "noEmailAvailable@example.com") .expect("works - statically known"); let mut repo_config = repo.config_snapshot_mut(); repo_config.append(config); repo_config.commit().expect("configuration is still valid"); } Ok(PrepareFetch { url, #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] fetch_options: Default::default(), repo: Some(repo), config_overrides: Vec::new(), remote_name: None, configure_remote: None, #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] configure_connection: None, shallow: remote::fetch::Shallow::NoChange, ref_name: None, }) } } /// A utility to collect configuration on how to perform a checkout into a working tree, /// and when dropped without checking out successfully the fetched repository will be deleted from disk. 
#[must_use] #[cfg(feature = "worktree-mutation")] #[derive(Debug)] pub struct PrepareCheckout { /// A freshly initialized repository which is owned by us, or `None` if it was successfully checked out. pub(self) repo: Option, /// The name of the reference to check out. If `None`, the reference pointed to by `HEAD` will be checked out. pub(self) ref_name: Option, } // This module encapsulates functionality that works with both feature toggles. Can be combined with `fetch` // once async and clone are a thing. #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] mod access_feat { use crate::clone::PrepareFetch; /// Builder impl PrepareFetch { /// Set a callback to use for configuring the connection to use right before connecting to the remote. /// /// It is most commonly used for custom configuration. // TODO: tests pub fn configure_connection( mut self, f: impl FnMut( &mut crate::remote::Connection<'_, '_, Box>, ) -> Result<(), Box> + 'static, ) -> Self { self.configure_connection = Some(Box::new(f)); self } /// Set additional options to adjust parts of the fetch operation that are not affected by the git configuration. pub fn with_fetch_options(mut self, opts: crate::remote::ref_map::Options) -> Self { self.fetch_options = opts; self } } } /// #[cfg(any(feature = "async-network-client-async-std", feature = "blocking-network-client"))] pub mod fetch; mod access; /// #[cfg(feature = "worktree-mutation")] pub mod checkout; gix-0.69.1/src/commit.rs000064400000000000000000000264041046102023000131430ustar 00000000000000//! #![allow(clippy::empty_docs)] /// An empty array of a type usable with the `gix::easy` API to help declaring no parents should be used pub const NO_PARENT_IDS: [gix_hash::ObjectId; 0] = []; /// The error returned by [`commit(…)`][crate::Repository::commit()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ParseTime(#[from] crate::config::time::Error), #[error("Committer identity is not configured")] CommitterMissing, #[error("Author identity is not configured")] AuthorMissing, #[error(transparent)] ReferenceNameValidation(#[from] gix_ref::name::Error), #[error(transparent)] WriteObject(#[from] crate::object::write::Error), #[error(transparent)] ReferenceEdit(#[from] crate::reference::edit::Error), } /// #[cfg(feature = "revision")] pub mod describe { use std::borrow::Cow; use gix_hash::ObjectId; use gix_hashtable::HashMap; use crate::{bstr::BStr, ext::ObjectIdExt, Repository}; /// The result of [`try_resolve()`][Platform::try_resolve()]. pub struct Resolution<'repo> { /// The outcome of the describe operation. pub outcome: gix_revision::describe::Outcome<'static>, /// The id to describe. pub id: crate::Id<'repo>, } impl Resolution<'_> { /// Turn this instance into something displayable. pub fn format(self) -> Result, Error> { let prefix = self.id.shorten()?; Ok(self.outcome.into_format(prefix.hex_len())) } /// Turn this instance into something displayable, possibly with dirty-suffix. /// /// If `dirty_suffix` is `Some(suffix)`, a possibly expensive [dirty check](crate::Repository::is_dirty()) will be /// performed so that the `suffix` is appended to the output. If it is `None`, no check will be performed and /// there will be no suffix. /// Note that obtaining the dirty-state of the repository can be expensive. #[cfg(feature = "status")] pub fn format_with_dirty_suffix( self, dirty_suffix: impl Into>, ) -> Result, Error> { let prefix = self.id.shorten()?; let mut dirty_suffix = dirty_suffix.into(); if dirty_suffix.is_some() && !self.id.repo.is_dirty()? { dirty_suffix.take(); } let mut format = self.outcome.into_format(prefix.hex_len()); format.dirty_suffix = dirty_suffix; Ok(format) } } /// The error returned by [`try_format()`][Platform::try_format()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenCache(#[from] crate::repository::commit_graph_if_enabled::Error), #[error(transparent)] Describe(#[from] gix_revision::describe::Error), #[error("Could not produce an unambiguous shortened id for formatting.")] ShortId(#[from] crate::id::shorten::Error), #[error(transparent)] RefIter(#[from] crate::reference::iter::Error), #[error(transparent)] RefIterInit(#[from] crate::reference::iter::init::Error), #[error(transparent)] #[cfg(feature = "status")] DetermineIsDirty(#[from] crate::status::is_dirty::Error), } /// A selector to choose what kind of references should contribute to names. #[derive(Default, Debug, Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)] pub enum SelectRef { /// Only use annotated tags for names. #[default] AnnotatedTags, /// Use all tags for names, annotated or plain reference. AllTags, /// Use all references, including local branch names. AllRefs, } impl SelectRef { fn names(&self, repo: &Repository) -> Result>, Error> { let platform = repo.references()?; Ok(match self { SelectRef::AllTags | SelectRef::AllRefs => { let mut refs: Vec<_> = match self { SelectRef::AllRefs => platform.all()?, SelectRef::AllTags => platform.tags()?, _ => unreachable!(), } .filter_map(Result::ok) .filter_map(|mut r: crate::Reference<'_>| { let target_id = r.target().try_id().map(ToOwned::to_owned); let peeled_id = r.peel_to_id_in_place().ok()?; let (prio, tag_time) = match target_id { Some(target_id) if peeled_id != *target_id => { let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?; (1, tag.tagger().ok()??.time.seconds) } _ => (0, 0), }; ( peeled_id.inner, prio, tag_time, Cow::from(r.inner.name.shorten().to_owned()), ) .into() }) .collect(); // By priority, then by time ascending, then lexicographically. // More recent entries overwrite older ones due to collection into hashmap. 
refs.sort_by( |(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| { a_prio .cmp(b_prio) .then_with(|| a_time.cmp(b_time)) .then_with(|| b_name.cmp(a_name)) }, ); refs.into_iter().map(|(a, _, _, b)| (a, b)).collect() } SelectRef::AnnotatedTags => { let mut peeled_commits_and_tag_date: Vec<_> = platform .tags()? .filter_map(Result::ok) .filter_map(|r: crate::Reference<'_>| { // TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be // so rather follow symrefs till the first object and then peel tags after the first object was found. let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?; let tag_time = tag.tagger().ok().and_then(|s| s.map(|s| s.time.seconds)).unwrap_or(0); let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id; Some((commit_id, tag_time, Cow::::from(r.name().shorten().to_owned()))) }) .collect(); // Sort by time ascending, then lexicographically. // More recent entries overwrite older ones due to collection into hashmap. peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| { a_time.cmp(b_time).then_with(|| b_name.cmp(a_name)) }); peeled_commits_and_tag_date .into_iter() .map(|(a, _, c)| (a, c)) .collect() } }) } } /// A support type to allow configuring a `git describe` operation pub struct Platform<'repo> { pub(crate) id: gix_hash::ObjectId, /// The owning repository. pub repo: &'repo crate::Repository, pub(crate) select: SelectRef, pub(crate) first_parent: bool, pub(crate) id_as_fallback: bool, pub(crate) max_candidates: usize, } impl<'repo> Platform<'repo> { /// Configure which names to `select` from which describe can chose. pub fn names(mut self, select: SelectRef) -> Self { self.select = select; self } /// If true, shorten the graph traversal time by just traversing the first parent of merge commits. 
pub fn traverse_first_parent(mut self, first_parent: bool) -> Self { self.first_parent = first_parent; self } /// Only consider the given amount of candidates, instead of the default of 10. pub fn max_candidates(mut self, candidates: usize) -> Self { self.max_candidates = candidates; self } /// If true, even if no candidate is available a format will always be produced. pub fn id_as_fallback(mut self, use_fallback: bool) -> Self { self.id_as_fallback = use_fallback; self } /// Try to find a name for the configured commit id using all prior configuration, returning `Some(describe::Format)` /// if one was found, or `None` if that wasn't the case. pub fn try_format(&self) -> Result>, Error> { self.try_resolve()?.map(Resolution::format).transpose() } /// Try to find a name for the configured commit id using all prior configuration, returning `Some(Outcome)` /// if one was found. /// /// The outcome provides additional information, but leaves the caller with the burden /// /// # Performance /// /// It is greatly recommended to [assure an object cache is set](crate::Repository::object_cache_size_if_unset()) /// to save ~40% of time. pub fn try_resolve_with_cache( &self, cache: Option<&'_ gix_commitgraph::Graph>, ) -> Result>, Error> { let mut graph = self.repo.revision_graph(cache); let outcome = gix_revision::describe( &self.id, &mut graph, gix_revision::describe::Options { name_by_oid: self.select.names(self.repo)?, fallback_to_oid: self.id_as_fallback, first_parent: self.first_parent, max_candidates: self.max_candidates, }, )?; Ok(outcome.map(|outcome| Resolution { outcome, id: self.id.attach(self.repo), })) } /// Like [`Self::try_resolve_with_cache()`], but obtains the commitgraph-cache internally for a single use. /// /// # Performance /// /// Prefer to use the [`Self::try_resolve_with_cache()`] method when processing more than one commit at a time. 
pub fn try_resolve(&self) -> Result>, Error> { let cache = self.repo.commit_graph_if_enabled()?; self.try_resolve_with_cache(cache.as_ref()) } /// Like [`try_format()`](Self::try_format()), but turns `id_as_fallback()` on to always produce a format. pub fn format(&mut self) -> Result, Error> { self.id_as_fallback = true; Ok(self.try_format()?.expect("BUG: fallback must always produce a format")) } } } gix-0.69.1/src/config/cache/access.rs000064400000000000000000000545051046102023000154270ustar 00000000000000#![allow(clippy::result_large_err)] use gix_config::file::Metadata; use gix_lock::acquire::Fail; use std::{borrow::Cow, path::PathBuf, time::Duration}; use crate::{ config, config::{ boolean, cache::util::{ApplyLeniency, ApplyLeniencyDefaultValue}, tree::{Core, Key}, Cache, }, remote, repository::identity, }; /// Access impl Cache { #[cfg(feature = "blob-diff")] pub(crate) fn diff_algorithm(&self) -> Result { use crate::config::{cache::util::ApplyLeniencyDefault, diff::algorithm::Error, tree::Diff}; self.diff_algorithm .get_or_try_init(|| { let name = self .resolved .string(Diff::ALGORITHM) .unwrap_or_else(|| Cow::Borrowed("myers".into())); config::tree::Diff::ALGORITHM .try_into_algorithm(name) .or_else(|err| match err { Error::Unimplemented { .. 
} if self.lenient_config => Ok(gix_diff::blob::Algorithm::Histogram), err => Err(err), }) .with_lenient_default(self.lenient_config) }) .copied() } #[cfg(feature = "blob-diff")] pub(crate) fn diff_drivers(&self) -> Result, config::diff::drivers::Error> { use crate::config::cache::util::ApplyLeniencyDefault; let mut out = Vec::::new(); for section in self .resolved .sections_by_name("diff") .into_iter() .flatten() .filter(|s| (self.filter_config_section)(s.meta())) { let Some(name) = section.header().subsection_name().filter(|n| !n.is_empty()) else { continue; }; let driver = match out.iter_mut().find(|d| d.name == name) { Some(existing) => existing, None => { out.push(gix_diff::blob::Driver { name: name.into(), ..Default::default() }); out.last_mut().expect("just pushed") } }; if let Some(binary) = section.value_implicit("binary") { driver.is_binary = config::tree::Diff::DRIVER_BINARY .try_into_binary(binary) .with_leniency(self.lenient_config) .map_err(|err| config::diff::drivers::Error { name: driver.name.clone(), attribute: "binary", source: Box::new(err), })?; } if let Some(command) = section.value(config::tree::Diff::DRIVER_COMMAND.name) { driver.command = command.into_owned().into(); } if let Some(textconv) = section.value(config::tree::Diff::DRIVER_TEXTCONV.name) { driver.binary_to_text_command = textconv.into_owned().into(); } if let Some(algorithm) = section.value("algorithm") { driver.algorithm = config::tree::Diff::DRIVER_ALGORITHM .try_into_algorithm(algorithm) .or_else(|err| match err { config::diff::algorithm::Error::Unimplemented { .. } if self.lenient_config => { Ok(gix_diff::blob::Algorithm::Histogram) } err => Err(err), }) .with_lenient_default(self.lenient_config) .map_err(|err| config::diff::drivers::Error { name: driver.name.clone(), attribute: "algorithm", source: Box::new(err), })? 
.into(); } } Ok(out) } #[cfg(feature = "merge")] pub(crate) fn merge_drivers(&self) -> Result, config::merge::drivers::Error> { let mut out = Vec::::new(); for section in self .resolved .sections_by_name("merge") .into_iter() .flatten() .filter(|s| (self.filter_config_section)(s.meta())) { let Some(name) = section.header().subsection_name().filter(|n| !n.is_empty()) else { continue; }; let driver = match out.iter_mut().find(|d| d.name == name) { Some(existing) => existing, None => { out.push(gix_merge::blob::Driver { name: name.into(), display_name: name.into(), ..Default::default() }); out.last_mut().expect("just pushed") } }; if let Some(command) = section.value(config::tree::Merge::DRIVER_COMMAND.name) { driver.command = command.into_owned(); } if let Some(recursive_name) = section.value(config::tree::Merge::DRIVER_RECURSIVE.name) { driver.recursive = Some(recursive_name.into_owned()); } } Ok(out) } #[cfg(feature = "merge")] pub(crate) fn merge_pipeline_options( &self, ) -> Result { Ok(gix_merge::blob::pipeline::Options { large_file_threshold_bytes: self.big_file_threshold()?, }) } #[cfg(feature = "blob-diff")] pub(crate) fn diff_pipeline_options( &self, ) -> Result { Ok(gix_diff::blob::pipeline::Options { large_file_threshold_bytes: self.big_file_threshold()?, fs: self.fs_capabilities()?, }) } #[cfg(feature = "blob-diff")] pub(crate) fn diff_renames(&self) -> Result<(Option, bool), crate::diff::new_rewrites::Error> { self.diff_renames .get_or_try_init(|| crate::diff::new_rewrites(&self.resolved, self.lenient_config)) .copied() } #[cfg(feature = "blob-diff")] pub(crate) fn big_file_threshold(&self) -> Result { Ok(self .resolved .integer("core.bigFileThreshold") .map(|number| Core::BIG_FILE_THRESHOLD.try_into_u64(number)) .transpose() .with_leniency(self.lenient_config)? .unwrap_or(512 * 1024 * 1024)) } /// Returns a user agent for use with servers. 
#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] pub(crate) fn user_agent_tuple(&self) -> (&'static str, Option>) { use config::tree::Gitoxide; let agent = self .user_agent .get_or_init(|| { self.resolved .string(Gitoxide::USER_AGENT.logical_name().as_str()) .map_or_else(|| crate::env::agent().into(), |s| s.to_string()) }) .to_owned(); ("agent", Some(gix_protocol::agent(agent).into())) } /// Return `true` if packet-tracing is enabled. Lenient and defaults to `false`. #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] pub(crate) fn trace_packet(&self) -> bool { use config::tree::Gitoxide; self.resolved .boolean(Gitoxide::TRACE_PACKET) .and_then(Result::ok) .unwrap_or_default() } pub(crate) fn personas(&self) -> &identity::Personas { self.personas .get_or_init(|| identity::Personas::from_config_and_env(&self.resolved)) } pub(crate) fn url_rewrite(&self) -> &remote::url::Rewrite { self.url_rewrite .get_or_init(|| remote::url::Rewrite::from_config(&self.resolved, self.filter_config_section)) } #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub(crate) fn url_scheme(&self) -> Result<&remote::url::SchemePermission, config::protocol::allow::Error> { self.url_scheme .get_or_try_init(|| remote::url::SchemePermission::from_config(&self.resolved, self.filter_config_section)) } pub(crate) fn may_use_commit_graph(&self) -> Result { const DEFAULT: bool = true; self.resolved.boolean("core.commitGraph").map_or(Ok(DEFAULT), |res| { Core::COMMIT_GRAPH .enrich_error(res) .with_lenient_default_value(self.lenient_config, DEFAULT) }) } /// Returns (file-timeout, pack-refs timeout) pub(crate) fn lock_timeout( &self, ) -> Result<(gix_lock::acquire::Fail, gix_lock::acquire::Fail), config::lock_timeout::Error> { let mut out: [gix_lock::acquire::Fail; 2] = Default::default(); for (idx, (key, default_ms)) in [(&Core::FILES_REF_LOCK_TIMEOUT, 100), (&Core::PACKED_REFS_TIMEOUT, 1000)] 
.into_iter() .enumerate() { out[idx] = self .resolved .integer_filter(key, &mut self.filter_config_section.clone()) .map(|res| key.try_into_lock_timeout(res)) .transpose() .with_leniency(self.lenient_config)? .unwrap_or_else(|| Fail::AfterDurationWithBackoff(Duration::from_millis(default_ms))); } Ok((out[0], out[1])) } /// The path to the user-level excludes file to ignore certain files in the worktree. #[cfg(feature = "excludes")] pub(crate) fn excludes_file(&self) -> Option> { self.trusted_file_path(Core::EXCLUDES_FILE)? .map(std::borrow::Cow::into_owned) .into() } /// A helper to obtain a file from trusted configuration at `section_name`, `subsection_name`, and `key`, which is interpolated /// if present. pub(crate) fn trusted_file_path( &self, key: impl gix_config::AsKey, ) -> Option, gix_config::path::interpolate::Error>> { trusted_file_path( &self.resolved, key, &mut self.filter_config_section.clone(), self.lenient_config, self.environment, ) } pub(crate) fn apply_leniency(&self, res: Option>) -> Result, E> { res.transpose().with_leniency(self.lenient_config) } pub(crate) fn fs_capabilities(&self) -> Result { Ok(gix_fs::Capabilities { precompose_unicode: boolean(self, "core.precomposeUnicode", &Core::PRECOMPOSE_UNICODE, false)?, ignore_case: boolean(self, "core.ignoreCase", &Core::IGNORE_CASE, false)?, executable_bit: boolean(self, "core.fileMode", &Core::FILE_MODE, true)?, symlink: boolean(self, "core.symlinks", &Core::SYMLINKS, true)?, }) } #[cfg(feature = "index")] pub(crate) fn stat_options(&self) -> Result { use crate::config::tree::gitoxide; Ok(gix_index::entry::stat::Options { trust_ctime: boolean(self, "core.trustCTime", &Core::TRUST_C_TIME, true)?, use_nsec: boolean(self, "gitoxide.core.useNsec", &gitoxide::Core::USE_NSEC, false)?, use_stdev: boolean(self, "gitoxide.core.useStdev", &gitoxide::Core::USE_STDEV, false)?, check_stat: self .apply_leniency( self.resolved .string(Core::CHECK_STAT) .map(|v| Core::CHECK_STAT.try_into_checkstat(v)), )? 
.unwrap_or(true), }) } #[cfg(any(feature = "index", feature = "tree-editor"))] pub(crate) fn protect_options(&self) -> Result { const IS_WINDOWS: bool = cfg!(windows); const IS_MACOS: bool = cfg!(target_os = "macos"); const ALWAYS_ON_FOR_SAFETY: bool = true; Ok(gix_validate::path::component::Options { protect_windows: config::tree::gitoxide::Core::PROTECT_WINDOWS .enrich_error( self.resolved .boolean(config::tree::gitoxide::Core::PROTECT_WINDOWS) .unwrap_or(Ok(IS_WINDOWS)), ) .with_lenient_default_value(self.lenient_config, IS_WINDOWS)?, protect_hfs: config::tree::Core::PROTECT_HFS .enrich_error( self.resolved .boolean(config::tree::Core::PROTECT_HFS) .unwrap_or(Ok(IS_MACOS)), ) .with_lenient_default_value(self.lenient_config, IS_MACOS)?, protect_ntfs: config::tree::Core::PROTECT_NTFS .enrich_error( self.resolved .boolean(config::tree::Core::PROTECT_NTFS) .unwrap_or(Ok(ALWAYS_ON_FOR_SAFETY)), ) .with_lenient_default_value(self.lenient_config, ALWAYS_ON_FOR_SAFETY)?, }) } /// Collect everything needed to checkout files into a worktree. /// Note that some of the options being returned will be defaulted so safe settings, the caller might have to override them /// depending on the use-case. 
#[cfg(feature = "worktree-mutation")] pub(crate) fn checkout_options( &self, repo: &crate::Repository, attributes_source: gix_worktree::stack::state::attributes::Source, ) -> Result { use crate::config::tree::gitoxide; let git_dir = repo.git_dir(); let thread_limit = self.apply_leniency( self.resolved .integer_filter("checkout.workers", &mut self.filter_config_section.clone()) .map(|value| crate::config::tree::Checkout::WORKERS.try_from_workers(value)), )?; let capabilities = self.fs_capabilities()?; let filters = { let mut filters = gix_filter::Pipeline::new(repo.command_context()?, crate::filter::Pipeline::options(repo)?); if let Ok(mut head) = repo.head() { let ctx = filters.driver_context_mut(); ctx.ref_name = head.referent_name().map(|name| name.as_bstr().to_owned()); ctx.treeish = head.peel_to_commit_in_place().ok().map(|commit| commit.id); } filters }; let filter_process_delay = if boolean( self, "gitoxide.core.filterProcessDelay", &gitoxide::Core::FILTER_PROCESS_DELAY, true, )? { gix_filter::driver::apply::Delay::Allow } else { gix_filter::driver::apply::Delay::Forbid }; Ok(gix_worktree_state::checkout::Options { filter_process_delay, validate: self.protect_options()?, filters, attributes: self .assemble_attribute_globals(git_dir, attributes_source, self.attributes)? .0, fs: capabilities, thread_limit, destination_is_initially_empty: false, overwrite_existing: false, keep_going: false, stat_options: self.stat_options().map_err(|err| match err { config::stat_options::Error::ConfigCheckStat(err) => { config::checkout_options::Error::ConfigCheckStat(err) } config::stat_options::Error::ConfigBoolean(err) => config::checkout_options::Error::ConfigBoolean(err), })?, }) } #[cfg(feature = "excludes")] pub(crate) fn assemble_exclude_globals( &self, git_dir: &std::path::Path, overrides: Option, source: gix_worktree::stack::state::ignore::Source, buf: &mut Vec, ) -> Result { let excludes_file = match self.excludes_file().transpose()? 
{ Some(user_path) => Some(user_path), None => self.xdg_config_path("ignore")?, }; Ok(gix_worktree::stack::state::Ignore::new( overrides.unwrap_or_default(), gix_ignore::Search::from_git_dir(git_dir, excludes_file, buf)?, None, source, )) } // TODO: at least one test, maybe related to core.attributesFile configuration. #[cfg(feature = "attributes")] pub(crate) fn assemble_attribute_globals( &self, git_dir: &std::path::Path, source: gix_worktree::stack::state::attributes::Source, attributes: crate::open::permissions::Attributes, ) -> Result<(gix_worktree::stack::state::Attributes, Vec), config::attribute_stack::Error> { use gix_attributes::Source; let configured_or_user_attributes = match self.trusted_file_path(Core::ATTRIBUTES_FILE).transpose()? { Some(attributes) => Some(attributes), None => { if attributes.git { self.xdg_config_path("attributes").ok().flatten().map(Cow::Owned) } else { None } } }; let attribute_files = [gix_attributes::Source::GitInstallation, gix_attributes::Source::System] .into_iter() .filter(|source| match source { Source::GitInstallation => attributes.git_binary, Source::System => attributes.system, Source::Git | Source::Local => unreachable!("we don't offer turning this off right now"), }) .filter_map(|source| source.storage_location(&mut Self::make_source_env(self.environment))) .chain(configured_or_user_attributes); let info_attributes_path = git_dir.join("info").join("attributes"); let mut buf = Vec::new(); let mut collection = gix_attributes::search::MetadataCollection::default(); let state = gix_worktree::stack::state::Attributes::new( gix_attributes::Search::new_globals(attribute_files, &mut buf, &mut collection)?, Some(info_attributes_path), source, collection, ); Ok((state, buf)) } #[cfg(feature = "attributes")] pub(crate) fn pathspec_defaults( &self, ) -> Result { use crate::config::tree::gitoxide; let res = gix_pathspec::Defaults::from_environment(&mut |name| { let key = [ &gitoxide::Pathspec::ICASE, &gitoxide::Pathspec::GLOB, 
&gitoxide::Pathspec::NOGLOB, &gitoxide::Pathspec::LITERAL, ] .iter() .find(|key| key.environment_override().expect("set") == name) .expect("we must know all possible input variable names"); let val = self.resolved.string(key).map(gix_path::from_bstr)?; Some(val.into_owned().into()) }); if res.is_err() && self.lenient_config { Ok(gix_pathspec::Defaults::default()) } else { res } } #[cfg(any(feature = "attributes", feature = "excludes"))] pub(crate) fn xdg_config_path( &self, resource_file_name: &str, ) -> Result, gix_sec::permission::Error> { std::env::var_os("XDG_CONFIG_HOME") .map(|path| (PathBuf::from(path), &self.environment.xdg_config_home)) .or_else(|| { gix_path::env::home_dir().map(|mut p| { ( { p.push(".config"); p }, &self.environment.home, ) }) }) .and_then(|(base, permission)| { let resource = base.join("git").join(resource_file_name); permission.check(resource).transpose() }) .transpose() } /// Return the home directory if we are allowed to read it and if it is set in the environment. /// /// We never fail for here even if the permission is set to deny as we `gix-config` will fail later /// if it actually wants to use the home directory - we don't want to fail prematurely. 
#[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] pub(crate) fn home_dir(&self) -> Option { home_dir(self.environment) } } pub(crate) fn trusted_file_path<'config>( config: &'config gix_config::File<'_>, key: impl gix_config::AsKey, filter: impl FnMut(&Metadata) -> bool, lenient_config: bool, environment: crate::open::permissions::Environment, ) -> Option, gix_config::path::interpolate::Error>> { let path = config.path_filter(&key, filter)?; if lenient_config && path.is_empty() { let _key = key.as_key(); gix_trace::info!( "Ignored empty path at {section_name}.{subsection_name:?}.{name} due to lenient configuration", section_name = _key.section_name, subsection_name = _key.subsection_name, name = _key.value_name ); return None; } let install_dir = crate::path::install_dir().ok(); let home = home_dir(environment); let ctx = config::cache::interpolate_context(install_dir.as_deref(), home.as_deref()); Some(path.interpolate(ctx)) } pub(crate) fn home_dir(environment: crate::open::permissions::Environment) -> Option { gix_path::env::home_dir().and_then(|path| environment.home.check_opt(path)) } fn boolean( me: &Cache, full_key: &str, key: &'static config::tree::keys::Boolean, default: bool, ) -> Result { debug_assert_eq!( full_key, key.logical_name(), "BUG: key name and hardcoded name must match" ); Ok(me .apply_leniency(me.resolved.boolean(full_key).map(|v| key.enrich_error(v)))? .unwrap_or(default)) } gix-0.69.1/src/config/cache/incubate.rs000064400000000000000000000120501046102023000157450ustar 00000000000000#![allow(clippy::result_large_err)] use super::{util, Error}; use crate::config::cache::util::{ApplyLeniency, ApplyLeniencyDefaultValue}; use crate::config::tree::{gitoxide, Core, Extensions}; /// A utility to deal with the cyclic dependency between the ref store and the configuration. 
The ref-store needs the /// object hash kind, and the configuration needs the current branch name to resolve conditional includes with `onbranch`. pub(crate) struct StageOne { pub git_dir_config: gix_config::File<'static>, pub buf: Vec, pub is_bare: bool, pub lossy: Option, pub object_hash: gix_hash::Kind, pub reflog: Option, pub precompose_unicode: bool, pub protect_windows: bool, } /// Initialization impl StageOne { pub fn new( common_dir: &std::path::Path, git_dir: &std::path::Path, git_dir_trust: gix_sec::Trust, lossy: Option, lenient: bool, ) -> Result { let mut buf = Vec::with_capacity(512); let mut config = load_config( common_dir.join("config"), &mut buf, gix_config::Source::Local, git_dir_trust, lossy, lenient, )?; // Note that we assume the repo is bare by default unless we are told otherwise. This is relevant if // the repo doesn't have a configuration file. let is_bare = util::config_bool(&config, &Core::BARE, "core.bare", true, lenient)?; let repo_format_version = config .integer("core.repositoryFormatVersion") .map(|version| Core::REPOSITORY_FORMAT_VERSION.try_into_usize(version)) .transpose()? .unwrap_or_default(); let object_hash = (repo_format_version != 1) .then_some(Ok(gix_hash::Kind::Sha1)) .or_else(|| { config .string(Extensions::OBJECT_FORMAT) .map(|format| Extensions::OBJECT_FORMAT.try_into_object_format(format)) }) .transpose()? .unwrap_or(gix_hash::Kind::Sha1); let extension_worktree = util::config_bool( &config, &Extensions::WORKTREE_CONFIG, "extensions.worktreeConfig", false, lenient, )?; if extension_worktree { let worktree_config = load_config( git_dir.join("config.worktree"), &mut buf, gix_config::Source::Worktree, git_dir_trust, lossy, lenient, )?; config.append(worktree_config); }; let precompose_unicode = config .boolean(&Core::PRECOMPOSE_UNICODE) .map(|v| Core::PRECOMPOSE_UNICODE.enrich_error(v)) .transpose() .with_leniency(lenient) .map_err(Error::ConfigBoolean)? 
.unwrap_or_default(); const IS_WINDOWS: bool = cfg!(windows); let protect_windows = gitoxide::Core::PROTECT_WINDOWS .enrich_error( config .boolean(gitoxide::Core::PROTECT_WINDOWS) .unwrap_or(Ok(IS_WINDOWS)), ) .with_lenient_default_value(lenient, IS_WINDOWS)?; let reflog = util::query_refupdates(&config, lenient)?; Ok(StageOne { git_dir_config: config, buf, is_bare, lossy, object_hash, reflog, precompose_unicode, protect_windows, }) } } fn load_config( config_path: std::path::PathBuf, buf: &mut Vec, source: gix_config::Source, git_dir_trust: gix_sec::Trust, lossy: Option, lenient: bool, ) -> Result, Error> { let metadata = gix_config::file::Metadata::from(source) .at(&config_path) .with(git_dir_trust); let mut file = match std::fs::File::open(&config_path) { Ok(f) => f, Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(gix_config::File::new(metadata)), Err(err) => { let err = Error::Io { source: err, path: config_path, }; if lenient { gix_trace::warn!("ignoring: {err:#?}"); return Ok(gix_config::File::new(metadata)); } else { return Err(err); } } }; buf.clear(); if let Err(err) = std::io::copy(&mut file, buf) { let err = Error::Io { source: err, path: config_path, }; if lenient { gix_trace::warn!("ignoring: {err:#?}"); buf.clear(); } else { return Err(err); } }; let config = gix_config::File::from_bytes_owned( buf, metadata, gix_config::file::init::Options { includes: gix_config::file::includes::Options::no_follow(), ..util::base_options(lossy, lenient) }, )?; Ok(config) } gix-0.69.1/src/config/cache/init.rs000064400000000000000000000514471046102023000151330ustar 00000000000000#![allow(clippy::result_large_err)] use std::{borrow::Cow, ffi::OsString}; use gix_sec::Permission; use super::{interpolate_context, util, Error, StageOne}; use crate::{ bstr::BString, config, config::{ cache::util::ApplyLeniency, tree::{gitoxide, Core, Gitoxide, Http}, Cache, }, open, repository::init::setup_objects, }; /// Initialization impl Cache { 
#[allow(clippy::too_many_arguments)] pub fn from_stage_one( StageOne { git_dir_config, mut buf, lossy, is_bare, object_hash, reflog: _, precompose_unicode: _, protect_windows: _, }: StageOne, git_dir: &std::path::Path, branch_name: Option<&gix_ref::FullNameRef>, filter_config_section: fn(&gix_config::file::Metadata) -> bool, git_install_dir: Option<&std::path::Path>, home: Option<&std::path::Path>, environment @ open::permissions::Environment { git_prefix, ssh_prefix: _, xdg_config_home: _, home: _, http_transport, identity, objects, }: open::permissions::Environment, attributes: open::permissions::Attributes, open::permissions::Config { git_binary: use_installation, system: use_system, git: use_git, user: use_user, env: use_env, includes: use_includes, }: open::permissions::Config, lenient_config: bool, api_config_overrides: &[BString], cli_config_overrides: &[BString], ) -> Result { let options = gix_config::file::init::Options { includes: if use_includes { gix_config::file::includes::Options::follow( interpolate_context(git_install_dir, home), gix_config::file::includes::conditional::Context { git_dir: git_dir.into(), branch_name, }, ) } else { gix_config::file::includes::Options::no_follow() }, ..util::base_options(lossy, lenient_config) }; let config = { let git_prefix = &git_prefix; let mut metas = [ gix_config::source::Kind::GitInstallation, gix_config::source::Kind::System, gix_config::source::Kind::Global, ] .iter() .flat_map(|kind| kind.sources()) .filter_map(|source| { match source { gix_config::Source::GitInstallation if !use_installation => return None, gix_config::Source::System if !use_system => return None, gix_config::Source::Git if !use_git => return None, gix_config::Source::User if !use_user => return None, _ => {} } source .storage_location(&mut Self::make_source_env(environment)) .map(|p| (source, p.into_owned())) }) .map(|(source, path)| gix_config::file::Metadata { path: Some(path), source: *source, level: 0, trust: gix_sec::Trust::Full, }); 
let err_on_nonexisting_paths = false; let mut globals = gix_config::File::from_paths_metadata_buf( &mut metas, &mut buf, err_on_nonexisting_paths, gix_config::file::init::Options { includes: gix_config::file::includes::Options::no_follow(), ..options }, ) .map_err(|err| match err { gix_config::file::init::from_paths::Error::Init(err) => Error::from(err), gix_config::file::init::from_paths::Error::Io { source, path } => Error::Io { source, path }, })? .unwrap_or_default(); let local_meta = git_dir_config.meta_owned(); globals.append(git_dir_config); globals.resolve_includes(options)?; if use_env { globals.append(gix_config::File::from_env(options)?.unwrap_or_default()); } if !cli_config_overrides.is_empty() { config::overrides::append(&mut globals, cli_config_overrides, gix_config::Source::Cli, |_| None) .map_err(|err| Error::ConfigOverrides { err, source: gix_config::Source::Cli, })?; } if !api_config_overrides.is_empty() { config::overrides::append(&mut globals, api_config_overrides, gix_config::Source::Api, |_| None) .map_err(|err| Error::ConfigOverrides { err, source: gix_config::Source::Api, })?; } apply_environment_overrides(&mut globals, *git_prefix, http_transport, identity, objects)?; globals.set_meta(local_meta); globals }; let hex_len = util::parse_core_abbrev(&config, object_hash).with_leniency(lenient_config)?; use util::config_bool; let reflog = util::query_refupdates(&config, lenient_config)?; let refs_namespace = util::query_refs_namespace(&config, lenient_config)?; let ignore_case = config_bool(&config, &Core::IGNORE_CASE, "core.ignoreCase", false, lenient_config)?; let use_multi_pack_index = config_bool( &config, &Core::MULTIPACK_INDEX, "core.multiPackIndex", true, lenient_config, )?; #[cfg(feature = "revision")] let object_kind_hint = util::disambiguate_hint(&config, lenient_config)?; let (static_pack_cache_limit_bytes, pack_cache_bytes, object_cache_bytes) = util::parse_object_caches(&config, lenient_config, filter_config_section)?; // NOTE: When 
adding a new initial cache, consider adjusting `reread_values_and_clear_caches()` as well. Ok(Cache { resolved: config.into(), use_multi_pack_index, object_hash, #[cfg(feature = "revision")] object_kind_hint, static_pack_cache_limit_bytes, pack_cache_bytes, object_cache_bytes, reflog, refs_namespace, is_bare, ignore_case, hex_len, filter_config_section, environment, lenient_config, attributes, user_agent: Default::default(), personas: Default::default(), url_rewrite: Default::default(), #[cfg(feature = "blob-diff")] diff_renames: Default::default(), #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] url_scheme: Default::default(), #[cfg(feature = "blob-diff")] diff_algorithm: Default::default(), }) } /// Call this with new `config` to update values and clear caches. Note that none of the values will be applied if a single /// one is invalid. /// However, those that are lazily read won't be re-evaluated right away and might thus pass now but fail later. /// /// Note that we unconditionally re-read all values. pub fn reread_values_and_clear_caches_replacing_config(&mut self, config: crate::Config) -> Result<(), Error> { let prev = std::mem::replace(&mut self.resolved, config); match self.reread_values_and_clear_caches() { Err(err) => { drop(std::mem::replace(&mut self.resolved, prev)); Err(err) } Ok(()) => Ok(()), } } /// Similar to `reread_values_and_clear_caches_replacing_config()`, but works on the existing configuration instead of a passed /// in one that it them makes the default. 
pub fn reread_values_and_clear_caches(&mut self) -> Result<(), Error> { let config = &self.resolved; let hex_len = util::parse_core_abbrev(config, self.object_hash).with_leniency(self.lenient_config)?; use util::config_bool; let ignore_case = config_bool( config, &Core::IGNORE_CASE, "core.ignoreCase", false, self.lenient_config, )?; #[cfg(feature = "revision")] { let object_kind_hint = util::disambiguate_hint(config, self.lenient_config)?; self.object_kind_hint = object_kind_hint; } let reflog = util::query_refupdates(config, self.lenient_config)?; let refs_namespace = util::query_refs_namespace(config, self.lenient_config)?; self.hex_len = hex_len; self.ignore_case = ignore_case; self.reflog = reflog; self.refs_namespace = refs_namespace; self.user_agent = Default::default(); self.personas = Default::default(); self.url_rewrite = Default::default(); #[cfg(feature = "blob-diff")] { self.diff_renames = Default::default(); self.diff_algorithm = Default::default(); } ( self.static_pack_cache_limit_bytes, self.pack_cache_bytes, self.object_cache_bytes, ) = util::parse_object_caches(config, self.lenient_config, self.filter_config_section)?; #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] { self.url_scheme = Default::default(); } Ok(()) } pub(crate) fn make_source_env( crate::open::permissions::Environment { xdg_config_home, git_prefix, home, .. }: open::permissions::Environment, ) -> impl FnMut(&str) -> Option { move |name| { match name { git_ if git_.starts_with("GIT_") => Some(git_prefix), "XDG_CONFIG_HOME" => Some(xdg_config_home), "HOME" => { return if home.is_allowed() { gix_path::env::home_dir().map(Into::into) } else { None } } _ => None, } .and_then(|perm| perm.check_opt(name).and_then(gix_path::env::var)) } } } impl crate::Repository { /// Replace our own configuration with `config` and re-read all cached values, and apply them to select in-memory instances. 
pub(crate) fn reread_values_and_clear_caches_replacing_config( &mut self, config: crate::Config, ) -> Result<(), Error> { let (a, b, c) = ( self.config.static_pack_cache_limit_bytes, self.config.pack_cache_bytes, self.config.object_cache_bytes, ); self.config.reread_values_and_clear_caches_replacing_config(config)?; self.apply_changed_values(); if a != self.config.static_pack_cache_limit_bytes || b != self.config.pack_cache_bytes || c != self.config.object_cache_bytes { setup_objects(&mut self.objects, &self.config); } Ok(()) } fn apply_changed_values(&mut self) { self.refs.write_reflog = util::reflog_or_default(self.config.reflog, self.work_dir().is_some()); self.refs.namespace.clone_from(&self.config.refs_namespace); } } fn apply_environment_overrides( config: &mut gix_config::File<'static>, git_prefix: Permission, http_transport: Permission, identity: Permission, objects: Permission, ) -> Result<(), Error> { fn env(key: &'static dyn config::tree::Key) -> &'static str { key.the_environment_override() } fn var_as_bstring(var: &str, perm: Permission) -> Option { perm.check_opt(var) .and_then(std::env::var_os) .and_then(|val| gix_path::os_string_into_bstring(val).ok()) } let mut env_override = gix_config::File::new(gix_config::file::Metadata::from(gix_config::Source::EnvOverride)); for (section_name, subsection_name, permission, data) in [ ( "core", None, git_prefix, &[{ let key = &Core::WORKTREE; (env(key), key.name) }][..], ), ( "http", None, http_transport, &[ ("GIT_HTTP_LOW_SPEED_LIMIT", "lowSpeedLimit"), ("GIT_HTTP_LOW_SPEED_TIME", "lowSpeedTime"), ("GIT_HTTP_USER_AGENT", "userAgent"), { let key = &Http::SSL_CA_INFO; (env(key), key.name) }, { let key = &Http::SSL_VERSION; (env(key), key.name) }, ][..], ), ( "gitoxide", None, git_prefix, &[{ let key = &Gitoxide::TRACE_PACKET; (env(key), key.name) }], ), ( "gitoxide", Some(Cow::Borrowed("https".into())), http_transport, &[ ("HTTPS_PROXY", gitoxide::Https::PROXY.name), ("https_proxy", gitoxide::Https::PROXY.name), 
], ), ( "gitoxide", Some(Cow::Borrowed("http".into())), http_transport, &[ ("ALL_PROXY", "allProxy"), { let key = &gitoxide::Http::ALL_PROXY; (env(key), key.name) }, ("NO_PROXY", "noProxy"), { let key = &gitoxide::Http::NO_PROXY; (env(key), key.name) }, { let key = &gitoxide::Http::PROXY; (env(key), key.name) }, { let key = &gitoxide::Http::VERBOSE; (env(key), key.name) }, { let key = &gitoxide::Http::PROXY_AUTH_METHOD; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("http".into())), git_prefix, &[{ let key = &gitoxide::Http::SSL_NO_VERIFY; (env(key), key.name) }], ), ( "gitoxide", Some(Cow::Borrowed("credentials".into())), git_prefix, &[ { let key = &gitoxide::Credentials::TERMINAL_PROMPT; (env(key), key.name) }, { let key = &gitoxide::Credentials::HELPER_STDERR; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("committer".into())), identity, &[ { let key = &gitoxide::Committer::NAME_FALLBACK; (env(key), key.name) }, { let key = &gitoxide::Committer::EMAIL_FALLBACK; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("core".into())), git_prefix, &[ { let key = &gitoxide::Core::SHALLOW_FILE; (env(key), key.name) }, { let key = &gitoxide::Core::REFS_NAMESPACE; (env(key), key.name) }, { let key = &gitoxide::Core::EXTERNAL_COMMAND_STDERR; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("author".into())), identity, &[ { let key = &gitoxide::Author::NAME_FALLBACK; (env(key), key.name) }, { let key = &gitoxide::Author::EMAIL_FALLBACK; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("commit".into())), git_prefix, &[ { let key = &gitoxide::Commit::COMMITTER_DATE; (env(key), key.name) }, { let key = &gitoxide::Commit::AUTHOR_DATE; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("allow".into())), http_transport, &[("GIT_PROTOCOL_FROM_USER", "protocolFromUser")], ), ( "gitoxide", Some(Cow::Borrowed("user".into())), identity, &[{ let key = &gitoxide::User::EMAIL_FALLBACK; (env(key), key.name) 
}], ), ( "gitoxide", Some(Cow::Borrowed("objects".into())), objects, &[ { let key = &gitoxide::Objects::REPLACE_REF_BASE; (env(key), key.name) }, { let key = &gitoxide::Objects::CACHE_LIMIT; (env(key), key.name) }, ], ), ( "gitoxide", Some(Cow::Borrowed("ssh".into())), git_prefix, &[{ let key = &gitoxide::Ssh::COMMAND_WITHOUT_SHELL_FALLBACK; (env(key), key.name) }], ), ( "gitoxide", Some(Cow::Borrowed("pathspec".into())), git_prefix, &[ { let key = &gitoxide::Pathspec::LITERAL; (env(key), key.name) }, { let key = &gitoxide::Pathspec::GLOB; (env(key), key.name) }, { let key = &gitoxide::Pathspec::NOGLOB; (env(key), key.name) }, { let key = &gitoxide::Pathspec::ICASE; (env(key), key.name) }, ], ), ( "ssh", None, git_prefix, &[{ let key = &config::tree::Ssh::VARIANT; (env(key), key.name) }], ), #[cfg(feature = "blob-diff")] ( "diff", None, git_prefix, &[{ let key = &config::tree::Diff::EXTERNAL; (env(key), key.name) }], ), ] { let mut section = env_override .new_section(section_name, subsection_name) .expect("statically known valid section name"); for (var, key) in data { if let Some(value) = var_as_bstring(var, permission) { section.push_with_comment( (*key).try_into().expect("statically known to be valid"), Some(value.as_ref()), format!("from {var}").as_str(), ); } } if section.num_values() == 0 { let id = section.id(); env_override.remove_section_by_id(id); } } { let mut section = env_override .new_section("core", None) .expect("statically known valid section name"); for (var, key, permission) in [ { let key = &Core::DELTA_BASE_CACHE_LIMIT; (env(key), key.name, objects) }, { let key = &Core::SSH_COMMAND; (env(key), key.name, git_prefix) }, { let key = &Core::USE_REPLACE_REFS; (env(key), key.name, objects) }, ] { if let Some(value) = var_as_bstring(var, permission) { section.push_with_comment( key.try_into().expect("statically known to be valid"), Some(value.as_ref()), format!("from {var}").as_str(), ); } } if section.num_values() == 0 { let id = section.id(); 
env_override.remove_section_by_id(id); } } if !env_override.is_void() { config.append(env_override); } Ok(()) } gix-0.69.1/src/config/cache/mod.rs000064400000000000000000000005341046102023000147360ustar 00000000000000use super::{Cache, Error}; mod incubate; pub(crate) use incubate::StageOne; mod init; impl std::fmt::Debug for Cache { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Cache").finish_non_exhaustive() } } pub(crate) mod access; pub(crate) mod util; pub(crate) use util::interpolate_context; gix-0.69.1/src/config/cache/util.rs000064400000000000000000000126301046102023000151340ustar 00000000000000#![allow(clippy::result_large_err)] use super::Error; use crate::{ config, config::tree::{gitoxide, Core}, }; pub(crate) fn interpolate_context<'a>( git_install_dir: Option<&'a std::path::Path>, home_dir: Option<&'a std::path::Path>, ) -> gix_config::path::interpolate::Context<'a> { gix_config::path::interpolate::Context { git_install_dir, home_dir, home_for_user: Some(gix_config::path::interpolate::home_for_user), // TODO: figure out how to configure this } } pub(crate) fn base_options(lossy: Option, lenient: bool) -> gix_config::file::init::Options<'static> { gix_config::file::init::Options { lossy: lossy.unwrap_or(!cfg!(debug_assertions)), ignore_io_errors: lenient, ..Default::default() } } pub(crate) fn config_bool( config: &gix_config::File<'_>, key: &'static config::tree::keys::Boolean, key_str: &str, default: bool, lenient: bool, ) -> Result { use config::tree::Key; debug_assert_eq!( key_str, key.logical_name(), "BUG: key name and hardcoded name must match" ); config .boolean(key_str) .map_or(Ok(default), |res| key.enrich_error(res)) .map_err(Error::from) .with_lenient_default(lenient) } pub(crate) fn query_refupdates( config: &gix_config::File<'static>, lenient_config: bool, ) -> Result, Error> { let key = "core.logAllRefUpdates"; Core::LOG_ALL_REF_UPDATES .try_into_ref_updates(config.boolean(key)) 
.with_leniency(lenient_config) .map_err(Into::into) } pub(crate) fn query_refs_namespace( config: &gix_config::File<'static>, lenient_config: bool, ) -> Result, config::refs_namespace::Error> { let key = "gitoxide.core.refsNamespace"; config .string(key) .map(|ns| gitoxide::Core::REFS_NAMESPACE.try_into_refs_namespace(ns)) .transpose() .with_leniency(lenient_config) } pub(crate) fn reflog_or_default( config_reflog: Option, has_worktree: bool, ) -> gix_ref::store::WriteReflog { config_reflog.unwrap_or(if has_worktree { gix_ref::store::WriteReflog::Normal } else { gix_ref::store::WriteReflog::Disable }) } /// Return `(pack_cache_bytes, object_cache_bytes)` as parsed from gix-config pub(crate) fn parse_object_caches( config: &gix_config::File<'static>, lenient: bool, mut filter_config_section: fn(&gix_config::file::Metadata) -> bool, ) -> Result<(Option, Option, usize), Error> { let static_pack_cache_limit = config .integer_filter("gitoxide.core.deltaBaseCacheLimit", &mut filter_config_section) .map(|res| gitoxide::Core::DEFAULT_PACK_CACHE_MEMORY_LIMIT.try_into_usize(res)) .transpose() .with_leniency(lenient)?; let pack_cache_bytes = config .integer_filter("core.deltaBaseCacheLimit", &mut filter_config_section) .map(|res| Core::DELTA_BASE_CACHE_LIMIT.try_into_usize(res)) .transpose() .with_leniency(lenient)?; let object_cache_bytes = config .integer_filter("gitoxide.objects.cacheLimit", &mut filter_config_section) .map(|res| gitoxide::Objects::CACHE_LIMIT.try_into_usize(res)) .transpose() .with_leniency(lenient)? .unwrap_or_default(); Ok((static_pack_cache_limit, pack_cache_bytes, object_cache_bytes)) } pub(crate) fn parse_core_abbrev( config: &gix_config::File<'static>, object_hash: gix_hash::Kind, ) -> Result, Error> { Ok(config .string("core.abbrev") .map(|abbrev| Core::ABBREV.try_into_abbreviation(abbrev, object_hash)) .transpose()? 
.flatten()) } #[cfg(feature = "revision")] pub(crate) fn disambiguate_hint( config: &gix_config::File<'static>, lenient_config: bool, ) -> Result, config::key::GenericErrorWithValue> { match config.string("core.disambiguate") { None => Ok(None), Some(value) => Core::DISAMBIGUATE .try_into_object_kind_hint(value) .with_leniency(lenient_config), } } // TODO: Use a specialization here once trait specialization is stabilized. Would be perfect here for `T: Default`. pub trait ApplyLeniency { fn with_leniency(self, is_lenient: bool) -> Self; } pub trait ApplyLeniencyDefault { fn with_lenient_default(self, is_lenient: bool) -> Self; } pub trait ApplyLeniencyDefaultValue { fn with_lenient_default_value(self, is_lenient: bool, default: T) -> Self; } impl ApplyLeniency for Result, E> { fn with_leniency(self, is_lenient: bool) -> Self { match self { Ok(v) => Ok(v), Err(_) if is_lenient => Ok(None), Err(err) => Err(err), } } } impl ApplyLeniencyDefault for Result where T: Default, { fn with_lenient_default(self, is_lenient: bool) -> Self { match self { Ok(v) => Ok(v), Err(_) if is_lenient => Ok(T::default()), Err(err) => Err(err), } } } impl ApplyLeniencyDefaultValue for Result { fn with_lenient_default_value(self, is_lenient: bool, default: T) -> Self { match self { Ok(v) => Ok(v), Err(_) if is_lenient => Ok(default), Err(err) => Err(err), } } } gix-0.69.1/src/config/mod.rs000064400000000000000000000603711046102023000137000ustar 00000000000000pub use gix_config::*; use gix_features::threading::OnceCell; use crate::{bstr::BString, repository::identity, Repository}; pub(crate) mod cache; mod snapshot; #[cfg(feature = "credentials")] pub use snapshot::credential_helpers; /// pub mod overrides; pub mod tree; pub use tree::root::Tree; /// A platform to access configuration values as read from disk. /// /// Note that these values won't update even if the underlying file(s) change. pub struct Snapshot<'repo> { /// The owning repository. 
pub repo: &'repo Repository, } /// A platform to access configuration values and modify them in memory, while making them available when this platform is dropped /// as form of auto-commit. /// Note that the values will only affect this instance of the parent repository, and not other clones that may exist. /// /// Note that these values won't update even if the underlying file(s) change. /// /// Use [`forget()`][Self::forget()] to not apply any of the changes. // TODO: make it possible to load snapshots with reloading via .config() and write mutated snapshots back to disk which should be the way // to affect all instances of a repo, probably via `config_mut()` and `config_mut_at()`. pub struct SnapshotMut<'repo> { /// The owning repository. pub repo: Option<&'repo mut Repository>, pub(crate) config: gix_config::File<'static>, } /// A utility structure created by [`SnapshotMut::commit_auto_rollback()`] that restores the previous configuration on drop. pub struct CommitAutoRollback<'repo> { /// The owning repository. pub repo: Option<&'repo mut Repository>, pub(crate) prev_config: crate::Config, } /// pub mod section { /// A filter that returns `true` for `meta` if the meta-data attached to a configuration section can be trusted. /// This is either the case if its file is fully trusted, or if it's a section from a system-wide file. 
pub fn is_trusted(meta: &gix_config::file::Metadata) -> bool { meta.trust == gix_sec::Trust::Full || meta.source.kind() != gix_config::source::Kind::Repository } } /// pub mod set_value { /// The error produced when calling [`SnapshotMut::set(_subsection)?_value()`][crate::config::SnapshotMut::set_value()] #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] SetRaw(#[from] gix_config::file::set_raw_value::Error), #[error(transparent)] Validate(#[from] crate::config::tree::key::validate::Error), #[error("The key needs a subsection parameter to be valid.")] SubSectionRequired, #[error("The key must not be used with a subsection")] SubSectionForbidden, } } /// The error returned when failing to initialize the repository configuration. /// /// This configuration is on the critical path when opening a repository. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigBoolean(#[from] boolean::Error), #[error(transparent)] ConfigUnsigned(#[from] unsigned_integer::Error), #[error(transparent)] ConfigTypedString(#[from] key::GenericErrorWithValue), #[error(transparent)] RefsNamespace(#[from] refs_namespace::Error), #[error("Cannot handle objects formatted as {:?}", .name)] UnsupportedObjectFormat { name: BString }, #[error(transparent)] CoreAbbrev(#[from] abbrev::Error), #[error("Could not read configuration file at \"{}\"", path.display())] Io { source: std::io::Error, path: std::path::PathBuf, }, #[error(transparent)] Init(#[from] gix_config::file::init::Error), #[error(transparent)] ResolveIncludes(#[from] gix_config::file::includes::Error), #[error(transparent)] FromEnv(#[from] gix_config::file::init::from_env::Error), #[error("The path {path:?} at the 'core.worktree' configuration could not be interpolated")] PathInterpolation { path: BString, source: gix_config::path::interpolate::Error, }, #[error("{source:?} configuration overrides at open or init time could not be 
applied.")] ConfigOverrides { #[source] err: overrides::Error, source: gix_config::Source, }, } /// pub mod merge { /// pub mod pipeline_options { /// The error produced when obtaining options needed to fill in [gix_merge::blob::pipeline::Options]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] BigFileThreshold(#[from] crate::config::unsigned_integer::Error), } } /// pub mod drivers { /// The error produced when obtaining a list of [Drivers](gix_merge::blob::Driver). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigBoolean(#[from] crate::config::boolean::Error), } } } /// pub mod diff { /// pub mod algorithm { use crate::bstr::BString; /// The error produced when obtaining `diff.algorithm`. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Unknown diff algorithm named '{name}'")] Unknown { name: BString }, #[error("The '{name}' algorithm is not yet implemented")] Unimplemented { name: BString }, } } /// pub mod pipeline_options { /// The error produced when obtaining options needed to fill in [gix_diff::blob::pipeline::Options]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FilesystemCapabilities(#[from] crate::config::boolean::Error), #[error(transparent)] BigFileThreshold(#[from] crate::config::unsigned_integer::Error), } } /// pub mod drivers { use crate::bstr::BString; /// The error produced when obtaining a list of [Drivers](gix_diff::blob::Driver). #[derive(Debug, thiserror::Error)] #[error("Failed to parse value of 'diff.{name}.{attribute}'")] pub struct Error { /// The name of the driver. pub name: BString, /// The name of the attribute we tried to parse. pub attribute: &'static str, /// The actual error that occurred. 
pub source: Box, } } } /// pub mod stat_options { /// The error produced when collecting stat information, and returned by [Repository::stat_options()](crate::Repository::stat_options()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigCheckStat(#[from] super::key::GenericErrorWithValue), #[error(transparent)] ConfigBoolean(#[from] super::boolean::Error), } } /// #[cfg(feature = "attributes")] pub mod checkout_options { /// The error produced when collecting all information needed for checking out files into a worktree. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigCheckStat(#[from] super::key::GenericErrorWithValue), #[error(transparent)] ConfigBoolean(#[from] super::boolean::Error), #[error(transparent)] CheckoutWorkers(#[from] super::checkout::workers::Error), #[error(transparent)] Attributes(#[from] super::attribute_stack::Error), #[error(transparent)] FilterPipelineOptions(#[from] crate::filter::pipeline::options::Error), #[error(transparent)] CommandContext(#[from] crate::config::command_context::Error), } } /// #[cfg(feature = "attributes")] pub mod command_context { use crate::config; /// The error produced when collecting all information relevant to spawned commands, /// obtained via [Repository::command_context()](crate::Repository::command_context()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Boolean(#[from] config::boolean::Error), #[error(transparent)] ParseBool(#[from] gix_config::value::Error), } } /// pub mod exclude_stack { use std::path::PathBuf; /// The error produced when setting up a stack to query `gitignore` information. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not read repository exclude")] Io(#[from] std::io::Error), #[error(transparent)] EnvironmentPermission(#[from] gix_sec::permission::Error), #[error("The value for `core.excludesFile` could not be read from configuration")] ExcludesFilePathInterpolation(#[from] gix_config::path::interpolate::Error), } } /// pub mod attribute_stack { /// The error produced when setting up the attribute stack to query `gitattributes`. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("An attribute file could not be read")] Io(#[from] std::io::Error), #[error("Failed to interpolate the attribute file configured at `core.attributesFile`")] AttributesFileInterpolation(#[from] gix_config::path::interpolate::Error), } } /// pub mod protocol { /// pub mod allow { use crate::bstr::BString; /// The error returned when obtaining the permission for a particular scheme. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] #[error("The value {value:?} must be allow|deny|user in configuration key protocol{0}.allow", scheme.as_ref().map(|s| format!(".{s}")).unwrap_or_default())] pub struct Error { pub scheme: Option, pub value: BString, } } } /// pub mod ssh_connect_options { /// The error produced when obtaining ssh connection configuration. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] #[error(transparent)] pub struct Error(#[from] super::key::GenericErrorWithValue); } /// pub mod key { use crate::bstr::BString; const fn prefix(kind: char) -> &'static str { match kind { 'n' => "", // nothing 'k' => "The value of key", // generic key 't' => "The date format at key", // time 'i' => "The timeout at key", // timeout 'd' => "The duration [ms] at key", // duration 'b' => "The boolean at key", // boolean 'v' => "The key", // generic key with value 'r' => "The refspec at", // refspec 's' => "The ssl version at", // ssl-version 'u' => "The url at", // url 'w' => "The utf-8 string at", // string _ => panic!("BUG: invalid prefix kind - add a case for it here"), } } const fn suffix(kind: char) -> &'static str { match kind { 'd' => "could not be decoded", // decoding 'i' => "was invalid", // invalid 'u' => "could not be parsed as unsigned integer", // unsigned integer 'p' => "could not be parsed", // parsing _ => panic!("BUG: invalid suffix kind - add a case for it here"), } } /// A generic error suitable to produce decent messages for all kinds of configuration errors with config-key granularity. /// /// This error is meant to be reusable and help produce uniform error messages related to parsing any configuration key. #[derive(Debug, thiserror::Error)] #[error("{} \"{key}{}\"{} {}", prefix(PREFIX), value.as_ref().map(|v| format!("={v}")).unwrap_or_default(), environment_override.as_deref().map(|var| format!(" (possibly from {var})")).unwrap_or_default(), suffix(SUFFIX))] pub struct Error { /// The configuration key that contained the value. pub key: BString, /// The value that was assigned to `key`. pub value: Option, /// The associated environment variable that would override this value. pub environment_override: Option<&'static str>, /// The source of the error if there was one. pub source: Option, } /// Initialization /// Instantiate a new error from the given `key`. 
/// /// Note that specifics of the error message are defined by the `PREFIX` and `SUFFIX` which is usually defined by a typedef. impl From<&'static T> for Error where E: std::error::Error + Send + Sync + 'static, T: super::tree::Key, { fn from(key: &'static T) -> Self { Error { key: key.logical_name().into(), value: None, environment_override: key.environment_override(), source: None, } } } /// Initialization impl Error where E: std::error::Error + Send + Sync + 'static, { /// Instantiate an error with all data from `key` along with the `value` of the key. pub fn from_value(key: &'static impl super::tree::Key, value: BString) -> Self { Error::from(key).with_value(value) } } /// Builder impl Error where E: std::error::Error + Send + Sync + 'static, { /// Attach the given `err` as source. pub fn with_source(mut self, err: E) -> Self { self.source = Some(err); self } /// Attach the given `value` as value we observed when the error was produced. pub fn with_value(mut self, value: BString) -> Self { self.value = Some(value); self } } /// A generic key error for use when it doesn't seem worth it say more than 'key is invalid' along with meta-data. pub type GenericError = Error; /// A generic key error which will also contain a value. pub type GenericErrorWithValue = Error; } /// pub mod encoding { use crate::bstr::BString; /// The error produced when failing to parse the `core.checkRoundTripEncoding` key. #[derive(Debug, thiserror::Error)] #[error("The encoding named '{encoding}' seen in key '{key}={value}' is unsupported")] pub struct Error { /// The configuration key that contained the value. pub key: BString, /// The value that was assigned to `key`. pub value: BString, /// The encoding that failed. pub encoding: BString, } } /// pub mod checkout { /// pub mod workers { use crate::config; /// The error produced when failing to parse the `checkout.workers` key. 
pub type Error = config::key::Error; } } /// pub mod abbrev { use crate::bstr::BString; /// The error describing an incorrect `core.abbrev` value. #[derive(Debug, thiserror::Error)] #[error("Invalid value for 'core.abbrev' = '{}'. It must be between 4 and {}", .value, .max)] pub struct Error { /// The value found in the git configuration pub value: BString, /// The maximum abbreviation length, the length of an object hash. pub max: u8, } } /// pub mod remote { /// pub mod symbolic_name { /// The error produced when failing to produce a symbolic remote name from configuration. pub type Error = super::super::key::Error; } } /// pub mod time { /// The error produced when failing to parse time from configuration. pub type Error = super::key::Error; } /// pub mod lock_timeout { /// The error produced when failing to parse timeout for locks. pub type Error = super::key::Error; } /// pub mod duration { /// The error produced when failing to parse durations (in milliseconds). pub type Error = super::key::Error; } /// pub mod boolean { /// The error produced when failing to parse time from configuration. pub type Error = super::key::Error; } /// pub mod unsigned_integer { /// The error produced when failing to parse a signed integer from configuration. pub type Error = super::key::Error; } /// pub mod url { /// The error produced when failing to parse a url from the configuration. pub type Error = super::key::Error; } /// pub mod string { /// The error produced when failing to interpret configuration as UTF-8 encoded string. pub type Error = super::key::Error; } /// pub mod refspec { /// The error produced when failing to parse a refspec from the configuration. pub type Error = super::key::Error; } /// pub mod refs_namespace { /// The error produced when failing to parse a refspec from the configuration. pub type Error = super::key::Error; } /// pub mod ssl_version { /// The error produced when failing to parse a refspec from the configuration. 
pub type Error = super::key::Error; } /// pub mod transport { use std::borrow::Cow; use crate::bstr::BStr; /// The error produced when configuring a transport for a particular protocol. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error( "Could not interpret configuration key {key:?} as {kind} integer of desired range with value: {actual}" )] InvalidInteger { key: &'static str, kind: &'static str, actual: i64, }, #[error("Could not interpret configuration key {key:?}")] ConfigValue { source: gix_config::value::Error, key: &'static str, }, #[error("Could not interpolate path at key {key:?}")] InterpolatePath { source: gix_config::path::interpolate::Error, key: &'static str, }, #[error("Could not decode value at key {key:?} as UTF-8 string")] IllformedUtf8 { key: Cow<'static, BStr>, source: crate::config::string::Error, }, #[error("Invalid URL passed for configuration")] ParseUrl(#[from] gix_url::parse::Error), #[error("Could obtain configuration for an HTTP url")] Http(#[from] http::Error), } /// pub mod http { use std::borrow::Cow; use crate::bstr::BStr; /// The error produced when configuring a HTTP transport. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Boolean(#[from] crate::config::boolean::Error), #[error(transparent)] UnsignedInteger(#[from] crate::config::unsigned_integer::Error), #[error(transparent)] ConnectTimeout(#[from] crate::config::duration::Error), #[error("The proxy authentication at key `{key}` is invalid")] InvalidProxyAuthMethod { source: crate::config::key::GenericErrorWithValue, key: Cow<'static, BStr>, }, #[error("Could not configure the credential helpers for the authenticated proxy url")] #[cfg(feature = "credentials")] ConfigureProxyAuthenticate(#[from] crate::config::snapshot::credential_helpers::Error), #[error(transparent)] InvalidSslVersion(#[from] crate::config::ssl_version::Error), #[error("The HTTP version must be 'HTTP/2' or 'HTTP/1.1'")] InvalidHttpVersion(#[from] crate::config::key::GenericErrorWithValue), #[error("The follow redirects value 'initial', or boolean true or false")] InvalidFollowRedirects(#[source] crate::config::key::GenericErrorWithValue), } } } /// Utility type to keep pre-obtained configuration values, only for those required during initial setup /// and other basic operations that are common enough to warrant a permanent cache. /// /// All other values are obtained lazily using `OnceCell`. #[derive(Clone)] pub(crate) struct Cache { pub resolved: crate::Config, /// The hex-length to assume when shortening object ids. If `None`, it should be computed based on the approximate object count. pub hex_len: Option, /// true if the repository is designated as 'bare', without work tree. pub is_bare: bool, /// The type of hash to use. pub object_hash: gix_hash::Kind, /// If true, multi-pack indices, whether present or not, may be used by the object database. pub use_multi_pack_index: bool, /// The representation of `core.logallrefupdates`, or `None` if the variable wasn't set. 
pub reflog: Option, /// The representation of `gitoxide.core.refsNamespace`, or `None` if the variable wasn't set. pub refs_namespace: Option, /// The configured user agent for presentation to servers. pub(crate) user_agent: OnceCell, /// identities for later use, lazy initialization. pub(crate) personas: OnceCell, /// A lazily loaded rewrite list for remote urls pub(crate) url_rewrite: OnceCell, /// The lazy-loaded rename information for diffs. #[cfg(feature = "blob-diff")] pub(crate) diff_renames: OnceCell<(Option, bool)>, /// A lazily loaded mapping to know which url schemes to allow #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub(crate) url_scheme: OnceCell, /// The algorithm to use when diffing blobs #[cfg(feature = "blob-diff")] pub(crate) diff_algorithm: OnceCell, /// The amount of bytes to use for a memory backed delta pack cache. If `Some(0)`, no cache is used, if `None` /// a standard cache is used which costs near to nothing and always pays for itself. pub(crate) pack_cache_bytes: Option, /// The amount of bytes to use for caching whole objects, or 0 to turn it off entirely. pub(crate) object_cache_bytes: usize, /// The amount of bytes we can hold in our static LRU cache. Otherwise, go with the defaults. pub(crate) static_pack_cache_limit_bytes: Option, /// The config section filter from the options used to initialize this instance. Keep these in sync! filter_config_section: fn(&gix_config::file::Metadata) -> bool, /// The object kind to pick if a prefix is ambiguous. #[cfg(feature = "revision")] pub object_kind_hint: Option, /// If true, we are on a case-insensitive file system. pub ignore_case: bool, /// If true, we should default what's possible if something is misconfigured, on case by case basis, to be more resilient. /// Also, available in options! Keep in sync! 
pub lenient_config: bool, #[cfg_attr(not(feature = "worktree-mutation"), allow(dead_code))] attributes: crate::open::permissions::Attributes, environment: crate::open::permissions::Environment, // TODO: make core.precomposeUnicode available as well. } /// Utilities shared privately across the crate, for lack of a better place. pub(crate) mod shared { use crate::{ config, config::{cache::util::ApplyLeniency, tree::Core}, }; pub fn is_replace_refs_enabled( config: &gix_config::File<'static>, lenient: bool, mut filter_config_section: fn(&gix_config::file::Metadata) -> bool, ) -> Result, config::boolean::Error> { config .boolean_filter("core.useReplaceRefs", &mut filter_config_section) .map(|b| Core::USE_REPLACE_REFS.enrich_error(b)) .transpose() .with_leniency(lenient) } } gix-0.69.1/src/config/overrides.rs000064400000000000000000000037531046102023000151240ustar 00000000000000use std::convert::TryFrom; use crate::bstr::{BStr, BString, ByteSlice}; /// The error returned by [`SnapshotMut::apply_cli_overrides()`][crate::config::SnapshotMut::append_config()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("{input:?} is not a valid configuration key. 
Examples are 'core.abbrev' or 'remote.origin.url'")] InvalidKey { input: BString }, #[error("Key {key:?} could not be parsed")] SectionKey { key: BString, source: gix_config::parse::section::value_name::Error, }, #[error(transparent)] SectionHeader(#[from] gix_config::parse::section::header::Error), } pub(crate) fn append( config: &mut gix_config::File<'static>, values: impl IntoIterator>, source: gix_config::Source, mut make_comment: impl FnMut(&BStr) -> Option, ) -> Result<(), Error> { let mut file = gix_config::File::new(gix_config::file::Metadata::from(source)); for key_value in values { let key_value = key_value.as_ref(); let mut tokens = key_value.splitn(2, |b| *b == b'=').map(ByteSlice::trim); let key = tokens.next().expect("always one value").as_bstr(); let value = tokens.next(); let key = gix_config::KeyRef::parse_unvalidated(key).ok_or_else(|| Error::InvalidKey { input: key.into() })?; let mut section = file.section_mut_or_create_new(key.section_name, key.subsection_name)?; let value_name = gix_config::parse::section::ValueName::try_from(key.value_name.to_owned()).map_err(|err| { Error::SectionKey { source: err, key: key.value_name.into(), } })?; let comment = make_comment(key_value); let value = value.map(ByteSlice::as_bstr); match comment { Some(comment) => section.push_with_comment(value_name, value, &**comment), None => section.push(value_name, value), }; } config.append(file); Ok(()) } gix-0.69.1/src/config/snapshot/_impls.rs000064400000000000000000000033201046102023000162320ustar 00000000000000use std::{ fmt::{Debug, Formatter}, ops::{Deref, DerefMut}, }; use crate::config::{CommitAutoRollback, Snapshot, SnapshotMut}; impl Debug for Snapshot<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str(&self.repo.config.resolved.to_string()) } } impl Debug for CommitAutoRollback<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str(&self.repo.as_ref().expect("still present").config.resolved.to_string()) } } impl 
Debug for SnapshotMut<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str(&self.config.to_string()) } } impl Drop for SnapshotMut<'_> { fn drop(&mut self) { if let Some(repo) = self.repo.take() { self.commit_inner(repo).ok(); }; } } impl Drop for CommitAutoRollback<'_> { fn drop(&mut self) { if let Some(repo) = self.repo.take() { self.rollback_inner(repo).ok(); } } } impl Deref for SnapshotMut<'_> { type Target = gix_config::File<'static>; fn deref(&self) -> &Self::Target { &self.config } } impl Deref for Snapshot<'_> { type Target = gix_config::File<'static>; fn deref(&self) -> &Self::Target { self.plumbing() } } impl Deref for CommitAutoRollback<'_> { type Target = crate::Repository; fn deref(&self) -> &Self::Target { self.repo.as_ref().expect("always present") } } impl DerefMut for CommitAutoRollback<'_> { fn deref_mut(&mut self) -> &mut Self::Target { self.repo.as_mut().expect("always present") } } impl DerefMut for SnapshotMut<'_> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.config } } gix-0.69.1/src/config/snapshot/access.rs000064400000000000000000000221701046102023000162140ustar 00000000000000#![allow(clippy::result_large_err)] use std::borrow::Cow; use std::ffi::OsStr; use gix_features::threading::OwnShared; use crate::bstr::ByteSlice; use crate::{ bstr::{BStr, BString}, config::{CommitAutoRollback, Snapshot, SnapshotMut}, }; /// Access configuration values, frozen in time, using a `key` which is a `.` separated string of up to /// three tokens, namely `section_name.[subsection_name.]value_name`, like `core.bare` or `remote.origin.url`. /// /// Note that single-value methods always return the last value found, which is the one set most recently in the /// hierarchy of configuration files, aka 'last one wins'. impl<'repo> Snapshot<'repo> { /// Return the boolean at `key`, or `None` if there is no such value or if the value can't be interpreted as /// boolean. 
/// /// For a non-degenerating version, use [`try_boolean(…)`][Self::try_boolean()]. /// /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust. pub fn boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option { self.try_boolean(key).and_then(Result::ok) } /// Like [`boolean()`][Self::boolean()], but it will report an error if the value couldn't be interpreted as boolean. pub fn try_boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option> { self.repo.config.resolved.boolean(key.into()) } /// Return the resolved integer at `key`, or `None` if there is no such value or if the value can't be interpreted as /// integer or exceeded the value range. /// /// For a non-degenerating version, use [`try_integer(…)`][Self::try_integer()]. /// /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust. pub fn integer<'a>(&self, key: impl Into<&'a BStr>) -> Option { self.try_integer(key).and_then(Result::ok) } /// Like [`integer()`][Self::integer()], but it will report an error if the value couldn't be interpreted as boolean. pub fn try_integer<'a>(&self, key: impl Into<&'a BStr>) -> Option> { self.repo.config.resolved.integer(key.into()) } /// Return the string at `key`, or `None` if there is no such value. /// /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust. pub fn string<'a>(&self, key: impl Into<&'a BStr>) -> Option> { self.repo.config.resolved.string(key.into()) } /// Return the trusted and fully interpolated path at `key`, or `None` if there is no such value /// or if no value was found in a trusted file. /// An error occurs if the path could not be interpolated to its final value. 
pub fn trusted_path<'a>( &self, key: impl Into<&'a BStr>, ) -> Option, gix_config::path::interpolate::Error>> { self.repo.config.trusted_file_path(key.into()) } /// Return the trusted string at `key` for launching using [command::prepare()](gix_command::prepare()), /// or `None` if there is no such value or if no value was found in a trusted file. pub fn trusted_program<'a>(&self, key: impl Into<&'a BStr>) -> Option> { let value = self .repo .config .resolved .string_filter(key.into(), &mut self.repo.config.filter_config_section.clone())?; Some(match gix_path::from_bstr(value) { Cow::Borrowed(v) => Cow::Borrowed(v.as_os_str()), Cow::Owned(v) => Cow::Owned(v.into_os_string()), }) } } /// Utilities and additional access impl Snapshot<'_> { /// Returns the underlying configuration implementation for a complete API, despite being a little less convenient. /// /// It's expected that more functionality will move up depending on demand. pub fn plumbing(&self) -> &gix_config::File<'static> { &self.repo.config.resolved } } /// Utilities impl<'repo> SnapshotMut<'repo> { /// Append configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true` /// to the end of the repository configuration, with each section marked with the given `source`. /// /// Note that doing so applies the configuration at the very end, so it will always override what came before it /// even though the `source` is of lower priority as what's there. pub fn append_config( &mut self, values: impl IntoIterator>, source: gix_config::Source, ) -> Result<&mut Self, crate::config::overrides::Error> { crate::config::overrides::append(&mut self.config, values, source, |v| Some(format!("-c {v}").into()))?; Ok(self) } /// Apply all changes made to this instance. /// /// Note that this would also happen once this instance is dropped, but using this method may be more intuitive and won't squelch errors /// in case the new configuration is partially invalid. 
pub fn commit(mut self) -> Result<&'repo mut crate::Repository, crate::config::Error> { let repo = self.repo.take().expect("always present here"); self.commit_inner(repo) } /// Set the value at `key` to `new_value`, possibly creating the section if it doesn't exist yet, or overriding the most recent existing /// value, which will be returned. pub fn set_value<'b>( &mut self, key: &'static dyn crate::config::tree::Key, new_value: impl Into<&'b BStr>, ) -> Result, crate::config::set_value::Error> { if let Some(crate::config::tree::SubSectionRequirement::Parameter(_)) = key.subsection_requirement() { return Err(crate::config::set_value::Error::SubSectionRequired); } let value = new_value.into(); key.validate(value)?; let section = key.section(); let current = match section.parent() { Some(parent) => { self.config .set_raw_value_by(parent.name(), Some(section.name().into()), key.name(), value)? } None => self.config.set_raw_value_by(section.name(), None, key.name(), value)?, }; Ok(current.map(std::borrow::Cow::into_owned)) } /// Set the value at `key` to `new_value` in the given `subsection`, possibly creating the section and sub-section if it doesn't exist yet, /// or overriding the most recent existing value, which will be returned. 
pub fn set_subsection_value<'a, 'b>( &mut self, key: &'static dyn crate::config::tree::Key, subsection: impl Into<&'a BStr>, new_value: impl Into<&'b BStr>, ) -> Result, crate::config::set_value::Error> { if let Some(crate::config::tree::SubSectionRequirement::Never) = key.subsection_requirement() { return Err(crate::config::set_value::Error::SubSectionForbidden); } let value = new_value.into(); key.validate(value)?; let name = key .full_name(Some(subsection.into())) .expect("we know it needs a subsection"); let key = gix_config::KeyRef::parse_unvalidated((**name).as_bstr()) .expect("statically known keys can always be parsed"); let current = self.config .set_raw_value_by(key.section_name, key.subsection_name, key.value_name.to_owned(), value)?; Ok(current.map(std::borrow::Cow::into_owned)) } pub(crate) fn commit_inner( &mut self, repo: &'repo mut crate::Repository, ) -> Result<&'repo mut crate::Repository, crate::config::Error> { repo.reread_values_and_clear_caches_replacing_config(std::mem::take(&mut self.config).into())?; Ok(repo) } /// Create a structure the temporarily commits the changes, but rolls them back when dropped. pub fn commit_auto_rollback(mut self) -> Result, crate::config::Error> { let repo = self.repo.take().expect("this only runs once on consumption"); let prev_config = OwnShared::clone(&repo.config.resolved); Ok(CommitAutoRollback { repo: self.commit_inner(repo)?.into(), prev_config, }) } /// Don't apply any of the changes after consuming this instance, effectively forgetting them, returning the changed configuration. pub fn forget(mut self) -> gix_config::File<'static> { self.repo.take(); std::mem::take(&mut self.config) } } /// Utilities impl<'repo> CommitAutoRollback<'repo> { /// Rollback the changes previously applied and all values before the change. 
pub fn rollback(mut self) -> Result<&'repo mut crate::Repository, crate::config::Error> { let repo = self.repo.take().expect("still present, consumed only once"); self.rollback_inner(repo) } pub(crate) fn rollback_inner( &mut self, repo: &'repo mut crate::Repository, ) -> Result<&'repo mut crate::Repository, crate::config::Error> { repo.reread_values_and_clear_caches_replacing_config(OwnShared::clone(&self.prev_config))?; Ok(repo) } } gix-0.69.1/src/config/snapshot/credential_helpers.rs000064400000000000000000000255321046102023000206140ustar 00000000000000pub use error::Error; use crate::config::Snapshot; mod error { use crate::bstr::BString; /// The error returned by [`Snapshot::credential_helpers()`][super::Snapshot::credential_helpers()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not parse 'useHttpPath' key in section {section}")] InvalidUseHttpPath { section: BString, source: gix_config::value::Error, }, #[error("core.askpass could not be read")] CoreAskpass(#[from] gix_config::path::interpolate::Error), #[error(transparent)] BooleanConfig(#[from] crate::config::boolean::Error), } } impl Snapshot<'_> { /// Returns the configuration for all git-credential helpers from trusted configuration that apply /// to the given `url` along with an action preconfigured to invoke the cascade with. /// For details, please see [this function](function::credential_helpers). 
pub fn credential_helpers( &self, url: gix_url::Url, ) -> Result< ( gix_credentials::helper::Cascade, gix_credentials::helper::Action, gix_prompt::Options<'static>, ), Error, > { let repo = self.repo; function::credential_helpers( url, &repo.config.resolved, repo.config.lenient_config, &mut repo.filter_config_section(), repo.config.environment, false, ) } } pub(super) mod function { use crate::bstr::{ByteSlice, ByteVec}; use crate::config::cache::util::ApplyLeniency; use crate::config::credential_helpers::Error; use crate::config::tree::gitoxide::Credentials; use crate::config::tree::{credential, Core, Credential}; use std::borrow::Cow; /// Returns the configuration for all git-credential helpers from trusted configuration that apply /// to the given `url` along with an action preconfigured to invoke the cascade with to retrieve it. /// This includes `url` which may be altered to contain a user-name as configured. /// /// These can be invoked to obtain credentials. Note that the `url` is expected to be the one used /// to connect to a remote, and thus should already have passed the url-rewrite engine. /// /// * `config` /// - the configuration to obtain credential helper configuration from. /// * `is_lenient_config` /// - if `true`, minor configuration errors will be ignored. /// * `filter` /// - A way to choose which sections in `config` can be trusted. This is important as we will execute programs /// from the paths contained within. /// * `environment` /// - Determines how environment variables can be used. /// - Actually used are `GIT_*` and `SSH_*` environment variables to configure git prompting capabilities. /// * `use_http_path` /// - Typically, this should be false to let the `url` configuration decide if the value should be enabled. /// - If `false`, credentials are effectively per host. /// /// # Deviation /// /// - Invalid urls can't be used to obtain credential helpers as they are rejected early when creating a valid `url` here. 
/// - Parsed urls will automatically drop the port if it's the default, i.e. `http://host:80` becomes `http://host` when parsed. /// This affects the prompt provided to the user, so that git will use the verbatim url, whereas we use `http://host`. /// - Upper-case scheme and host will be lower-cased automatically when parsing into a url, so prompts differ compared to git. /// - A **difference in prompt might affect the matching of getting existing stored credentials**, and it's a question of this being /// a feature or a bug. // TODO: when dealing with `http.*.*` configuration, generalize this algorithm as needed and support precedence. pub fn credential_helpers( mut url: gix_url::Url, config: &gix_config::File<'_>, is_lenient_config: bool, mut filter: impl FnMut(&gix_config::file::Metadata) -> bool, environment: crate::open::permissions::Environment, mut use_http_path: bool, ) -> Result< ( gix_credentials::helper::Cascade, gix_credentials::helper::Action, gix_prompt::Options<'static>, ), Error, > { let mut programs = Vec::new(); let url_had_user_initially = url.user().is_some(); normalize(&mut url); if let Some(credential_sections) = config.sections_by_name_and_filter("credential", &mut filter) { for section in credential_sections { let section = match section.header().subsection_name() { Some(pattern) => gix_url::parse(pattern).ok().and_then(|mut pattern| { normalize(&mut pattern); let is_http = matches!(pattern.scheme, gix_url::Scheme::Https | gix_url::Scheme::Http); let scheme = &pattern.scheme; let host = pattern.host(); let ports = is_http .then(|| (pattern.port_or_default(), url.port_or_default())) .unwrap_or((pattern.port, url.port)); let path = (!(is_http && pattern.path_is_root())).then_some(&pattern.path); if !path.map_or(true, |path| path == &url.path) { return None; } if pattern.user().is_some() && pattern.user() != url.user() { return None; } (scheme == &url.scheme && host_matches(host, url.host()) && ports.0 == ports.1).then_some(( section, 
&credential::UrlParameter::HELPER, &credential::UrlParameter::USERNAME, &credential::UrlParameter::USE_HTTP_PATH, )) }), None => Some(( section, &Credential::HELPER, &Credential::USERNAME, &Credential::USE_HTTP_PATH, )), }; if let Some((section, helper_key, username_key, use_http_path_key)) = section { for value in section.values(helper_key.name) { if value.trim().is_empty() { programs.clear(); } else { programs.push(gix_credentials::Program::from_custom_definition(value.into_owned())); } } if let Some(Some(user)) = (!url_had_user_initially).then(|| { section .value(username_key.name) .filter(|n| !n.trim().is_empty()) .and_then(|n| { let n: Vec<_> = Cow::into_owned(n).into(); n.into_string().ok() }) }) { url.set_user(Some(user)); } if let Some(toggle) = section .value(use_http_path_key.name) .map(|val| { gix_config::Boolean::try_from(val) .map_err(|err| Error::InvalidUseHttpPath { source: err, section: section.header().to_bstring(), }) .map(|b| b.0) }) .transpose()? { use_http_path = toggle; } } } } let allow_git_env = environment.git_prefix.is_allowed(); let allow_ssh_env = environment.ssh_prefix.is_allowed(); let prompt_options = gix_prompt::Options { askpass: crate::config::cache::access::trusted_file_path( config, &Core::ASKPASS, &mut filter, is_lenient_config, environment, ) .transpose() .ignore_empty()? .map(|c| Cow::Owned(c.into_owned())), mode: config .boolean(&Credentials::TERMINAL_PROMPT) .map(|val| Credentials::TERMINAL_PROMPT.enrich_error(val)) .transpose() .with_leniency(is_lenient_config)? .and_then(|val| (!val).then_some(gix_prompt::Mode::Disable)) .unwrap_or_default(), } .apply_environment(allow_git_env, allow_ssh_env, false /* terminal prompt */); Ok(( gix_credentials::helper::Cascade { programs, use_http_path, // The default ssh implementation uses binaries that do their own auth, so our passwords aren't used. 
query_user_only: url.scheme == gix_url::Scheme::Ssh, stderr: config .boolean(&Credentials::HELPER_STDERR) .map(|val| Credentials::HELPER_STDERR.enrich_error(val)) .transpose() .with_leniency(is_lenient_config)? .unwrap_or(true), }, gix_credentials::helper::Action::get_for_url(url.to_bstring()), prompt_options, )) } fn host_matches(pattern: Option<&str>, host: Option<&str>) -> bool { match (pattern, host) { (Some(pattern), Some(host)) => { let lfields = pattern.split('.'); let rfields = host.split('.'); if lfields.clone().count() != rfields.clone().count() { return false; } lfields.zip(rfields).all(|(pat, value)| { gix_glob::wildmatch(pat.into(), value.into(), gix_glob::wildmatch::Mode::empty()) }) } (None, None) => true, (Some(_), None) | (None, Some(_)) => false, } } fn normalize(url: &mut gix_url::Url) { if !url.path_is_root() && url.path.ends_with(b"/") { url.path.pop(); } } trait IgnoreEmptyPath { fn ignore_empty(self) -> Self; } impl IgnoreEmptyPath for Result>, gix_config::path::interpolate::Error> { fn ignore_empty(self) -> Self { match self { Ok(maybe_path) => Ok(maybe_path), Err(gix_config::path::interpolate::Error::Missing { .. }) => Ok(None), Err(err) => Err(err), } } } } gix-0.69.1/src/config/snapshot/mod.rs000064400000000000000000000002631046102023000155310ustar 00000000000000mod _impls; mod access; /// #[cfg(feature = "credentials")] pub mod credential_helpers; #[cfg(feature = "credentials")] pub use credential_helpers::function::credential_helpers; gix-0.69.1/src/config/tree/keys.rs000064400000000000000000000532631046102023000150350ustar 00000000000000#![allow(clippy::result_large_err)] use gix_config::KeyRef; use std::{ borrow::Cow, error::Error, fmt::{Debug, Formatter}, }; use crate::{ bstr::BStr, config, config::tree::{Key, Link, Note, Section, SubSectionRequirement}, }; /// Implements a value without any constraints, i.e. a any value. pub struct Any { /// The key of the value in the git configuration. 
pub name: &'static str, /// The parent section of the key. pub section: &'static dyn Section, /// The subsection requirement to use. pub subsection_requirement: Option, /// A link to other resources that might be eligible as value. pub link: Option, /// A note about this key. pub note: Option, /// The way validation and transformation should happen. validate: T, } /// Init impl Any { /// Create a new instance from `name` and `section` pub const fn new(name: &'static str, section: &'static dyn Section) -> Self { Any::new_with_validate(name, section, validate::All) } } /// Init other validate implementations impl Any { /// Create a new instance from `name` and `section` pub const fn new_with_validate(name: &'static str, section: &'static dyn Section, validate: T) -> Self { Any { name, section, subsection_requirement: Some(SubSectionRequirement::Never), link: None, note: None, validate, } } } /// Builder impl Any { /// Set the subsection requirement to non-default values. pub const fn with_subsection_requirement(mut self, requirement: Option) -> Self { self.subsection_requirement = requirement; self } /// Associate an environment variable with this key. /// /// This is mainly useful for enriching error messages. pub const fn with_environment_override(mut self, var: &'static str) -> Self { self.link = Some(Link::EnvironmentOverride(var)); self } /// Set a link to another key which serves as fallback to provide a value if this key is not set. pub const fn with_fallback(mut self, key: &'static dyn Key) -> Self { self.link = Some(Link::FallbackKey(key)); self } /// Attach an informative message to this key. pub const fn with_note(mut self, message: &'static str) -> Self { self.note = Some(Note::Informative(message)); self } /// Inform about a deviation in how this key is interpreted. 
pub const fn with_deviation(mut self, message: &'static str) -> Self { self.note = Some(Note::Deviation(message)); self } } /// Conversion impl Any { /// Try to convert `value` into a refspec suitable for the `op` operation. pub fn try_into_refspec( &'static self, value: std::borrow::Cow<'_, BStr>, op: gix_refspec::parse::Operation, ) -> Result { gix_refspec::parse(value.as_ref(), op) .map(|spec| spec.to_owned()) .map_err(|err| config::refspec::Error::from_value(self, value.into_owned()).with_source(err)) } /// Try to interpret `value` as UTF-8 encoded string. pub fn try_into_string(&'static self, value: Cow<'_, BStr>) -> Result { use crate::bstr::ByteVec; Vec::from(value.into_owned()).into_string().map_err(|err| { let utf8_err = err.utf8_error().clone(); config::string::Error::from_value(self, err.into_vec().into()).with_source(utf8_err) }) } } impl Debug for Any { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { self.logical_name().fmt(f) } } impl std::fmt::Display for Any { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str(&self.logical_name()) } } impl Key for Any { fn name(&self) -> &str { self.name } fn validate(&self, value: &BStr) -> Result<(), config::tree::key::validate::Error> { Ok(self.validate.validate(value)?) 
} fn section(&self) -> &dyn Section { self.section } fn subsection_requirement(&self) -> Option<&SubSectionRequirement> { self.subsection_requirement.as_ref() } fn link(&self) -> Option<&Link> { self.link.as_ref() } fn note(&self) -> Option<&Note> { self.note.as_ref() } } impl gix_config::AsKey for Any { fn as_key(&self) -> gix_config::KeyRef<'_> { self.try_as_key().expect("infallible") } fn try_as_key(&self) -> Option> { let section_name = self.section.parent().map_or_else(|| self.section.name(), Section::name); let subsection_name = if self.section.parent().is_some() { Some(self.section.name().into()) } else { None }; let value_name = self.name; gix_config::KeyRef { section_name, subsection_name, value_name, } .into() } } /// A key which represents a date. pub type Time = Any; /// The `core.(filesRefLockTimeout|packedRefsTimeout)` keys, or any other lock timeout for that matter. pub type LockTimeout = Any; /// Keys specifying durations in milliseconds. pub type DurationInMilliseconds = Any; /// A key which represents any unsigned integer. pub type UnsignedInteger = Any; /// A key that represents a remote name, either as url or symbolic name. pub type RemoteName = Any; /// A key that represents a boolean value. pub type Boolean = Any; /// A key that represents an executable program, shell script or shell commands. /// /// Once obtained with [trusted_program()](crate::config::Snapshot::trusted_program()) /// one can run it with [command::prepare()](gix_command::prepare), possibly after /// [obtaining](crate::Repository::command_context) and [setting](gix_command::Prepare::with_context) /// a git [command context](gix_command::Context) (depending on the commands needs). pub type Program = Any; /// A key that represents an executable program as identified by name or path. 
/// /// Once obtained with [trusted_program()](crate::config::Snapshot::trusted_program()) /// one can run it with [command::prepare()](gix_command::prepare), possibly after /// [obtaining](crate::Repository::command_context) and [setting](gix_command::Prepare::with_context) /// a git [command context](gix_command::Context) (depending on the commands needs). pub type Executable = Any; /// A key that represents a path (to a resource). pub type Path = Any; /// A key that represents a URL. pub type Url = Any; /// A key that represents a UTF-8 string. pub type String = Any; /// A key that represents a `RefSpec` for pushing. pub type PushRefSpec = Any; /// A key that represents a `RefSpec` for fetching. pub type FetchRefSpec = Any; mod duration { use std::time::Duration; use crate::{ config, config::tree::{keys::DurationInMilliseconds, Section}, }; impl DurationInMilliseconds { /// Create a new instance. pub const fn new_duration(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, super::validate::DurationInMilliseconds) } /// Return a valid duration as parsed from an integer that is interpreted as milliseconds. pub fn try_into_duration( &'static self, value: Result, ) -> Result { let value = value.map_err(|err| config::duration::Error::from(self).with_source(err))?; Ok(match value { val if val < 0 => Duration::from_secs(u64::MAX), val => Duration::from_millis(val.try_into().expect("i64 to u64 always works if positive")), }) } } } mod lock_timeout { use std::time::Duration; use gix_lock::acquire::Fail; use crate::{ config, config::tree::{keys::LockTimeout, Section}, }; impl LockTimeout { /// Create a new instance. pub const fn new_lock_timeout(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, super::validate::LockTimeout) } /// Return information on how long to wait for locked files. 
pub fn try_into_lock_timeout( &'static self, value: Result, ) -> Result { let value = value.map_err(|err| config::lock_timeout::Error::from(self).with_source(err))?; Ok(match value { val if val < 0 => Fail::AfterDurationWithBackoff(Duration::from_secs(u64::MAX)), 0 => Fail::Immediately, val => Fail::AfterDurationWithBackoff(Duration::from_millis( val.try_into().expect("i64 to u64 always works if positive"), )), }) } } } mod refspecs { use crate::config::tree::{ keys::{validate, FetchRefSpec, PushRefSpec}, Section, }; impl PushRefSpec { /// Create a new instance. pub const fn new_push_refspec(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::PushRefSpec) } } impl FetchRefSpec { /// Create a new instance. pub const fn new_fetch_refspec(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::FetchRefSpec) } } } mod url { use std::borrow::Cow; use crate::{ bstr::BStr, config, config::tree::{ keys::{validate, Url}, Section, }, }; impl Url { /// Create a new instance. pub const fn new_url(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::Url) } /// Try to parse `value` as URL. pub fn try_into_url(&'static self, value: Cow<'_, BStr>) -> Result { gix_url::parse(value.as_ref()) .map_err(|err| config::url::Error::from_value(self, value.into_owned()).with_source(err)) } } } impl String { /// Create a new instance. pub const fn new_string(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::String) } } impl Program { /// Create a new instance. pub const fn new_program(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::Program) } } impl Executable { /// Create a new instance. 
pub const fn new_executable(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::Executable) } } impl Path { /// Create a new instance. pub const fn new_path(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::Path) } } mod workers { use crate::config::tree::{keys::UnsignedInteger, Section}; impl UnsignedInteger { /// Create a new instance. pub const fn new_unsigned_integer(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, super::validate::UnsignedInteger) } /// Convert `value` into a `usize` or wrap it into a specialized error. pub fn try_into_usize( &'static self, value: Result, ) -> Result { value .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err)) .and_then(|value| { value .try_into() .map_err(|_| crate::config::unsigned_integer::Error::from(self)) }) } /// Convert `value` into a `u64` or wrap it into a specialized error. pub fn try_into_u64( &'static self, value: Result, ) -> Result { value .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err)) .and_then(|value| { value .try_into() .map_err(|_| crate::config::unsigned_integer::Error::from(self)) }) } /// Convert `value` into a `u32` or wrap it into a specialized error. pub fn try_into_u32( &'static self, value: Result, ) -> Result { value .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err)) .and_then(|value| { value .try_into() .map_err(|_| crate::config::unsigned_integer::Error::from(self)) }) } } } mod time { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config::tree::{ keys::{validate, Time}, Section, }, }; impl Time { /// Create a new instance. 
pub const fn new_time(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::Time) } /// Convert the `value` into a date if possible, with `now` as reference time for relative dates. pub fn try_into_time( &self, value: Cow<'_, BStr>, now: Option, ) -> Result { gix_date::parse( value .as_ref() .to_str() .map_err(|_| gix_date::parse::Error::InvalidDateString { input: value.to_string(), })?, now, ) } } } mod boolean { use crate::{ config, config::tree::{ keys::{validate, Boolean}, Section, }, }; impl Boolean { /// Create a new instance. pub const fn new_boolean(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, validate::Boolean) } /// Process the `value` into a result with an improved error message. /// /// `value` is expected to be provided by [`gix_config::File::boolean()`]. pub fn enrich_error( &'static self, value: Result, ) -> Result { value.map_err(|err| config::boolean::Error::from(self).with_source(err)) } } } mod remote_name { use std::borrow::Cow; use crate::{ bstr::{BStr, BString}, config, config::tree::{keys::RemoteName, Section}, }; impl RemoteName { /// Create a new instance. pub const fn new_remote_name(name: &'static str, section: &'static dyn Section) -> Self { Self::new_with_validate(name, section, super::validate::RemoteName) } /// Try to validate `name` as symbolic remote name and return it. #[allow(clippy::result_large_err)] pub fn try_into_symbolic_name( &'static self, name: Cow<'_, BStr>, ) -> Result { crate::remote::name::validated(name.into_owned()) .map_err(|err| config::remote::symbolic_name::Error::from(self).with_source(err)) } } } /// Provide a way to validate a value, or decode a value from `git-config`. pub trait Validate { /// Validate `value` or return an error. fn validate(&self, value: &BStr) -> Result<(), Box>; } /// various implementations of the `Validate` trait. 
pub mod validate {
    use std::{borrow::Cow, error::Error};

    use crate::{
        bstr::{BStr, ByteSlice},
        config::tree::keys::Validate,
        remote,
    };

    /// Everything is valid.
    #[derive(Default)]
    pub struct All;

    impl Validate for All {
        fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            Ok(())
        }
    }

    /// Assure that values that parse as git dates are valid.
    #[derive(Default)]
    pub struct Time;

    impl Validate for Time {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            // Parse with the current time as anchor so relative dates like `2.weeks.ago` validate as well.
            gix_date::parse(value.to_str()?, std::time::SystemTime::now().into())?;
            Ok(())
        }
    }

    /// Assure that values that parse as unsigned integers are valid.
    #[derive(Default)]
    pub struct UnsignedInteger;

    impl Validate for UnsignedInteger {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            usize::try_from(
                gix_config::Integer::try_from(value)?
                    .to_decimal()
                    .ok_or_else(|| format!("integer {value} cannot be represented as `usize`"))?,
            )
            .map_err(|_| "cannot use sign for unsigned integer")?;
            Ok(())
        }
    }

    /// Assure that values that parse as git booleans are valid.
    #[derive(Default)]
    pub struct Boolean;

    impl Validate for Boolean {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            gix_config::Boolean::try_from(value)?;
            Ok(())
        }
    }

    /// Values that are git remotes, symbolic or urls
    #[derive(Default)]
    pub struct RemoteName;

    impl Validate for RemoteName {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            remote::Name::try_from(Cow::Borrowed(value))
                .map_err(|_| format!("Illformed UTF-8 in remote name: \"{}\"", value.to_str_lossy()))?;
            Ok(())
        }
    }

    /// Values that are programs - everything is allowed.
    #[derive(Default)]
    pub struct Program;

    impl Validate for Program {
        fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            Ok(())
        }
    }

    /// Values that are programs executables, everything is allowed.
    #[derive(Default)]
    pub struct Executable;

    impl Validate for Executable {
        fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
            Ok(())
        }
    }

    /// Values that parse as URLs.
#[derive(Default)] pub struct Url; impl Validate for Url { fn validate(&self, value: &BStr) -> Result<(), Box> { gix_url::parse(value)?; Ok(()) } } /// Values that parse as ref-specs for pushing. #[derive(Default)] pub struct PushRefSpec; impl Validate for PushRefSpec { fn validate(&self, value: &BStr) -> Result<(), Box> { gix_refspec::parse(value, gix_refspec::parse::Operation::Push)?; Ok(()) } } /// Values that parse as ref-specs for pushing. #[derive(Default)] pub struct FetchRefSpec; impl Validate for FetchRefSpec { fn validate(&self, value: &BStr) -> Result<(), Box> { gix_refspec::parse(value, gix_refspec::parse::Operation::Fetch)?; Ok(()) } } /// Timeouts used for file locks. pub struct LockTimeout; impl Validate for LockTimeout { fn validate(&self, value: &BStr) -> Result<(), Box> { let value = gix_config::Integer::try_from(value)? .to_decimal() .ok_or_else(|| format!("integer {value} cannot be represented as integer")); super::super::Core::FILES_REF_LOCK_TIMEOUT.try_into_lock_timeout(Ok(value?))?; Ok(()) } } /// Durations in milliseconds. pub struct DurationInMilliseconds; impl Validate for DurationInMilliseconds { fn validate(&self, value: &BStr) -> Result<(), Box> { let value = gix_config::Integer::try_from(value)? .to_decimal() .ok_or_else(|| format!("integer {value} cannot be represented as integer")); super::super::gitoxide::Http::CONNECT_TIMEOUT.try_into_duration(Ok(value?))?; Ok(()) } } /// A UTF-8 string. pub struct String; impl Validate for String { fn validate(&self, value: &BStr) -> Result<(), Box> { value.to_str()?; Ok(()) } } /// Any path - everything is allowed. pub struct Path; impl Validate for Path { fn validate(&self, _value: &BStr) -> Result<(), Box> { Ok(()) } } } gix-0.69.1/src/config/tree/mod.rs000064400000000000000000000132731046102023000146360ustar 00000000000000//! The tree of supported configuration values for use in [`config_overrides`][crate::open::Options::config_overrides()] //! 
or for validating and transforming well-known configuration values. //! //! It can also be used to traverse all implemented keys and to validate values before usage as configuration overrides. //! //! ### Leniency //! //! When validating values, we don't apply leniency here which is left to the caller. Leniency is an application defined configuration //! to ignore errors on non-security related values, which might make applications more resilient towards misconfiguration. pub(crate) mod root { use super::sections; use crate::config::tree::Section; /// The root of the configuration tree, suitable to discover all sub-sections at runtime or compile time. #[derive(Copy, Clone, Default)] pub struct Tree; impl Tree { /// The `author` section. pub const AUTHOR: sections::Author = sections::Author; /// The `branch` section. pub const BRANCH: sections::Branch = sections::Branch; /// The `checkout` section. pub const CHECKOUT: sections::Checkout = sections::Checkout; /// The `clone` section. pub const CLONE: sections::Clone = sections::Clone; /// The `committer` section. pub const COMMITTER: sections::Committer = sections::Committer; /// The `core` section. pub const CORE: sections::Core = sections::Core; /// The `credential` section. pub const CREDENTIAL: sections::Credential = sections::Credential; /// The `diff` section. #[cfg(feature = "blob-diff")] pub const DIFF: sections::Diff = sections::Diff; /// The `extensions` section. pub const EXTENSIONS: sections::Extensions = sections::Extensions; /// The `fetch` section. pub const FETCH: sections::Fetch = sections::Fetch; /// The `gitoxide` section. pub const GITOXIDE: sections::Gitoxide = sections::Gitoxide; /// The `http` section. pub const HTTP: sections::Http = sections::Http; /// The `index` section. pub const INDEX: sections::Index = sections::Index; /// The `init` section. pub const INIT: sections::Init = sections::Init; /// The `mailmap` section. 
pub const MAILMAP: sections::Mailmap = sections::Mailmap; /// The `merge` section. pub const MERGE: sections::Merge = sections::Merge; /// The `pack` section. pub const PACK: sections::Pack = sections::Pack; /// The `protocol` section. pub const PROTOCOL: sections::Protocol = sections::Protocol; /// The `push` section. pub const PUSH: sections::Push = sections::Push; /// The `remote` section. pub const REMOTE: sections::Remote = sections::Remote; /// The `safe` section. pub const SAFE: sections::Safe = sections::Safe; /// The `ssh` section. pub const SSH: sections::Ssh = sections::Ssh; /// The `status` section. #[cfg(feature = "status")] pub const STATUS: sections::Status = sections::Status; /// The `user` section. pub const USER: sections::User = sections::User; /// The `url` section. pub const URL: sections::Url = sections::Url; /// List all available sections. pub fn sections(&self) -> &[&dyn Section] { &[ &Self::AUTHOR, &Self::BRANCH, &Self::CHECKOUT, &Self::CLONE, &Self::COMMITTER, &Self::CORE, &Self::CREDENTIAL, #[cfg(feature = "blob-diff")] &Self::DIFF, &Self::EXTENSIONS, &Self::FETCH, &Self::GITOXIDE, &Self::HTTP, &Self::INDEX, &Self::INIT, &Self::MAILMAP, &Self::MERGE, &Self::PACK, &Self::PROTOCOL, &Self::PUSH, &Self::REMOTE, &Self::SAFE, &Self::SSH, #[cfg(feature = "status")] &Self::STATUS, &Self::USER, &Self::URL, ] } } } mod sections; pub use sections::{ branch, checkout, core, credential, extensions, fetch, gitoxide, http, index, protocol, push, remote, ssh, Author, Branch, Checkout, Clone, Committer, Core, Credential, Extensions, Fetch, Gitoxide, Http, Index, Init, Mailmap, Merge, Pack, Protocol, Push, Remote, Safe, Ssh, Url, User, }; #[cfg(feature = "blob-diff")] pub use sections::{diff, Diff}; #[cfg(feature = "status")] pub use sections::{status, Status}; /// Generic value implementations for static instantiation. 
pub mod keys; /// pub mod key { /// pub mod validate { /// The error returned by [`Key::validate()`][crate::config::tree::Key::validate()]. #[derive(Debug, thiserror::Error)] #[error(transparent)] #[allow(missing_docs)] pub struct Error { #[from] source: Box, } } /// pub mod validate_assignment { /// The error returned by [`Key::validated_assignment`*()][crate::config::tree::Key::validated_assignment_fmt()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Failed to validate the value to be assigned to this key")] Validate(#[from] super::validate::Error), #[error("{message}")] Name { message: String }, } } } mod traits; pub use traits::{Key, Link, Note, Section, SubSectionRequirement}; gix-0.69.1/src/config/tree/sections/author.rs000064400000000000000000000011301046102023000171750ustar 00000000000000use crate::{ config, config::tree::{gitoxide, keys, Author, Key, Section}, }; impl Author { /// The `author.name` key. pub const NAME: keys::Any = keys::Any::new("name", &config::Tree::AUTHOR).with_fallback(&gitoxide::Author::NAME_FALLBACK); /// The `author.email` key. pub const EMAIL: keys::Any = keys::Any::new("email", &config::Tree::AUTHOR).with_fallback(&gitoxide::Author::EMAIL_FALLBACK); } impl Section for Author { fn name(&self) -> &str { "author" } fn keys(&self) -> &[&dyn Key] { &[&Self::NAME, &Self::EMAIL] } } gix-0.69.1/src/config/tree/sections/branch.rs000064400000000000000000000040401046102023000171330ustar 00000000000000use crate::config::tree::{keys, traits::SubSectionRequirement, Branch, Key, Section}; const NAME_PARAMETER: Option = Some(SubSectionRequirement::Parameter("name")); impl Branch { /// The `branch..merge` key. pub const MERGE: Merge = Merge::new_with_validate("merge", &crate::config::Tree::BRANCH, validate::FullNameRef) .with_subsection_requirement(NAME_PARAMETER); /// The `branch..pushRemote` key. 
pub const PUSH_REMOTE: keys::RemoteName = keys::RemoteName::new_remote_name("pushRemote", &crate::config::Tree::BRANCH) .with_subsection_requirement(NAME_PARAMETER); /// The `branch..remote` key. pub const REMOTE: keys::RemoteName = keys::RemoteName::new_remote_name("remote", &crate::config::Tree::BRANCH) .with_subsection_requirement(NAME_PARAMETER); } impl Section for Branch { fn name(&self) -> &str { "branch" } fn keys(&self) -> &[&dyn Key] { &[&Self::MERGE, &Self::PUSH_REMOTE, &Self::REMOTE] } } /// The `branch..merge` key. pub type Merge = keys::Any; mod merge { use std::borrow::Cow; use gix_ref::FullNameRef; use crate::{bstr::BStr, config::tree::branch::Merge}; impl Merge { /// Return the validated full ref name from `value` if it is valid. pub fn try_into_fullrefname( value: Cow<'_, BStr>, ) -> Result, gix_validate::reference::name::Error> { match value { Cow::Borrowed(v) => v.try_into().map(Cow::Borrowed), Cow::Owned(v) => v.try_into().map(Cow::Owned), } } } } /// pub mod validate { use crate::{ bstr::BStr, config::tree::{branch::Merge, keys}, }; pub struct FullNameRef; impl keys::Validate for FullNameRef { fn validate(&self, value: &BStr) -> Result<(), Box> { Merge::try_into_fullrefname(value.into())?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/checkout.rs000064400000000000000000000036131046102023000175100ustar 00000000000000use crate::{ config, config::tree::{keys, Checkout, Key, Section}, }; impl Checkout { /// The `checkout.workers` key. pub const WORKERS: Workers = Workers::new_with_validate("workers", &config::Tree::CHECKOUT, validate::Workers) .with_deviation("if unset, uses all cores instead of just one"); } /// The `checkout.workers` key. 
pub type Workers = keys::Any<validate::Workers>;

impl Section for Checkout {
    fn name(&self) -> &str {
        "checkout"
    }

    fn keys(&self) -> &[&dyn Key] {
        &[&Self::WORKERS]
    }
}

mod workers {
    use crate::config::tree::checkout::Workers;

    impl Workers {
        /// Return the amount of threads to use for checkout, with `0` meaning all available ones, after decoding our integer value from `config`,
        /// or `None` if the value isn't set which is typically interpreted as "as many threads as available"
        pub fn try_from_workers(
            &'static self,
            value: Result<i64, gix_config::value::Error>,
        ) -> Result<usize, crate::config::key::Error> {
            match value {
                // Negative values mean "use all cores", which is encoded as `0`.
                Ok(v) if v < 0 => Ok(0),
                Ok(v) => Ok(v.try_into().expect("positive i64 can always be usize on 64 bit")),
                Err(err) => Err(crate::config::key::Error::from(&super::Checkout::WORKERS).with_source(err)),
            }
        }
    }
}

///
pub mod validate {
    use crate::{bstr::BStr, config::tree::keys};

    pub struct Workers;
    impl keys::Validate for Workers {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
            // Validation simply runs the same decoding used at lookup time and discards the result.
            super::Checkout::WORKERS.try_from_workers(gix_config::Integer::try_from(value).and_then(|i| {
                i.to_decimal()
                    .ok_or_else(|| gix_config::value::Error::new("Integer overflow", value.to_owned()))
            }))?;
            Ok(())
        }
    }
}

use crate::{
    config,
    config::tree::{keys, Clone, Key, Section},
};

impl Clone {
    /// The `clone.defaultRemoteName` key.
    pub const DEFAULT_REMOTE_NAME: keys::RemoteName =
        keys::RemoteName::new_remote_name("defaultRemoteName", &config::Tree::CLONE);
    /// The `clone.rejectShallow` key.
pub const REJECT_SHALLOW: keys::Boolean = keys::Boolean::new_boolean("rejectShallow", &config::Tree::CLONE); } impl Section for Clone { fn name(&self) -> &str { "clone" } fn keys(&self) -> &[&dyn Key] { &[&Self::DEFAULT_REMOTE_NAME, &Self::REJECT_SHALLOW] } } gix-0.69.1/src/config/tree/sections/committer.rs000064400000000000000000000011661046102023000177070ustar 00000000000000use crate::{ config, config::tree::{gitoxide, keys, Committer, Key, Section}, }; impl Committer { /// The `committer.name` key. pub const NAME: keys::Any = keys::Any::new("name", &config::Tree::COMMITTER).with_fallback(&gitoxide::Committer::NAME_FALLBACK); /// The `committer.email` key. pub const EMAIL: keys::Any = keys::Any::new("email", &config::Tree::COMMITTER).with_fallback(&gitoxide::Committer::EMAIL_FALLBACK); } impl Section for Committer { fn name(&self) -> &str { "committer" } fn keys(&self) -> &[&dyn Key] { &[&Self::NAME, &Self::EMAIL] } } gix-0.69.1/src/config/tree/sections/core.rs000064400000000000000000000523551046102023000166420ustar 00000000000000use crate::{ config, config::tree::{keys, Core, Key, Section}, }; impl Core { /// The `core.abbrev` key. pub const ABBREV: Abbrev = Abbrev::new_with_validate("abbrev", &config::Tree::CORE, validate::Abbrev); /// The `core.bare` key. pub const BARE: keys::Boolean = keys::Boolean::new_boolean("bare", &config::Tree::CORE); /// The `core.bigFileThreshold` key. pub const BIG_FILE_THRESHOLD: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("bigFileThreshold", &config::Tree::CORE); /// The `core.checkStat` key. pub const CHECK_STAT: CheckStat = CheckStat::new_with_validate("checkStat", &config::Tree::CORE, validate::CheckStat); /// The `core.deltaBaseCacheLimit` key. 
pub const DELTA_BASE_CACHE_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("deltaBaseCacheLimit", &config::Tree::CORE) .with_environment_override("GIX_PACK_CACHE_MEMORY") .with_note("if unset, we default to a small 64 slot fixed-size cache that holds at most 64 full delta base objects of any size. Set to 0 to deactivate it entirely"); /// The `core.disambiguate` key. pub const DISAMBIGUATE: Disambiguate = Disambiguate::new_with_validate("disambiguate", &config::Tree::CORE, validate::Disambiguate); /// The `core.editor` key. pub const EDITOR: keys::Program = keys::Program::new_program("editor", &config::Tree::CORE); /// The `core.fileMode` key. pub const FILE_MODE: keys::Boolean = keys::Boolean::new_boolean("fileMode", &config::Tree::CORE); /// The `core.ignoreCase` key. pub const IGNORE_CASE: keys::Boolean = keys::Boolean::new_boolean("ignoreCase", &config::Tree::CORE); /// The `core.filesRefLockTimeout` key. pub const FILES_REF_LOCK_TIMEOUT: keys::LockTimeout = keys::LockTimeout::new_lock_timeout("filesRefLockTimeout", &config::Tree::CORE); /// The `core.packedRefsTimeout` key. pub const PACKED_REFS_TIMEOUT: keys::LockTimeout = keys::LockTimeout::new_lock_timeout("packedRefsTimeout", &config::Tree::CORE); /// The `core.multiPackIndex` key. pub const MULTIPACK_INDEX: keys::Boolean = keys::Boolean::new_boolean("multiPackIndex", &config::Tree::CORE); /// The `core.logAllRefUpdates` key. pub const LOG_ALL_REF_UPDATES: LogAllRefUpdates = LogAllRefUpdates::new_with_validate("logAllRefUpdates", &config::Tree::CORE, validate::LogAllRefUpdates); /// The `core.precomposeUnicode` key. /// /// Needs application to use [`env::args_os`][crate::env::args_os()] to conform all input paths before they are used. pub const PRECOMPOSE_UNICODE: keys::Boolean = keys::Boolean::new_boolean("precomposeUnicode", &config::Tree::CORE) .with_note("application needs to conform all program input by using gix::env::args_os()"); /// The `core.protectHFS` key. 
pub const PROTECT_HFS: keys::Boolean = keys::Boolean::new_boolean("protectHFS", &config::Tree::CORE); /// The `core.protectNTFS` key. pub const PROTECT_NTFS: keys::Boolean = keys::Boolean::new_boolean("protectNTFS", &config::Tree::CORE); /// The `core.repositoryFormatVersion` key. pub const REPOSITORY_FORMAT_VERSION: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("repositoryFormatVersion", &config::Tree::CORE); /// The `core.symlinks` key. pub const SYMLINKS: keys::Boolean = keys::Boolean::new_boolean("symlinks", &config::Tree::CORE); /// The `core.trustCTime` key. pub const TRUST_C_TIME: keys::Boolean = keys::Boolean::new_boolean("trustCTime", &config::Tree::CORE); /// The `core.worktree` key. pub const WORKTREE: keys::Any = keys::Any::new("worktree", &config::Tree::CORE) .with_environment_override("GIT_WORK_TREE") .with_deviation("Command-line overrides also work, and they act lie an environment override. If set in the git configuration file, relative paths are relative to it."); /// The `core.askPass` key. pub const ASKPASS: keys::Executable = keys::Executable::new_executable("askPass", &config::Tree::CORE) .with_environment_override("GIT_ASKPASS") .with_note("fallback is 'SSH_ASKPASS'"); /// The `core.excludesFile` key. pub const EXCLUDES_FILE: keys::Path = keys::Path::new_path("excludesFile", &config::Tree::CORE); /// The `core.attributesFile` key. pub const ATTRIBUTES_FILE: keys::Path = keys::Path::new_path("attributesFile", &config::Tree::CORE) .with_deviation("for checkout - it's already queried but needs building of attributes group, and of course support during checkout"); /// The `core.sshCommand` key. pub const SSH_COMMAND: keys::Executable = keys::Executable::new_executable("sshCommand", &config::Tree::CORE) .with_environment_override("GIT_SSH_COMMAND"); /// The `core.useReplaceRefs` key. 
pub const USE_REPLACE_REFS: keys::Boolean = keys::Boolean::new_boolean("useReplaceRefs", &config::Tree::CORE) .with_environment_override("GIT_NO_REPLACE_OBJECTS"); /// The `core.commitGraph` key. pub const COMMIT_GRAPH: keys::Boolean = keys::Boolean::new_boolean("commitGraph", &config::Tree::CORE); /// The `core.safecrlf` key. #[cfg(feature = "attributes")] pub const SAFE_CRLF: SafeCrlf = SafeCrlf::new_with_validate("safecrlf", &config::Tree::CORE, validate::SafeCrlf); /// The `core.autocrlf` key. #[cfg(feature = "attributes")] pub const AUTO_CRLF: AutoCrlf = AutoCrlf::new_with_validate("autocrlf", &config::Tree::CORE, validate::AutoCrlf); /// The `core.eol` key. #[cfg(feature = "attributes")] pub const EOL: Eol = Eol::new_with_validate("eol", &config::Tree::CORE, validate::Eol); /// The `core.checkRoundTripEncoding` key. #[cfg(feature = "attributes")] pub const CHECK_ROUND_TRIP_ENCODING: CheckRoundTripEncoding = CheckRoundTripEncoding::new_with_validate( "checkRoundTripEncoding", &config::Tree::CORE, validate::CheckRoundTripEncoding, ); } impl Section for Core { fn name(&self) -> &str { "core" } fn keys(&self) -> &[&dyn Key] { &[ &Self::ABBREV, &Self::BARE, &Self::BIG_FILE_THRESHOLD, &Self::CHECK_STAT, &Self::DELTA_BASE_CACHE_LIMIT, &Self::DISAMBIGUATE, &Self::EDITOR, &Self::FILE_MODE, &Self::IGNORE_CASE, &Self::FILES_REF_LOCK_TIMEOUT, &Self::PACKED_REFS_TIMEOUT, &Self::MULTIPACK_INDEX, &Self::LOG_ALL_REF_UPDATES, &Self::PRECOMPOSE_UNICODE, &Self::REPOSITORY_FORMAT_VERSION, &Self::SYMLINKS, &Self::TRUST_C_TIME, &Self::WORKTREE, &Self::PROTECT_HFS, &Self::PROTECT_NTFS, &Self::ASKPASS, &Self::EXCLUDES_FILE, &Self::ATTRIBUTES_FILE, &Self::SSH_COMMAND, &Self::USE_REPLACE_REFS, &Self::COMMIT_GRAPH, #[cfg(feature = "attributes")] &Self::SAFE_CRLF, #[cfg(feature = "attributes")] &Self::AUTO_CRLF, #[cfg(feature = "attributes")] &Self::EOL, #[cfg(feature = "attributes")] &Self::CHECK_ROUND_TRIP_ENCODING, ] } } /// The `core.checkStat` key. 
pub type CheckStat = keys::Any; /// The `core.abbrev` key. pub type Abbrev = keys::Any; /// The `core.logAllRefUpdates` key. pub type LogAllRefUpdates = keys::Any; /// The `core.disambiguate` key. pub type Disambiguate = keys::Any; #[cfg(feature = "attributes")] mod filter { use super::validate; use crate::config::tree::keys; /// The `core.safecrlf` key. pub type SafeCrlf = keys::Any; /// The `core.autocrlf` key. pub type AutoCrlf = keys::Any; /// The `core.eol` key. pub type Eol = keys::Any; /// The `core.checkRoundTripEncoding` key. pub type CheckRoundTripEncoding = keys::Any; mod check_round_trip_encoding { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::{core::CheckRoundTripEncoding, Key}, }; impl CheckRoundTripEncoding { /// Convert `value` into a list of encodings, which are either space or coma separated. Fail if an encoding is unknown. /// If `None`, the default is returned. pub fn try_into_encodings( &'static self, value: Option>, ) -> Result, config::encoding::Error> { Ok(match value { None => vec![gix_filter::encoding::SHIFT_JIS], Some(value) => { let mut out = Vec::new(); for encoding in value .as_ref() .split(|b| *b == b',' || *b == b' ') .filter(|e| !e.trim().is_empty()) { out.push( gix_filter::encoding::Encoding::for_label(encoding.trim()).ok_or_else(|| { config::encoding::Error { key: self.logical_name().into(), value: value.as_ref().to_owned(), encoding: encoding.into(), } })?, ); } out } }) } } } mod eol { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::core::Eol, }; impl Eol { /// Convert `value` into the default end-of-line mode. /// /// ### Deviation /// /// git will allow any value and silently leaves it unset, we will fail if the value is not known. 
pub fn try_into_eol( &'static self, value: Cow<'_, BStr>, ) -> Result { Ok(match value.to_str_lossy().as_ref() { "lf" => gix_filter::eol::Mode::Lf, "crlf" => gix_filter::eol::Mode::CrLf, "native" => gix_filter::eol::Mode::default(), _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())), }) } } } mod safecrlf { use std::borrow::Cow; use gix_filter::pipeline::CrlfRoundTripCheck; use crate::{bstr::BStr, config, config::tree::core::SafeCrlf}; impl SafeCrlf { /// Convert `value` into the safe-crlf enumeration, if possible. pub fn try_into_safecrlf( &'static self, value: Cow<'_, BStr>, ) -> Result { if value.as_ref() == "warn" { return Ok(CrlfRoundTripCheck::Warn); } let value = gix_config::Boolean::try_from(value.as_ref()).map_err(|err| { config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err) })?; Ok(if value.into() { CrlfRoundTripCheck::Fail } else { CrlfRoundTripCheck::Skip }) } } } mod autocrlf { use std::borrow::Cow; use gix_filter::eol; use crate::{bstr::BStr, config, config::tree::core::AutoCrlf}; impl AutoCrlf { /// Convert `value` into the safe-crlf enumeration, if possible. pub fn try_into_autocrlf( &'static self, value: Cow<'_, BStr>, ) -> Result { if value.as_ref() == "input" { return Ok(eol::AutoCrlf::Input); } let value = gix_config::Boolean::try_from(value.as_ref()).map_err(|err| { config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err) })?; Ok(if value.into() { eol::AutoCrlf::Enabled } else { eol::AutoCrlf::Disabled }) } } } } #[cfg(feature = "attributes")] pub use filter::*; #[cfg(feature = "revision")] mod disambiguate { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::core::Disambiguate, revision::spec::parse::ObjectKindHint, }; impl Disambiguate { /// Convert a disambiguation marker into the respective enum. 
pub fn try_into_object_kind_hint( &'static self, value: Cow<'_, BStr>, ) -> Result, config::key::GenericErrorWithValue> { let hint = match value.as_ref().as_bytes() { b"none" => return Ok(None), b"commit" => ObjectKindHint::Commit, b"committish" => ObjectKindHint::Committish, b"tree" => ObjectKindHint::Tree, b"treeish" => ObjectKindHint::Treeish, b"blob" => ObjectKindHint::Blob, _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())), }; Ok(Some(hint)) } } } mod log_all_ref_updates { use crate::{config, config::tree::core::LogAllRefUpdates}; impl LogAllRefUpdates { /// Returns the mode for ref-updates as parsed from `value`. If `value` is not a boolean, we try /// to interpret the string value instead. For correctness, this two step process is necessary as /// the interpretation of booleans in special in `git-config`, i.e. we can't just treat it as string. pub fn try_into_ref_updates( &'static self, value: Option>, ) -> Result, config::key::GenericErrorWithValue> { match value { Some(Ok(bool)) => Ok(Some(if bool { gix_ref::store::WriteReflog::Normal } else { gix_ref::store::WriteReflog::Disable })), Some(Err(err)) => match err.input { val if val.eq_ignore_ascii_case(b"always") => Ok(Some(gix_ref::store::WriteReflog::Always)), val => Err(config::key::GenericErrorWithValue::from_value(self, val)), }, None => Ok(None), } } } } mod check_stat { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::core::CheckStat, }; impl CheckStat { /// Returns true if the full set of stat entries should be checked, and it's just as lenient as git. 
pub fn try_into_checkstat( &'static self, value: Cow<'_, BStr>, ) -> Result { Ok(match value.as_ref().as_bytes() { b"minimal" => false, b"default" => true, _ => { return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())); } }) } } } mod abbrev { use std::borrow::Cow; use config::abbrev::Error; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::core::Abbrev, }; impl Abbrev { /// Convert the given `hex_len_str` into the amount of characters that a short hash should have. /// If `None` is returned, the correct value can be determined based on the amount of objects in the repo. pub fn try_into_abbreviation( &'static self, hex_len_str: Cow<'_, BStr>, object_hash: gix_hash::Kind, ) -> Result, Error> { let max = object_hash.len_in_hex() as u8; if hex_len_str.trim().is_empty() { return Err(Error { value: hex_len_str.into_owned(), max, }); } if hex_len_str.trim().eq_ignore_ascii_case(b"auto") { Ok(None) } else { let value_bytes = hex_len_str.as_ref(); if let Ok(false) = gix_config::Boolean::try_from(value_bytes).map(Into::into) { Ok(object_hash.len_in_hex().into()) } else { let value = gix_config::Integer::try_from(value_bytes) .map_err(|_| Error { value: hex_len_str.clone().into_owned(), max, })? .to_decimal() .ok_or_else(|| Error { value: hex_len_str.clone().into_owned(), max, })?; if value < 4 || value as usize > object_hash.len_in_hex() { return Err(Error { value: hex_len_str.clone().into_owned(), max, }); } Ok(Some(value as usize)) } } } } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct LockTimeout; impl keys::Validate for LockTimeout { fn validate(&self, value: &BStr) -> Result<(), Box> { let value = gix_config::Integer::try_from(value)? 
.to_decimal() .ok_or_else(|| format!("integer {value} cannot be represented as integer")); super::Core::FILES_REF_LOCK_TIMEOUT.try_into_lock_timeout(Ok(value?))?; Ok(()) } } pub struct Disambiguate; impl keys::Validate for Disambiguate { #[cfg_attr(not(feature = "revision"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "revision")] super::Core::DISAMBIGUATE.try_into_object_kind_hint(value.into())?; Ok(()) } } pub struct LogAllRefUpdates; impl keys::Validate for LogAllRefUpdates { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Core::LOG_ALL_REF_UPDATES .try_into_ref_updates(Some(gix_config::Boolean::try_from(value).map(|b| b.0)))?; Ok(()) } } pub struct CheckStat; impl keys::Validate for CheckStat { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Core::CHECK_STAT.try_into_checkstat(value.into())?; Ok(()) } } pub struct Abbrev; impl keys::Validate for Abbrev { fn validate(&self, value: &BStr) -> Result<(), Box> { // TODO: when there is options, validate against all hashes and assure all fail to trigger a validation failure. 
super::Core::ABBREV.try_into_abbreviation(value.into(), gix_hash::Kind::Sha1)?; Ok(()) } } pub struct SafeCrlf; impl keys::Validate for SafeCrlf { #[cfg_attr(not(feature = "attributes"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "attributes")] super::Core::SAFE_CRLF.try_into_safecrlf(value.into())?; Ok(()) } } pub struct AutoCrlf; impl keys::Validate for AutoCrlf { #[cfg_attr(not(feature = "attributes"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "attributes")] super::Core::AUTO_CRLF.try_into_autocrlf(value.into())?; Ok(()) } } pub struct Eol; impl keys::Validate for Eol { #[cfg_attr(not(feature = "attributes"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "attributes")] super::Core::EOL.try_into_eol(value.into())?; Ok(()) } } pub struct CheckRoundTripEncoding; impl keys::Validate for CheckRoundTripEncoding { #[cfg_attr(not(feature = "attributes"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "attributes")] super::Core::CHECK_ROUND_TRIP_ENCODING.try_into_encodings(Some(value.into()))?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/credential.rs000064400000000000000000000033161046102023000200150ustar 00000000000000use crate::{ config, config::tree::{keys, Credential, Key, Section}, }; impl Credential { /// The `credential.helper` key. pub const HELPER: keys::Program = keys::Program::new_program("helper", &config::Tree::CREDENTIAL); /// The `credential.username` key. pub const USERNAME: keys::Any = keys::Any::new("username", &config::Tree::CREDENTIAL); /// The `credential.useHttpPath` key. pub const USE_HTTP_PATH: keys::Boolean = keys::Boolean::new_boolean("useHttpPath", &config::Tree::CREDENTIAL); /// The `credential.` subsection pub const URL_PARAMETER: UrlParameter = UrlParameter; } /// The `credential.` parameter section. 
pub struct UrlParameter; impl UrlParameter { /// The `credential..helper` key. pub const HELPER: keys::Program = keys::Program::new_program("helper", &Credential::URL_PARAMETER); /// The `credential..username` key. pub const USERNAME: keys::Any = keys::Any::new("username", &Credential::URL_PARAMETER); /// The `credential..useHttpPath` key. pub const USE_HTTP_PATH: keys::Boolean = keys::Boolean::new_boolean("useHttpPath", &Credential::URL_PARAMETER); } impl Section for UrlParameter { fn name(&self) -> &str { "" } fn keys(&self) -> &[&dyn Key] { &[&Self::HELPER, &Self::USERNAME, &Self::USE_HTTP_PATH] } fn parent(&self) -> Option<&dyn Section> { Some(&config::Tree::CREDENTIAL) } } impl Section for Credential { fn name(&self) -> &str { "credential" } fn keys(&self) -> &[&dyn Key] { &[&Self::HELPER, &Self::USERNAME, &Self::USE_HTTP_PATH] } fn sub_sections(&self) -> &[&dyn Section] { &[&Self::URL_PARAMETER] } } gix-0.69.1/src/config/tree/sections/diff.rs000064400000000000000000000165231046102023000166170ustar 00000000000000use crate::{ config, config::tree::{keys, Diff, Key, Section, SubSectionRequirement}, }; impl Diff { /// The `diff.algorithm` key. pub const ALGORITHM: Algorithm = Algorithm::new_with_validate("algorithm", &config::Tree::DIFF, validate::Algorithm) .with_deviation("'patience' diff is not implemented and can default to 'histogram' if lenient config is used, and defaults to histogram if unset for fastest and best results"); /// The `diff.renameLimit` key. pub const RENAME_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer( "renameLimit", &config::Tree::DIFF, ) .with_note( "The limit is actually squared, so 1000 stands for up to 1 million diffs if fuzzy rename tracking is enabled", ); /// The `diff.renames` key. pub const RENAMES: Renames = Renames::new_renames("renames", &config::Tree::DIFF); /// The `diff..command` key. 
pub const DRIVER_COMMAND: keys::Program = keys::Program::new_program("command", &config::Tree::DIFF) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `diff..textconv` key. pub const DRIVER_TEXTCONV: keys::Program = keys::Program::new_program("textconv", &config::Tree::DIFF) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `diff..algorithm` key. pub const DRIVER_ALGORITHM: Algorithm = Algorithm::new_with_validate("algorithm", &config::Tree::DIFF, validate::Algorithm) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `diff..binary` key. pub const DRIVER_BINARY: Binary = Binary::new_with_validate("binary", &config::Tree::DIFF, validate::Binary) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `diff.external` key. pub const EXTERNAL: keys::Program = keys::Program::new_program("external", &config::Tree::DIFF).with_environment_override("GIT_EXTERNAL_DIFF"); } impl Section for Diff { fn name(&self) -> &str { "diff" } fn keys(&self) -> &[&dyn Key] { &[ &Self::ALGORITHM, &Self::RENAME_LIMIT, &Self::RENAMES, &Self::DRIVER_COMMAND, &Self::DRIVER_TEXTCONV, &Self::DRIVER_ALGORITHM, &Self::DRIVER_BINARY, &Self::EXTERNAL, ] } } /// The `diff.algorithm` key. pub type Algorithm = keys::Any; /// The `diff.renames` key. pub type Renames = keys::Any; /// The `diff..binary` key. pub type Binary = keys::Any; mod algorithm { use std::borrow::Cow; use crate::{ bstr::BStr, config, config::{diff::algorithm::Error, tree::sections::diff::Algorithm}, }; impl Algorithm { /// Derive the diff algorithm identified by `name`, case-insensitively. 
pub fn try_into_algorithm(&self, name: Cow<'_, BStr>) -> Result { let algo = if name.eq_ignore_ascii_case(b"myers") || name.eq_ignore_ascii_case(b"default") { gix_diff::blob::Algorithm::Myers } else if name.eq_ignore_ascii_case(b"minimal") { gix_diff::blob::Algorithm::MyersMinimal } else if name.eq_ignore_ascii_case(b"histogram") { gix_diff::blob::Algorithm::Histogram } else if name.eq_ignore_ascii_case(b"patience") { return Err(config::diff::algorithm::Error::Unimplemented { name: name.into_owned(), }); } else { return Err(Error::Unknown { name: name.into_owned(), }); }; Ok(algo) } } } mod binary { use crate::config::tree::diff::Binary; impl Binary { /// Convert `value` into a tri-state boolean that can take the special value `auto`, resulting in `None`, or is a boolean. /// If `None` is given, it's treated as implicit boolean `true`, as this method is made to be used /// with [`gix_config::file::section::Body::value_implicit()`]. pub fn try_into_binary( &'static self, value: Option>, ) -> Result, crate::config::key::GenericErrorWithValue> { Ok(match value { None => Some(true), Some(value) => { if value.as_ref() == "auto" { None } else { Some( gix_config::Boolean::try_from(value.as_ref()) .map(|b| b.0) .map_err(|err| { crate::config::key::GenericErrorWithValue::from_value(self, value.into_owned()) .with_source(err) })?, ) } } }) } } } mod renames { use crate::{ bstr::ByteSlice, config::{ key::GenericError, tree::{keys, sections::diff::Renames, Section}, }, diff::rename::Tracking, }; impl Renames { /// Create a new instance. pub const fn new_renames(name: &'static str, section: &'static dyn Section) -> Self { keys::Any::new_with_validate(name, section, super::validate::Renames) } /// Try to convert the configuration into a valid rename tracking variant. 
Use `value` and if it's an error, interpret /// the boolean as string pub fn try_into_renames( &'static self, value: Result, ) -> Result { Ok(match value { Ok(true) => Tracking::Renames, Ok(false) => Tracking::Disabled, Err(err) => { let value = &err.input; match value.as_bytes() { b"copy" | b"copies" => Tracking::RenamesAndCopies, _ => return Err(GenericError::from_value(self, value.clone()).with_source(err)), } } }) } } } pub(super) mod validate { use crate::{ bstr::BStr, config::tree::{keys, Diff}, }; pub struct Algorithm; impl keys::Validate for Algorithm { fn validate(&self, value: &BStr) -> Result<(), Box> { Diff::ALGORITHM.try_into_algorithm(value.into())?; Ok(()) } } pub struct Renames; impl keys::Validate for Renames { fn validate(&self, value: &BStr) -> Result<(), Box> { let boolean = gix_config::Boolean::try_from(value).map(|b| b.0); Diff::RENAMES.try_into_renames(boolean)?; Ok(()) } } pub struct Binary; impl keys::Validate for Binary { fn validate(&self, value: &BStr) -> Result<(), Box> { Diff::DRIVER_BINARY.try_into_binary(Some(value.into()))?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/extensions.rs000064400000000000000000000034401046102023000201000ustar 00000000000000use crate::{ config, config::tree::{keys, Extensions, Key, Section}, }; impl Extensions { /// The `extensions.worktreeConfig` key. pub const WORKTREE_CONFIG: keys::Boolean = keys::Boolean::new_boolean("worktreeConfig", &config::Tree::EXTENSIONS); /// The `extensions.objectFormat` key. pub const OBJECT_FORMAT: ObjectFormat = ObjectFormat::new_with_validate("objectFormat", &config::Tree::EXTENSIONS, validate::ObjectFormat).with_note( "Support for SHA256 is prepared but not fully implemented yet. For now we abort when encountered", ); } /// The `core.checkStat` key. 
pub type ObjectFormat = keys::Any; mod object_format { use std::borrow::Cow; use crate::{bstr::BStr, config, config::tree::sections::extensions::ObjectFormat}; impl ObjectFormat { pub fn try_into_object_format( &'static self, value: Cow<'_, BStr>, ) -> Result { if value.as_ref().eq_ignore_ascii_case(b"sha1") { Ok(gix_hash::Kind::Sha1) } else { Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())) } } } } impl Section for Extensions { fn name(&self) -> &str { "extensions" } fn keys(&self) -> &[&dyn Key] { &[&Self::OBJECT_FORMAT, &Self::WORKTREE_CONFIG] } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct ObjectFormat; impl keys::Validate for ObjectFormat { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Extensions::OBJECT_FORMAT.try_into_object_format(value.into())?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/fetch.rs000064400000000000000000000072731046102023000170020ustar 00000000000000use crate::{ config, config::tree::{keys, Fetch, Key, Section}, }; impl Fetch { /// The `fetch.negotiationAlgorithm` key. pub const NEGOTIATION_ALGORITHM: NegotiationAlgorithm = NegotiationAlgorithm::new_with_validate( "negotiationAlgorithm", &config::Tree::FETCH, validate::NegotiationAlgorithm, ); /// The `fetch.recurseSubmodules` key. #[cfg(feature = "attributes")] pub const RECURSE_SUBMODULES: RecurseSubmodules = RecurseSubmodules::new_with_validate("recurseSubmodules", &config::Tree::FETCH, validate::RecurseSubmodules); } impl Section for Fetch { fn name(&self) -> &str { "fetch" } fn keys(&self) -> &[&dyn Key] { &[ &Self::NEGOTIATION_ALGORITHM, #[cfg(feature = "attributes")] &Self::RECURSE_SUBMODULES, ] } } /// The `fetch.negotiationAlgorithm` key. pub type NegotiationAlgorithm = keys::Any; /// The `fetch.recurseSubmodules` key. 
#[cfg(feature = "attributes")] pub type RecurseSubmodules = keys::Any; mod algorithm { #[cfg(feature = "credentials")] impl crate::config::tree::sections::fetch::NegotiationAlgorithm { /// Derive the negotiation algorithm identified by `name`, case-sensitively. pub fn try_into_negotiation_algorithm( &'static self, name: std::borrow::Cow<'_, crate::bstr::BStr>, ) -> Result { use crate::{bstr::ByteSlice, remote::fetch::negotiate::Algorithm}; Ok(match name.as_ref().as_bytes() { b"noop" => Algorithm::Noop, b"consecutive" | b"default" => Algorithm::Consecutive, b"skipping" => Algorithm::Skipping, _ => { return Err(crate::config::key::GenericErrorWithValue::from_value( self, name.into_owned(), )) } }) } } #[cfg(feature = "attributes")] impl crate::config::tree::sections::fetch::RecurseSubmodules { /// Obtain the way submodules should be updated. pub fn try_into_recurse_submodules( &'static self, value: Result, ) -> Result { gix_submodule::config::FetchRecurse::new(value) .map_err(|err| crate::config::key::GenericErrorWithValue::from_value(self, err)) } } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct NegotiationAlgorithm; impl keys::Validate for NegotiationAlgorithm { #[cfg_attr(not(feature = "credentials"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "credentials")] crate::config::tree::Fetch::NEGOTIATION_ALGORITHM.try_into_negotiation_algorithm(value.into())?; Ok(()) } } pub struct RecurseSubmodules; impl keys::Validate for RecurseSubmodules { #[cfg_attr(not(feature = "attributes"), allow(unused_variables))] fn validate(&self, value: &BStr) -> Result<(), Box> { #[cfg(feature = "attributes")] { let boolean = gix_config::Boolean::try_from(value).map(|b| b.0); crate::config::tree::Fetch::RECURSE_SUBMODULES.try_into_recurse_submodules(boolean)?; } Ok(()) } } } gix-0.69.1/src/config/tree/sections/gitoxide.rs000064400000000000000000000511211046102023000175140ustar 00000000000000use crate::{ config, 
config::tree::{keys, Gitoxide, Key, Section}, }; impl Gitoxide { /// The `gitoxide.allow` section. pub const ALLOW: Allow = Allow; /// The `gitoxide.author` section. pub const AUTHOR: Author = Author; /// The `gitoxide.core` section. pub const CORE: Core = Core; /// The `gitoxide.commit` section. pub const COMMIT: Commit = Commit; /// The `gitoxide.committer` section. pub const COMMITTER: Committer = Committer; /// The `gitoxide.credentials` section. pub const CREDENTIALS: Credentials = Credentials; /// The `gitoxide.http` section. pub const HTTP: Http = Http; /// The `gitoxide.https` section. pub const HTTPS: Https = Https; /// The `gitoxide.objects` section. pub const OBJECTS: Objects = Objects; /// The `gitoxide.ssh` section. pub const SSH: Ssh = Ssh; /// The `gitoxide.user` section. pub const USER: User = User; /// The `gitoxide.pathspec` section. pub const PATHSPEC: Pathspec = Pathspec; /// The `gitoxide.userAgent` Key. pub const USER_AGENT: keys::Any = keys::Any::new("userAgent", &config::Tree::GITOXIDE).with_note( "The user agent presented on the git protocol layer, serving as fallback for when no `http.userAgent` is set", ); /// The `gitoxide.tracePacket` Key. pub const TRACE_PACKET: keys::Boolean = keys::Boolean::new_boolean("tracePacket", &config::Tree::GITOXIDE) .with_environment_override("GIT_TRACE_PACKET"); } impl Section for Gitoxide { fn name(&self) -> &str { "gitoxide" } fn keys(&self) -> &[&dyn Key] { &[&Self::USER_AGENT, &Self::TRACE_PACKET] } fn sub_sections(&self) -> &[&dyn Section] { &[ &Self::ALLOW, &Self::AUTHOR, &Self::CORE, &Self::COMMIT, &Self::COMMITTER, &Self::CREDENTIALS, &Self::HTTP, &Self::HTTPS, &Self::OBJECTS, &Self::SSH, &Self::USER, &Self::PATHSPEC, ] } } mod subsections { use crate::config::{ tree::{http, keys, Gitoxide, Key, Section}, Tree, }; /// The `Core` sub-section. #[derive(Copy, Clone, Default)] pub struct Core; /// The `gitoxide.allow.protocolFromUser` key. 
pub type RefsNamespace = keys::Any; impl RefsNamespace { /// Derive the negotiation algorithm identified by `name`, case-sensitively. pub fn try_into_refs_namespace( &'static self, name: std::borrow::Cow<'_, crate::bstr::BStr>, ) -> Result { gix_ref::namespace::expand(name.as_ref()) .map_err(|err| crate::config::key::Error::from_value(self, name.into_owned()).with_source(err)) } } impl Core { /// The `gitoxide.core.defaultPackCacheMemoryLimit` key. pub const DEFAULT_PACK_CACHE_MEMORY_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("defaultPackCacheMemoryLimit", &Gitoxide::CORE).with_note( "If unset, we default to 96MB memory cap for the default 64 slot LRU cache for object deltas.", ); /// The `gitoxide.core.useNsec` key. pub const USE_NSEC: keys::Boolean = keys::Boolean::new_boolean("useNsec", &Gitoxide::CORE) .with_note("A runtime version of the USE_NSEC build flag."); /// The `gitoxide.core.useStdev` key. pub const USE_STDEV: keys::Boolean = keys::Boolean::new_boolean("useStdev", &Gitoxide::CORE) .with_note("A runtime version of the USE_STDEV build flag."); /// The `gitoxide.core.protectWindows` key. pub const PROTECT_WINDOWS: keys::Boolean = keys::Boolean::new_boolean("protectWindows", &Gitoxide::CORE) .with_note("enable protections that are enabled by default on Windows"); /// The `gitoxide.core.shallowFile` key. pub const SHALLOW_FILE: keys::Path = keys::Path::new_path("shallowFile", &Gitoxide::CORE) .with_environment_override("GIT_SHALLOW_FILE") .with_deviation( "relative file paths will always be made relative to the git-common-dir, whereas `git` keeps them as is.", ); /// The `gitoxide.core.filterProcessDelay` key (default `true`). /// /// It controls whether or not long running filter driver processes can use the 'delay' capability. pub const FILTER_PROCESS_DELAY: keys::Boolean = keys::Boolean::new_boolean("filterProcessDelay", &Gitoxide::CORE); /// The `gitoxide.core.externalCommandStderr` key (default `true`). 
/// /// If `true`, the default, `stderr` of worktree filter programs, or any other git-context bearing command /// invoked will be inherited. /// If `false`, it will be suppressed completely. pub const EXTERNAL_COMMAND_STDERR: keys::Boolean = keys::Boolean::new_boolean("externalCommandStderr", &Gitoxide::CORE) .with_environment_override("GIX_EXTERNAL_COMMAND_STDERR"); /// The `gitoxide.core.refsNamespace` key. pub const REFS_NAMESPACE: RefsNamespace = keys::Any::new_with_validate("refsNamespace", &Gitoxide::CORE, super::validate::RefsNamespace) .with_environment_override("GIT_NAMESPACE"); } impl Section for Core { fn name(&self) -> &str { "core" } fn keys(&self) -> &[&dyn Key] { &[ &Self::DEFAULT_PACK_CACHE_MEMORY_LIMIT, &Self::USE_NSEC, &Self::USE_STDEV, &Self::SHALLOW_FILE, &Self::PROTECT_WINDOWS, &Self::FILTER_PROCESS_DELAY, &Self::EXTERNAL_COMMAND_STDERR, &Self::REFS_NAMESPACE, ] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `Http` sub-section. #[derive(Copy, Clone, Default)] pub struct Http; impl Http { /// The `gitoxide.http.proxy` key. pub const PROXY: keys::String = keys::String::new_string("proxy", &Gitoxide::HTTP).with_environment_override("http_proxy"); /// The `gitoxide.http.allProxy` key. pub const ALL_PROXY: keys::String = keys::String::new_string("allProxy", &Gitoxide::HTTP) .with_environment_override("all_proxy") .with_note("fallback environment is `ALL_PROXY`"); /// The `gitoxide.http.verbose` key. /// /// If set, curl will be configured to log verbosely. pub const VERBOSE: keys::Boolean = keys::Boolean::new_boolean("verbose", &Gitoxide::HTTP) .with_environment_override("GIT_CURL_VERBOSE") .with_deviation("we parse it as boolean for convenience (infallible) but git only checks the presence"); /// The `gitoxide.http.noProxy` key. 
pub const NO_PROXY: keys::String = keys::String::new_string("noProxy", &Gitoxide::HTTP) .with_environment_override("no_proxy") .with_note("fallback environment is `NO_PROXY`"); /// The `gitoxide.http.connectTimeout` key. pub const CONNECT_TIMEOUT: keys::DurationInMilliseconds = keys::DurationInMilliseconds::new_duration("connectTimeout", &Gitoxide::HTTP).with_note( "entirely new, and in milliseconds, to describe how long to wait until a connection attempt is aborted", ); /// The `gitoxide.http.sslVersionMin` key. pub const SSL_VERSION_MIN: http::SslVersion = http::SslVersion::new_ssl_version("sslVersionMin", &Gitoxide::HTTP).with_note( "entirely new to set the lower bound for the allowed ssl version range. Overwrites the min bound of `http.sslVersion` if set. Min and Max must be set to become effective.", ); /// The `gitoxide.http.sslVersionMax` key. pub const SSL_VERSION_MAX: http::SslVersion = http::SslVersion::new_ssl_version("sslVersionMax", &Gitoxide::HTTP).with_note( "entirely new to set the upper bound for the allowed ssl version range. Overwrites the max bound of `http.sslVersion` if set. Min and Max must be set to become effective.", ); /// The `gitoxide.http.sslNoVerify` key. /// /// If set, disable SSL verification. Using this is discouraged as it can lead to /// various security risks. An example where this may be needed is when an internal /// git server uses a self-signed certificate and the user accepts the associated security risks. pub const SSL_NO_VERIFY: keys::Boolean = keys::Boolean::new_boolean("sslNoVerify", &Gitoxide::HTTP) .with_environment_override("GIT_SSL_NO_VERIFY") .with_note("used to disable SSL verification. When this is enabled it takes priority over http.sslVerify"); /// The `gitoxide.http.proxyAuthMethod` key. 
pub const PROXY_AUTH_METHOD: http::ProxyAuthMethod = http::ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &Gitoxide::HTTP) .with_environment_override("GIT_HTTP_PROXY_AUTHMETHOD"); } impl Section for Http { fn name(&self) -> &str { "http" } fn keys(&self) -> &[&dyn Key] { &[ &Self::PROXY, &Self::ALL_PROXY, &Self::VERBOSE, &Self::NO_PROXY, &Self::CONNECT_TIMEOUT, &Self::SSL_VERSION_MIN, &Self::SSL_VERSION_MAX, &Self::SSL_NO_VERIFY, &Self::PROXY_AUTH_METHOD, ] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `Https` sub-section. #[derive(Copy, Clone, Default)] pub struct Https; impl Https { /// The `gitoxide.https.proxy` key. pub const PROXY: keys::String = keys::String::new_string("proxy", &Gitoxide::HTTPS) .with_environment_override("HTTPS_PROXY") .with_note("fallback environment variable is `https_proxy`"); } impl Section for Https { fn name(&self) -> &str { "https" } fn keys(&self) -> &[&dyn Key] { &[&Self::PROXY] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `allow` sub-section. #[derive(Copy, Clone, Default)] pub struct Allow; /// The `gitoxide.allow.protocolFromUser` key. pub type ProtocolFromUser = keys::Any; impl Allow { /// The `gitoxide.allow.protocolFromUser` key. pub const PROTOCOL_FROM_USER: ProtocolFromUser = ProtocolFromUser::new_with_validate( "protocolFromUser", &Gitoxide::ALLOW, super::validate::ProtocolFromUser, ) .with_environment_override("GIT_PROTOCOL_FROM_USER"); } impl Section for Allow { fn name(&self) -> &str { "allow" } fn keys(&self) -> &[&dyn Key] { &[&Self::PROTOCOL_FROM_USER] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `author` sub-section. #[derive(Copy, Clone, Default)] pub struct Author; impl Author { /// The `gitoxide.author.nameFallback` key. pub const NAME_FALLBACK: keys::Any = keys::Any::new("nameFallback", &Gitoxide::AUTHOR).with_environment_override("GIT_AUTHOR_NAME"); /// The `gitoxide.author.emailFallback` key. 
pub const EMAIL_FALLBACK: keys::Any = keys::Any::new("emailFallback", &Gitoxide::AUTHOR).with_environment_override("GIT_AUTHOR_EMAIL"); } impl Section for Author { fn name(&self) -> &str { "author" } fn keys(&self) -> &[&dyn Key] { &[&Self::NAME_FALLBACK, &Self::EMAIL_FALLBACK] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `user` sub-section. #[derive(Copy, Clone, Default)] pub struct User; impl User { /// The `gitoxide.user.emailFallback` key. pub const EMAIL_FALLBACK: keys::Any = keys::Any::new("emailFallback", &Gitoxide::USER).with_environment_override("EMAIL"); } impl Section for User { fn name(&self) -> &str { "user" } fn keys(&self) -> &[&dyn Key] { &[&Self::EMAIL_FALLBACK] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `ssh` sub-section. #[derive(Copy, Clone, Default)] pub struct Ssh; impl Ssh { /// The `gitoxide.ssh.commandWithoutShellFallback` key. pub const COMMAND_WITHOUT_SHELL_FALLBACK: keys::Executable = keys::Executable::new_executable("commandWithoutShellFallback", &Gitoxide::SSH) .with_environment_override("GIT_SSH") .with_note("is always executed without shell and treated as fallback"); } impl Section for Ssh { fn name(&self) -> &str { "ssh" } fn keys(&self) -> &[&dyn Key] { &[&Self::COMMAND_WITHOUT_SHELL_FALLBACK] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `pathspec` sub-section. #[derive(Copy, Clone, Default)] pub struct Pathspec; impl Pathspec { /// The `gitoxide.pathspec.glob` key. pub const GLOB: keys::Boolean = keys::Boolean::new_boolean("glob", &Gitoxide::PATHSPEC) .with_environment_override("GIT_GLOB_PATHSPECS") .with_note("pathspec wildcards don't match the slash character, then needing '**' to get past them"); /// The `gitoxide.pathspec.noglob` key. 
pub const NOGLOB: keys::Boolean = keys::Boolean::new_boolean("noglob", &Gitoxide::PATHSPEC) .with_environment_override("GIT_NOGLOB_PATHSPECS") .with_note("Enable literal matching for glob patterns, effectively disabling globbing"); /// The `gitoxide.pathspec.literal` key. pub const LITERAL: keys::Boolean = keys::Boolean::new_boolean("literal", &Gitoxide::PATHSPEC) .with_environment_override("GIT_LITERAL_PATHSPECS") .with_note("Make the entire spec used verbatim, the only way to get ':()name' verbatim for instance"); /// The `gitoxide.pathspec.icase` key. pub const ICASE: keys::Boolean = keys::Boolean::new_boolean("icase", &Gitoxide::PATHSPEC) .with_environment_override("GIT_ICASE_PATHSPECS") .with_note("Compare string in a case-insensitive manner"); /// The `gitoxide.pathspec.inheritIgnoreCase` key, defaulting to `true` if unspecified. /// If set, pathspecs will automatically be match case-insensitively if the underlying filesystem is configured that way. pub const INHERIT_IGNORE_CASE: keys::Boolean = keys::Boolean::new_boolean("inheritIgnoreCase", &Gitoxide::PATHSPEC) .with_note("Inherit `core.ignoreCase` for defaults in pathspecs"); /// The default value for `gitoxide.pathspec.inheritIgnoreCase`. pub const INHERIT_IGNORE_CASE_DEFAULT: bool = true; } impl Section for Pathspec { fn name(&self) -> &str { "pathspec" } fn keys(&self) -> &[&dyn Key] { &[ &Self::GLOB, &Self::NOGLOB, &Self::LITERAL, &Self::ICASE, &Self::INHERIT_IGNORE_CASE, ] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `objects` sub-section. #[derive(Copy, Clone, Default)] pub struct Objects; impl Objects { /// The `gitoxide.objects.cacheLimit` key. pub const CACHE_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("cacheLimit", &Gitoxide::OBJECTS) .with_note("If unset or 0, there is no object cache") .with_environment_override("GIX_OBJECT_CACHE_MEMORY"); /// The `gitoxide.objects.noReplace` key. 
pub const NO_REPLACE: keys::Boolean = keys::Boolean::new_boolean("noReplace", &Gitoxide::OBJECTS); /// The `gitoxide.objects.replaceRefBase` key. pub const REPLACE_REF_BASE: keys::Any = keys::Any::new("replaceRefBase", &Gitoxide::OBJECTS).with_environment_override("GIT_REPLACE_REF_BASE"); } impl Section for Objects { fn name(&self) -> &str { "objects" } fn keys(&self) -> &[&dyn Key] { &[&Self::CACHE_LIMIT, &Self::REPLACE_REF_BASE] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `committer` sub-section. #[derive(Copy, Clone, Default)] pub struct Committer; impl Committer { /// The `gitoxide.committer.nameFallback` key. pub const NAME_FALLBACK: keys::Any = keys::Any::new("nameFallback", &Gitoxide::COMMITTER).with_environment_override("GIT_COMMITTER_NAME"); /// The `gitoxide.committer.emailFallback` key. pub const EMAIL_FALLBACK: keys::Any = keys::Any::new("emailFallback", &Gitoxide::COMMITTER).with_environment_override("GIT_COMMITTER_EMAIL"); } impl Section for Committer { fn name(&self) -> &str { "committer" } fn keys(&self) -> &[&dyn Key] { &[&Self::NAME_FALLBACK, &Self::EMAIL_FALLBACK] } fn parent(&self) -> Option<&dyn Section> { Some(&Tree::GITOXIDE) } } /// The `credentials` sub-section. #[derive(Copy, Clone, Default)] pub struct Credentials; impl Credentials { /// The `gitoxide.credentials.terminalPrompt` key. pub const TERMINAL_PROMPT: keys::Boolean = keys::Boolean::new_boolean("terminalPrompt", &Gitoxide::CREDENTIALS) .with_note("This is a custom addition to provide an alternative to the respective environment variable.") .with_environment_override("GIT_TERMINAL_PROMPT"); /// The `gitoxide.credentials.helperStderr` key to control what happens with the credential helpers `stderr`. /// /// If `true`, the default, `stderr` of credential helper programs will be inherited, just like with `git`. /// If `false`, will be suppressed completely. 
pub const HELPER_STDERR: keys::Boolean = keys::Boolean::new_boolean("helperStderr", &Gitoxide::CREDENTIALS)
        .with_environment_override("GIX_CREDENTIALS_HELPER_STDERR");
}

impl Section for Credentials {
    fn name(&self) -> &str {
        "credentials"
    }

    fn keys(&self) -> &[&dyn Key] {
        &[&Self::TERMINAL_PROMPT, &Self::HELPER_STDERR]
    }

    fn parent(&self) -> Option<&dyn Section> {
        Some(&Tree::GITOXIDE)
    }
}

/// The `commit` sub-section.
#[derive(Copy, Clone, Default)]
pub struct Commit;

impl Commit {
    /// The `gitoxide.commit.authorDate` key.
    pub const AUTHOR_DATE: keys::Time =
        keys::Time::new_time("authorDate", &Gitoxide::COMMIT).with_environment_override("GIT_AUTHOR_DATE");
    /// The `gitoxide.commit.committerDate` key.
    pub const COMMITTER_DATE: keys::Time =
        keys::Time::new_time("committerDate", &Gitoxide::COMMIT).with_environment_override("GIT_COMMITTER_DATE");
}

impl Section for Commit {
    fn name(&self) -> &str {
        "commit"
    }

    fn keys(&self) -> &[&dyn Key] {
        // Previously `&[]`: the two keys declared above were not listed, making this
        // section inconsistent with every sibling section and hiding the keys from
        // tree introspection.
        &[&Self::AUTHOR_DATE, &Self::COMMITTER_DATE]
    }

    fn parent(&self) -> Option<&dyn Section> {
        Some(&Tree::GITOXIDE)
    }
}
}

pub use subsections::{Allow, Author, Commit, Committer, Core, Credentials, Http, Https, Objects, Pathspec, Ssh, User};

/// Validation implementations for keys of the `gitoxide` section.
pub mod validate {
    use std::error::Error;

    use crate::{bstr::BStr, config::tree::keys::Validate};

    pub struct ProtocolFromUser;
    impl Validate for ProtocolFromUser {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync>> {
            if value != "1" {
                return Err("GIT_PROTOCOL_FROM_USER is either unset or has the value '1'".into());
            }
            Ok(())
        }
    }

    pub struct RefsNamespace;
    impl Validate for RefsNamespace {
        fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync>> {
            super::Core::REFS_NAMESPACE.try_into_refs_namespace(value.into())?;
            Ok(())
        }
    }
}
gix-0.69.1/src/config/tree/sections/http.rs000064400000000000000000000302061046102023000166600ustar 00000000000000use crate::{
    config,
    config::tree::{keys, Http, Key, Section},
};

impl Http {
    /// The `http.sslVersion` key.
pub const SSL_VERSION: SslVersion = SslVersion::new_ssl_version("sslVersion", &config::Tree::HTTP) .with_environment_override("GIT_SSL_VERSION") .with_deviation( "accepts the new 'default' value which means to use the curl default just like the empty string does", ); /// The `http.sslVerify` key. pub const SSL_VERIFY: keys::Boolean = keys::Boolean::new_boolean("sslVerify", &config::Tree::HTTP) .with_note("also see the `gitoxide.http.sslNoVerify` key"); /// The `http.proxy` key. pub const PROXY: keys::String = keys::String::new_string("proxy", &config::Tree::HTTP).with_deviation("fails on strings with illformed UTF-8"); /// The `http.proxyAuthMethod` key. pub const PROXY_AUTH_METHOD: ProxyAuthMethod = ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &config::Tree::HTTP) .with_deviation("implemented like git, but never actually tried"); /// The `http.version` key. pub const VERSION: Version = Version::new_with_validate("version", &config::Tree::HTTP, validate::Version) .with_deviation("fails on illformed UTF-8"); /// The `http.userAgent` key. pub const USER_AGENT: keys::String = keys::String::new_string("userAgent", &config::Tree::HTTP).with_deviation("fails on illformed UTF-8"); /// The `http.extraHeader` key. pub const EXTRA_HEADER: ExtraHeader = ExtraHeader::new_with_validate("extraHeader", &config::Tree::HTTP, validate::ExtraHeader) .with_deviation("fails on illformed UTF-8, without leniency"); /// The `http.followRedirects` key. pub const FOLLOW_REDIRECTS: FollowRedirects = FollowRedirects::new_with_validate("followRedirects", &config::Tree::HTTP, validate::FollowRedirects); /// The `http.lowSpeedTime` key. pub const LOW_SPEED_TIME: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("lowSpeedTime", &config::Tree::HTTP) .with_deviation("fails on negative values"); /// The `http.lowSpeedLimit` key. 
pub const LOW_SPEED_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("lowSpeedLimit", &config::Tree::HTTP) .with_deviation("fails on negative values"); /// The `http.schannelUseSSLCAInfo` key. pub const SCHANNEL_USE_SSL_CA_INFO: keys::Boolean = keys::Boolean::new_boolean("schannelUseSSLCAInfo", &config::Tree::HTTP) .with_deviation("only used as switch internally to turn off using the sslCAInfo, unconditionally. If unset, it has no effect, whereas in `git` it defaults to false."); /// The `http.sslCAInfo` key. pub const SSL_CA_INFO: keys::Path = keys::Path::new_path("sslCAInfo", &config::Tree::HTTP).with_environment_override("GIT_SSL_CAINFO"); /// The `http.schannelCheckRevoke` key. pub const SCHANNEL_CHECK_REVOKE: keys::Boolean = keys::Boolean::new_boolean("schannelCheckRevoke", &config::Tree::HTTP); } impl Section for Http { fn name(&self) -> &str { "http" } fn keys(&self) -> &[&dyn Key] { &[ &Self::SSL_VERSION, &Self::SSL_VERIFY, &Self::PROXY, &Self::PROXY_AUTH_METHOD, &Self::VERSION, &Self::USER_AGENT, &Self::EXTRA_HEADER, &Self::FOLLOW_REDIRECTS, &Self::LOW_SPEED_TIME, &Self::LOW_SPEED_LIMIT, &Self::SCHANNEL_USE_SSL_CA_INFO, &Self::SSL_CA_INFO, &Self::SCHANNEL_CHECK_REVOKE, ] } } /// The `http.followRedirects` key. pub type FollowRedirects = keys::Any; /// The `http.extraHeader` key. pub type ExtraHeader = keys::Any; /// The `http.sslVersion` key, as well as others of the same type. pub type SslVersion = keys::Any; /// The `http.proxyAuthMethod` key, as well as others of the same type. pub type ProxyAuthMethod = keys::Any; /// The `http.version` key. 
pub type Version = keys::Any; mod key_impls { use crate::config::tree::{ http::{ProxyAuthMethod, SslVersion}, keys, Section, }; impl SslVersion { pub const fn new_ssl_version(name: &'static str, section: &'static dyn Section) -> Self { keys::Any::new_with_validate(name, section, super::validate::SslVersion) } } impl ProxyAuthMethod { pub const fn new_proxy_auth_method(name: &'static str, section: &'static dyn Section) -> Self { keys::Any::new_with_validate(name, section, super::validate::ProxyAuthMethod) } } #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] impl crate::config::tree::http::FollowRedirects { /// Convert `value` into the redirect specification, or query the same value as `boolean` /// for additional possible input values. /// /// Note that `boolean` only queries the underlying key as boolean, which is a necessity to handle /// empty booleans correctly, that is those without a value separator. pub fn try_into_follow_redirects( &'static self, value: std::borrow::Cow<'_, crate::bstr::BStr>, boolean: impl FnOnce() -> Result, gix_config::value::Error>, ) -> Result< crate::protocol::transport::client::http::options::FollowRedirects, crate::config::key::GenericErrorWithValue, > { use crate::{bstr::ByteSlice, protocol::transport::client::http::options::FollowRedirects}; Ok(if value.as_ref().as_bytes() == b"initial" { FollowRedirects::Initial } else if let Some(value) = boolean().map_err(|err| { crate::config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err) })? { if value { FollowRedirects::All } else { FollowRedirects::None } } else { FollowRedirects::Initial }) } } impl super::ExtraHeader { /// Convert a list of values into extra-headers, while failing entirely on illformed UTF-8. 
pub fn try_into_extra_header( &'static self, values: Vec>, ) -> Result, crate::config::string::Error> { let mut out = Vec::with_capacity(values.len()); for value in values { if value.is_empty() { out.clear(); } else { out.push(self.try_into_string(value)?); } } Ok(out) } } #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] impl super::Version { pub fn try_into_http_version( &'static self, value: std::borrow::Cow<'_, crate::bstr::BStr>, ) -> Result< gix_protocol::transport::client::http::options::HttpVersion, crate::config::key::GenericErrorWithValue, > { use gix_protocol::transport::client::http::options::HttpVersion; use crate::bstr::ByteSlice; Ok(match value.as_ref().as_bytes() { b"HTTP/1.1" => HttpVersion::V1_1, b"HTTP/2" => HttpVersion::V2, _ => { return Err(crate::config::key::GenericErrorWithValue::from_value( self, value.into_owned(), )) } }) } } #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] impl ProxyAuthMethod { pub fn try_into_proxy_auth_method( &'static self, value: std::borrow::Cow<'_, crate::bstr::BStr>, ) -> Result< gix_protocol::transport::client::http::options::ProxyAuthMethod, crate::config::key::GenericErrorWithValue, > { use gix_protocol::transport::client::http::options::ProxyAuthMethod; use crate::bstr::ByteSlice; Ok(match value.as_ref().as_bytes() { b"anyauth" => ProxyAuthMethod::AnyAuth, b"basic" => ProxyAuthMethod::Basic, b"digest" => ProxyAuthMethod::Digest, b"negotiate" => ProxyAuthMethod::Negotiate, b"ntlm" => ProxyAuthMethod::Ntlm, _ => { return Err(crate::config::key::GenericErrorWithValue::from_value( self, value.into_owned(), )) } }) } } #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] impl SslVersion { pub fn try_into_ssl_version( &'static self, value: std::borrow::Cow<'_, crate::bstr::BStr>, ) -> Result { use gix_protocol::transport::client::http::options::SslVersion::*; use 
crate::bstr::ByteSlice; Ok(match value.as_ref().as_bytes() { b"default" | b"" => Default, b"tlsv1" => TlsV1, b"sslv2" => SslV2, b"sslv3" => SslV3, b"tlsv1.0" => TlsV1_0, b"tlsv1.1" => TlsV1_1, b"tlsv1.2" => TlsV1_2, b"tlsv1.3" => TlsV1_3, _ => return Err(crate::config::ssl_version::Error::from_value(self, value.into_owned())), }) } } } pub mod validate { use std::error::Error; use crate::{ bstr::{BStr, ByteSlice}, config::tree::keys::Validate, }; pub struct SslVersion; impl Validate for SslVersion { fn validate(&self, _value: &BStr) -> Result<(), Box> { #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] super::Http::SSL_VERSION.try_into_ssl_version(std::borrow::Cow::Borrowed(_value))?; Ok(()) } } pub struct ProxyAuthMethod; impl Validate for ProxyAuthMethod { fn validate(&self, _value: &BStr) -> Result<(), Box> { #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] super::Http::PROXY_AUTH_METHOD.try_into_proxy_auth_method(std::borrow::Cow::Borrowed(_value))?; Ok(()) } } pub struct Version; impl Validate for Version { fn validate(&self, _value: &BStr) -> Result<(), Box> { #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] super::Http::VERSION.try_into_http_version(std::borrow::Cow::Borrowed(_value))?; Ok(()) } } pub struct ExtraHeader; impl Validate for ExtraHeader { fn validate(&self, value: &BStr) -> Result<(), Box> { value.to_str()?; Ok(()) } } pub struct FollowRedirects; impl Validate for FollowRedirects { fn validate(&self, _value: &BStr) -> Result<(), Box> { #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] super::Http::FOLLOW_REDIRECTS.try_into_follow_redirects(std::borrow::Cow::Borrowed(_value), || { gix_config::Boolean::try_from(_value).map(|b| Some(b.0)) })?; Ok(()) } } } 
gix-0.69.1/src/config/tree/sections/index.rs000064400000000000000000000042061046102023000170110ustar 00000000000000use crate::{ config, config::tree::{keys, Index, Key, Section}, }; impl Index { /// The `index.threads` key. pub const THREADS: IndexThreads = IndexThreads::new_with_validate("threads", &config::Tree::INDEX, validate::IndexThreads); /// The `index.skipHash` key. pub const SKIP_HASH: keys::Boolean = keys::Boolean::new_boolean("skipHash", &config::Tree::INDEX) .with_deviation("also used to skip the hash when reading, even if a hash exists in the index file"); } /// The `index.threads` key. pub type IndexThreads = keys::Any; mod index_threads { use std::borrow::Cow; use crate::{ bstr::BStr, config, config::{key::GenericErrorWithValue, tree::index::IndexThreads}, }; impl IndexThreads { /// Parse `value` into the amount of threads to use, with `1` being single-threaded, or `0` indicating /// to select the amount of threads, with any other number being the specific amount of threads to use. 
pub fn try_into_index_threads( &'static self, value: Cow<'_, BStr>, ) -> Result { gix_config::Integer::try_from(value.as_ref()) .ok() .and_then(|i| i.to_decimal().and_then(|i| i.try_into().ok())) .or_else(|| { gix_config::Boolean::try_from(value.as_ref()) .ok() .map(|b| if b.0 { 0 } else { 1 }) }) .ok_or_else(|| GenericErrorWithValue::from_value(self, value.into_owned())) } } } impl Section for Index { fn name(&self) -> &str { "index" } fn keys(&self) -> &[&dyn Key] { &[&Self::THREADS, &Self::SKIP_HASH] } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct IndexThreads; impl keys::Validate for IndexThreads { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Index::THREADS.try_into_index_threads(value.into())?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/init.rs000064400000000000000000000007011046102023000166410ustar 00000000000000use crate::{ config, config::tree::{keys, Init, Key, Section}, }; impl Init { /// The `init.defaultBranch` key. pub const DEFAULT_BRANCH: keys::Any = keys::Any::new("defaultBranch", &config::Tree::INIT) .with_deviation("If not set, we use `main` instead of `master`"); } impl Section for Init { fn name(&self) -> &str { "init" } fn keys(&self) -> &[&dyn Key] { &[&Self::DEFAULT_BRANCH] } } gix-0.69.1/src/config/tree/sections/mailmap.rs000064400000000000000000000007411046102023000173220ustar 00000000000000use crate::config::{ tree::{keys, Key, Mailmap, Section}, Tree, }; impl Mailmap { /// The `mailmap.blob` key pub const BLOB: keys::String = keys::String::new_string("blob", &Tree::MAILMAP); /// The `mailmap.file` key pub const FILE: keys::Path = keys::Path::new_path("file", &Tree::MAILMAP); } impl Section for Mailmap { fn name(&self) -> &str { "mailmap" } fn keys(&self) -> &[&dyn Key] { &[&Self::BLOB, &Self::FILE] } } gix-0.69.1/src/config/tree/sections/merge.rs000064400000000000000000000073521046102023000170060ustar 00000000000000use crate::config; use crate::config::tree::SubSectionRequirement; use 
crate::config::{ tree::{keys, Key, Merge, Section}, Tree, }; impl Merge { /// The `merge.renameLimit` key. pub const RENAME_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer( "renameLimit", &config::Tree::MERGE, ) .with_note( "The limit is actually squared, so 1000 stands for up to 1 million diffs if fuzzy rename tracking is enabled", ); /// The `merge.renames` key. #[cfg(feature = "merge")] pub const RENAMES: super::diff::Renames = super::diff::Renames::new_renames("renames", &config::Tree::MERGE); /// The `merge.renormalize` key pub const RENORMALIZE: keys::Boolean = keys::Boolean::new_boolean("renormalize", &Tree::MERGE); /// The `merge.default` key pub const DEFAULT: keys::String = keys::String::new_string("default", &Tree::MERGE); /// The `merge..name` key. pub const DRIVER_NAME: keys::String = keys::String::new_string("name", &config::Tree::MERGE) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `merge..driver` key. pub const DRIVER_COMMAND: keys::Program = keys::Program::new_program("driver", &config::Tree::MERGE) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `merge..recursive` key. pub const DRIVER_RECURSIVE: keys::String = keys::String::new_string("recursive", &config::Tree::MERGE) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("driver"))); /// The `merge.conflictStyle` key. #[cfg(feature = "merge")] pub const CONFLICT_STYLE: ConflictStyle = ConflictStyle::new_with_validate("conflictStyle", &config::Tree::MERGE, validate::ConflictStyle); } impl Section for Merge { fn name(&self) -> &str { "merge" } fn keys(&self) -> &[&dyn Key] { &[ &Self::RENAME_LIMIT, #[cfg(feature = "merge")] &Self::RENAMES, &Self::RENORMALIZE, &Self::DEFAULT, &Self::DRIVER_NAME, &Self::DRIVER_COMMAND, &Self::DRIVER_RECURSIVE, #[cfg(feature = "merge")] &Self::CONFLICT_STYLE, ] } } /// The `merge.conflictStyle` key. 
#[cfg(feature = "merge")] pub type ConflictStyle = keys::Any; #[cfg(feature = "merge")] mod conflict_style { use crate::{bstr::BStr, config, config::tree::sections::merge::ConflictStyle}; use gix_merge::blob::builtin_driver::text; use std::borrow::Cow; impl ConflictStyle { /// Derive the diff algorithm identified by `name`, case-insensitively. pub fn try_into_conflict_style( &'static self, name: Cow<'_, BStr>, ) -> Result { let style = if name.as_ref() == "merge" { text::ConflictStyle::Merge } else if name.as_ref() == "diff3" { text::ConflictStyle::Diff3 } else if name.as_ref() == "zdiff3" { text::ConflictStyle::ZealousDiff3 } else { return Err(config::key::GenericErrorWithValue::from_value(self, name.into_owned())); }; Ok(style) } } } #[cfg(feature = "merge")] mod validate { use crate::{ bstr::BStr, config::tree::{keys, Merge}, }; pub struct ConflictStyle; impl keys::Validate for ConflictStyle { fn validate(&self, value: &BStr) -> Result<(), Box> { Merge::CONFLICT_STYLE.try_into_conflict_style(value.into())?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/mod.rs000064400000000000000000000050551046102023000164640ustar 00000000000000#![allow(missing_docs)] /// The `author` top-level section. #[derive(Copy, Clone, Default)] pub struct Author; mod author; /// The `branch` top-level section. #[derive(Copy, Clone, Default)] pub struct Branch; pub mod branch; /// The `checkout` top-level section. #[derive(Copy, Clone, Default)] pub struct Checkout; pub mod checkout; /// The `clone` top-level section. #[derive(Copy, Clone, Default)] pub struct Clone; mod clone; /// The `committer` top-level section. #[derive(Copy, Clone, Default)] pub struct Committer; mod committer; /// The `core` top-level section. #[derive(Copy, Clone, Default)] pub struct Core; pub mod core; /// The `credential` top-level section. #[derive(Copy, Clone, Default)] pub struct Credential; pub mod credential; /// The `diff` top-level section. 
#[derive(Copy, Clone, Default)] #[cfg(feature = "blob-diff")] pub struct Diff; #[cfg(feature = "blob-diff")] pub mod diff; /// The `extension` top-level section. #[derive(Copy, Clone, Default)] pub struct Extensions; pub mod extensions; /// The `fetch` top-level section. #[derive(Copy, Clone, Default)] pub struct Fetch; pub mod fetch; /// The `gitoxide` top-level section. #[derive(Copy, Clone, Default)] pub struct Gitoxide; pub mod gitoxide; /// The `http` top-level section. #[derive(Copy, Clone, Default)] pub struct Http; pub mod http; /// The `index` top-level section. #[derive(Copy, Clone, Default)] pub struct Index; pub mod index; /// The `init` top-level section. #[derive(Copy, Clone, Default)] pub struct Init; mod init; #[derive(Copy, Clone, Default)] pub struct Mailmap; mod mailmap; #[derive(Copy, Clone, Default)] pub struct Merge; mod merge; /// The `pack` top-level section. #[derive(Copy, Clone, Default)] pub struct Pack; pub mod pack; /// The `protocol` top-level section. #[derive(Copy, Clone, Default)] pub struct Protocol; pub mod protocol; /// The `push` top-level section. #[derive(Copy, Clone, Default)] pub struct Push; pub mod push; /// The `remote` top-level section. #[derive(Copy, Clone, Default)] pub struct Remote; pub mod remote; /// The `safe` top-level section. #[derive(Copy, Clone, Default)] pub struct Safe; mod safe; /// The `ssh` top-level section. #[derive(Copy, Clone, Default)] pub struct Ssh; pub mod ssh; /// The `status` top-level section. #[derive(Copy, Clone, Default)] #[cfg(feature = "status")] pub struct Status; #[cfg(feature = "status")] pub mod status; /// The `user` top-level section. #[derive(Copy, Clone, Default)] pub struct User; mod user; /// The `url` top-level section. 
#[derive(Copy, Clone, Default)] pub struct Url; mod url; gix-0.69.1/src/config/tree/sections/pack.rs000064400000000000000000000040701046102023000166170ustar 00000000000000use crate::{ config, config::tree::{keys, Key, Pack, Section}, }; impl Pack { /// The `pack.threads` key. pub const THREADS: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer("threads", &config::Tree::PACK) .with_deviation("Leaving this key unspecified uses all available cores, instead of 1"); /// The `pack.indexVersion` key. pub const INDEX_VERSION: IndexVersion = IndexVersion::new_with_validate("indexVersion", &config::Tree::PACK, validate::IndexVersion); } /// The `pack.indexVersion` key. pub type IndexVersion = keys::Any; mod index_version { use crate::{config, config::tree::sections::pack::IndexVersion}; impl IndexVersion { /// Try to interpret an integer value as index version. pub fn try_into_index_version( &'static self, value: Result, ) -> Result { let value = value.map_err(|err| config::key::GenericError::from(self).with_source(err))?; Ok(match value { 1 => gix_pack::index::Version::V1, 2 => gix_pack::index::Version::V2, _ => return Err(config::key::GenericError::from(self)), }) } } } impl Section for Pack { fn name(&self) -> &str { "pack" } fn keys(&self) -> &[&dyn Key] { &[&Self::THREADS, &Self::INDEX_VERSION] } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct IndexVersion; impl keys::Validate for IndexVersion { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Pack::INDEX_VERSION.try_into_index_version(gix_config::Integer::try_from(value).and_then( |int| { int.to_decimal() .ok_or_else(|| gix_config::value::Error::new("integer out of range", value)) }, ))?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/protocol.rs000064400000000000000000000106671046102023000175530ustar 00000000000000use crate::{ config, config::tree::{keys, Key, Protocol, Section}, }; impl Protocol { /// The `protocol.allow` key. 
pub const ALLOW: Allow = Allow::new_with_validate("allow", &config::Tree::PROTOCOL, validate::Allow); /// The `protocol.version` key. pub const VERSION: Version = Version::new_with_validate("version", &config::Tree::PROTOCOL, validate::Version); /// The `protocol.` subsection pub const NAME_PARAMETER: NameParameter = NameParameter; } /// The `protocol.allow` key type. pub type Allow = keys::Any; /// The `protocol.version` key. pub type Version = keys::Any; #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] mod allow { use std::borrow::Cow; use crate::{bstr::BStr, config, config::tree::protocol::Allow, remote::url::scheme_permission}; impl Allow { /// Convert `value` into its respective `Allow` variant, possibly informing about the `scheme` we are looking at in the error. pub fn try_into_allow( &'static self, value: Cow<'_, BStr>, scheme: Option<&str>, ) -> Result { scheme_permission::Allow::try_from(value).map_err(|value| config::protocol::allow::Error { value, scheme: scheme.map(ToOwned::to_owned), }) } } } /// The `protocol.` parameter section. pub struct NameParameter; impl NameParameter { /// The `protocol..allow` key. pub const ALLOW: Allow = Allow::new_with_validate("allow", &Protocol::NAME_PARAMETER, validate::Allow); } impl Section for NameParameter { fn name(&self) -> &str { "" } fn keys(&self) -> &[&dyn Key] { &[&Self::ALLOW] } fn parent(&self) -> Option<&dyn Section> { Some(&config::Tree::PROTOCOL) } } impl Section for Protocol { fn name(&self) -> &str { "protocol" } fn keys(&self) -> &[&dyn Key] { &[&Self::ALLOW, &Self::VERSION] } fn sub_sections(&self) -> &[&dyn Section] { &[&Self::NAME_PARAMETER] } } mod key_impls { impl super::Version { /// Convert `value` into the corresponding protocol version, possibly applying the correct default. 
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub fn try_into_protocol_version( &'static self, value: Option>, ) -> Result { let value = match value { None => return Ok(gix_protocol::transport::Protocol::V2), Some(v) => v, }; Ok(match value { Ok(0) => gix_protocol::transport::Protocol::V0, Ok(1) => gix_protocol::transport::Protocol::V1, Ok(2) => gix_protocol::transport::Protocol::V2, Ok(other) => { return Err(crate::config::key::GenericErrorWithValue::from_value( self, other.to_string().into(), )) } Err(err) => { return Err( crate::config::key::GenericErrorWithValue::from_value(self, "unknown".into()).with_source(err), ) } }) } } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct Allow; impl keys::Validate for Allow { fn validate(&self, _value: &BStr) -> Result<(), Box> { #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] super::Protocol::ALLOW.try_into_allow(std::borrow::Cow::Borrowed(_value), None)?; Ok(()) } } pub struct Version; impl keys::Validate for Version { fn validate(&self, value: &BStr) -> Result<(), Box> { let value = gix_config::Integer::try_from(value)? .to_decimal() .ok_or_else(|| format!("integer {value} cannot be represented as integer"))?; match value { 0..=2 => Ok(()), _ => Err(format!("protocol version {value} is unknown").into()), } } } } gix-0.69.1/src/config/tree/sections/push.rs000064400000000000000000000033101046102023000166540ustar 00000000000000use crate::{ config, config::tree::{keys, Key, Push, Section}, }; impl Push { /// The `push.default` key pub const DEFAULT: Default = Default::new_with_validate("default", &config::Tree::PUSH, validate::Default); } impl Section for Push { fn name(&self) -> &str { "push" } fn keys(&self) -> &[&dyn Key] { &[&Self::DEFAULT] } } /// The `remote..tagOpt` key type. 
pub type Default = keys::Any; mod default { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::push::Default, push, }; impl Default { /// Try to interpret `value` as `push.default`. pub fn try_into_default( &'static self, value: Cow<'_, BStr>, ) -> Result { Ok(match value.as_ref().as_bytes() { b"nothing" => push::Default::Nothing, b"current" => push::Default::Current, b"upstream" | b"tracking" => push::Default::Upstream, b"simple" => push::Default::Simple, b"matching" => push::Default::Matching, _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())), }) } } } mod validate { pub struct Default; use std::{borrow::Cow, error::Error}; use crate::{bstr::BStr, config::tree::keys::Validate}; impl Validate for Default { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Push::DEFAULT.try_into_default(Cow::Borrowed(value))?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/remote.rs000064400000000000000000000072211046102023000171750ustar 00000000000000use crate::{ config, config::tree::{http, keys, Key, Remote, Section, SubSectionRequirement}, }; const NAME_PARAMETER: Option = Some(SubSectionRequirement::Parameter("name")); impl Remote { /// The `remote.pushDefault` key pub const PUSH_DEFAULT: keys::RemoteName = keys::RemoteName::new_remote_name("pushDefault", &config::Tree::REMOTE); /// The `remote..tagOpt` key pub const TAG_OPT: TagOpt = TagOpt::new_with_validate("tagOpt", &config::Tree::REMOTE, validate::TagOpt) .with_subsection_requirement(Some(SubSectionRequirement::Parameter("name"))); /// The `remote..url` key pub const URL: keys::Url = keys::Url::new_url("url", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER); /// The `remote..pushUrl` key pub const PUSH_URL: keys::Url = keys::Url::new_url("pushUrl", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER); /// The `remote..fetch` key pub const FETCH: keys::FetchRefSpec = 
keys::FetchRefSpec::new_fetch_refspec("fetch", &config::Tree::REMOTE) .with_subsection_requirement(NAME_PARAMETER); /// The `remote..push` key pub const PUSH: keys::PushRefSpec = keys::PushRefSpec::new_push_refspec("push", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER); /// The `remote..proxy` key pub const PROXY: keys::String = keys::String::new_string("proxy", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER); /// The `remote..proxyAuthMethod` key. pub const PROXY_AUTH_METHOD: http::ProxyAuthMethod = http::ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &config::Tree::REMOTE) .with_subsection_requirement(NAME_PARAMETER) .with_deviation("implemented like git, but never actually tried"); } impl Section for Remote { fn name(&self) -> &str { "remote" } fn keys(&self) -> &[&dyn Key] { &[ &Self::PUSH_DEFAULT, &Self::TAG_OPT, &Self::URL, &Self::PUSH_URL, &Self::FETCH, &Self::PUSH, &Self::PROXY, &Self::PROXY_AUTH_METHOD, ] } } /// The `remote..tagOpt` key type. pub type TagOpt = keys::Any; mod tag_opts { use std::borrow::Cow; use crate::{ bstr::{BStr, ByteSlice}, config, config::tree::remote::TagOpt, remote, }; impl TagOpt { /// Try to interpret `value` as tag option. /// /// # Note /// /// It's heavily biased towards the git command-line unfortunately, and the only /// value of its kind. Maybe in future more values will be supported which are less /// about passing them to a sub-process. 
pub fn try_into_tag_opt( &'static self, value: Cow<'_, BStr>, ) -> Result { Ok(match value.as_ref().as_bytes() { b"--tags" => remote::fetch::Tags::All, b"--no-tags" => remote::fetch::Tags::None, _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())), }) } } } pub mod validate { use std::{borrow::Cow, error::Error}; use crate::{bstr::BStr, config::tree::keys::Validate}; pub struct TagOpt; impl Validate for TagOpt { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Remote::TAG_OPT.try_into_tag_opt(Cow::Borrowed(value))?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/safe.rs000064400000000000000000000012651046102023000166220ustar 00000000000000use crate::{ config, config::tree::{keys, Key, Safe, Section}, }; impl Safe { /// The `safe.directory` key pub const DIRECTORY: keys::Any = keys::Any::new("directory", &config::Tree::SAFE); } impl Safe { /// Implements the directory filter to trust only global and system files, for use with `safe.directory`. pub fn directory_filter(meta: &gix_config::file::Metadata) -> bool { let kind = meta.source.kind(); kind == gix_config::source::Kind::System || kind == gix_config::source::Kind::Global } } impl Section for Safe { fn name(&self) -> &str { "safe" } fn keys(&self) -> &[&dyn Key] { &[&Self::DIRECTORY] } } gix-0.69.1/src/config/tree/sections/ssh.rs000064400000000000000000000037241046102023000165030ustar 00000000000000use crate::{ config, config::tree::{keys, Key, Section, Ssh}, }; impl Ssh { /// The `ssh.variant` key pub const VARIANT: Variant = Variant::new_with_validate("variant", &config::Tree::SSH, validate::Variant) .with_environment_override("GIT_SSH_VARIANT") .with_deviation("We error if a variant is chosen that we don't know, as opposed to defaulting to 'ssh'"); } /// The `ssh.variant` key. 
pub type Variant = keys::Any; #[cfg(feature = "blocking-network-client")] mod variant { use std::borrow::Cow; use crate::{bstr::BStr, config, config::tree::ssh::Variant}; impl Variant { pub fn try_into_variant( &'static self, value: Cow<'_, BStr>, ) -> Result, config::key::GenericErrorWithValue> { use gix_protocol::transport::client::ssh::ProgramKind; use crate::bstr::ByteSlice; Ok(Some(match value.as_ref().as_bytes() { b"auto" => return Ok(None), b"ssh" => ProgramKind::Ssh, b"plink" => ProgramKind::Plink, b"putty" => ProgramKind::Putty, b"tortoiseplink" => ProgramKind::TortoisePlink, b"simple" => ProgramKind::Simple, _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())), })) } } } impl Section for Ssh { fn name(&self) -> &str { "ssh" } fn keys(&self) -> &[&dyn Key] { &[&Self::VARIANT] } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct Variant; impl keys::Validate for Variant { fn validate(&self, _value: &BStr) -> Result<(), Box> { #[cfg(feature = "blocking-network-client")] super::Ssh::VARIANT.try_into_variant(_value.into())?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/status.rs000064400000000000000000000034151046102023000172260ustar 00000000000000use crate::config; use crate::config::tree::sections::Status; use crate::config::tree::{keys, Key, Section}; impl Status { /// The `status.showUntrackedFiles` key pub const SHOW_UNTRACKED_FILES: ShowUntrackedFiles = ShowUntrackedFiles::new_with_validate( "showUntrackedFiles", &config::Tree::STATUS, validate::ShowUntrackedFiles, ); } /// The `status.showUntrackedFiles` key. 
pub type ShowUntrackedFiles = keys::Any; mod show_untracked_files { use std::borrow::Cow; use crate::{bstr::BStr, config, config::tree::status::ShowUntrackedFiles, status}; impl ShowUntrackedFiles { pub fn try_into_show_untracked_files( &'static self, value: Cow<'_, BStr>, ) -> Result { use crate::bstr::ByteSlice; Ok(match value.as_ref().as_bytes() { b"no" => status::UntrackedFiles::None, b"normal" => status::UntrackedFiles::Collapsed, b"all" => status::UntrackedFiles::Files, _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())), }) } } } impl Section for Status { fn name(&self) -> &str { "status" } fn keys(&self) -> &[&dyn Key] { &[&Self::SHOW_UNTRACKED_FILES] } } mod validate { use crate::{bstr::BStr, config::tree::keys}; pub struct ShowUntrackedFiles; impl keys::Validate for ShowUntrackedFiles { fn validate(&self, value: &BStr) -> Result<(), Box> { super::Status::SHOW_UNTRACKED_FILES.try_into_show_untracked_files(value.into())?; Ok(()) } } } gix-0.69.1/src/config/tree/sections/url.rs000064400000000000000000000013611046102023000165030ustar 00000000000000use crate::{ config, config::tree::{keys, Key, Section, SubSectionRequirement, Url}, }; const BASE_PARAMETER: Option = Some(SubSectionRequirement::Parameter("base")); impl Url { /// The `url..insteadOf` key pub const INSTEAD_OF: keys::Any = keys::Any::new("insteadOf", &config::Tree::URL).with_subsection_requirement(BASE_PARAMETER); /// The `url..pushInsteadOf` key pub const PUSH_INSTEAD_OF: keys::Any = keys::Any::new("pushInsteadOf", &config::Tree::URL).with_subsection_requirement(BASE_PARAMETER); } impl Section for Url { fn name(&self) -> &str { "url" } fn keys(&self) -> &[&dyn Key] { &[&Self::INSTEAD_OF, &Self::PUSH_INSTEAD_OF] } } gix-0.69.1/src/config/tree/sections/user.rs000064400000000000000000000010141046102023000166520ustar 00000000000000use crate::{ config, config::tree::{gitoxide, keys, Key, Section, User}, }; impl User { /// The `user.name` key pub const NAME: 
keys::Any = keys::Any::new("name", &config::Tree::USER); /// The `user.email` key pub const EMAIL: keys::Any = keys::Any::new("email", &config::Tree::USER).with_fallback(&gitoxide::User::EMAIL_FALLBACK); } impl Section for User { fn name(&self) -> &str { "user" } fn keys(&self) -> &[&dyn Key] { &[&Self::NAME, &Self::EMAIL] } } gix-0.69.1/src/config/tree/traits.rs000064400000000000000000000211001046102023000153510ustar 00000000000000use crate::{ bstr::{BStr, BString, ByteVec}, config::tree::key::validate_assignment, }; /// Provide information about a configuration section. pub trait Section { /// The section name, like `remote` in `remote.origin.url`. fn name(&self) -> &str; /// The keys directly underneath it for carrying configuration values. fn keys(&self) -> &[&dyn Key]; /// The list of sub-section names, which may be empty if there are no statically known sub-sections. fn sub_sections(&self) -> &[&dyn Section] { &[] } /// The parent section if this is a statically known sub-section. fn parent(&self) -> Option<&dyn Section> { None } } /// Determine how subsections may be used with a given key, suitable for obtaining the full name for use in assignments. #[derive(Debug, Copy, Clone)] pub enum SubSectionRequirement { /// Subsections must not be used, this key can only be below a section. Never, /// The sub-section is used as parameter with the given name. Parameter(&'static str), } /// A way to link a key with other resources. #[derive(Debug, Copy, Clone)] pub enum Link { /// The environment variable of the given name will override the value of this key. EnvironmentOverride(&'static str), /// This config key is used as fallback if this key isn't set. FallbackKey(&'static dyn Key), } /// A note attached to a key. #[derive(Debug, Copy, Clone)] pub enum Note { /// A piece of information related to a key to help the user. Informative(&'static str), /// This key works differently than is described by git, explaining the deviation further. 
Deviation(&'static str), } /// A leaf-level entry in the git configuration, like `url` in `remote.origin.url`. pub trait Key: std::fmt::Debug { /// The key's name, like `url` in `remote.origin.url`. fn name(&self) -> &str; /// See if `value` is allowed as value of this key, or return a descriptive error if it is not. fn validate(&self, value: &BStr) -> Result<(), crate::config::tree::key::validate::Error>; /// The section containing this key. Git configuration has no free-standing keys, they are always underneath a section. fn section(&self) -> &dyn Section; /// The return value encodes three possible states to indicate subsection requirements /// * `None` = subsections may or may not be used, the most flexible setting. /// * `Some([Requirement][SubSectionRequirement])` = subsections must or must not be used, depending on the value fn subsection_requirement(&self) -> Option<&SubSectionRequirement> { Some(&SubSectionRequirement::Never) } /// Return the link to other resources, if available. fn link(&self) -> Option<&Link> { None } /// Return a note about this key, if available. fn note(&self) -> Option<&Note> { None } /// Return the name of an environment variable that would override this value (after following links until one is found). fn environment_override(&self) -> Option<&str> { let mut cursor = self.link()?; loop { match cursor { Link::EnvironmentOverride(name) => return Some(name), Link::FallbackKey(next) => { cursor = next.link()?; } } } } /// Return the environment override that must be set on this key. /// # Panics /// If no environment variable is set fn the_environment_override(&self) -> &str { self.environment_override() .expect("BUG: environment override must be set") } /// Produce a name that describes how the name is composed. This is `core.bare` for statically known keys, or `branch..key` /// for complex ones. 
fn logical_name(&self) -> String { let section = self.section(); let mut buf = String::new(); let parameter = if let Some(parent) = section.parent() { buf.push_str(parent.name()); buf.push('.'); None } else { self.subsection_requirement().and_then(|requirement| match requirement { SubSectionRequirement::Parameter(name) => Some(name), SubSectionRequirement::Never => None, }) }; buf.push_str(section.name()); buf.push('.'); if let Some(parameter) = parameter { buf.push('<'); buf.push_str(parameter); buf.push('>'); buf.push('.'); } buf.push_str(self.name()); buf } /// The full name of the key for use in configuration overrides, like `core.bare`, or `remote..url` if `subsection` is /// not `None`. /// May fail if this key needs a subsection, or may not have a subsection. fn full_name(&self, subsection: Option<&BStr>) -> Result { let section = self.section(); let mut buf = BString::default(); let subsection = match self.subsection_requirement() { None => subsection, Some(requirement) => match (requirement, subsection) { (SubSectionRequirement::Never, Some(_)) => { return Err(format!( "The key named '{}' cannot be used with non-static subsections.", self.logical_name() )); } (SubSectionRequirement::Parameter(_), None) => { return Err(format!( "The key named '{}' cannot be used without subsections.", self.logical_name() )) } _ => subsection, }, }; if let Some(parent) = section.parent() { buf.push_str(parent.name()); buf.push(b'.'); } buf.push_str(section.name()); buf.push(b'.'); if let Some(subsection) = subsection { debug_assert!( section.parent().is_none(), "BUG: sections with parameterized sub-sections must be top-level sections" ); buf.push_str(subsection); buf.push(b'.'); } buf.push_str(self.name()); Ok(buf) } /// Return an assignment with the keys full name to `value`, suitable for [configuration overrides][crate::open::Options::config_overrides()]. /// Note that this will fail if the key requires a subsection name. 
fn validated_assignment(&self, value: &BStr) -> Result { self.validate(value)?; let mut key = self .full_name(None) .map_err(|message| validate_assignment::Error::Name { message })?; key.push(b'='); key.push_str(value); Ok(key) } /// Return an assignment with the keys full name to `value`, suitable for [configuration overrides][crate::open::Options::config_overrides()]. /// Note that this will fail if the key requires a subsection name. fn validated_assignment_fmt( &self, value: &dyn std::fmt::Display, ) -> Result { let value = value.to_string(); self.validated_assignment(value.as_str().into()) } /// Return an assignment to `value` with the keys full name within `subsection`, suitable for [configuration overrides][crate::open::Options::config_overrides()]. /// Note that this is only valid if this key supports parameterized sub-sections, or else an error is returned. fn validated_assignment_with_subsection( &self, value: &BStr, subsection: &BStr, ) -> Result { self.validate(value)?; let mut key = self .full_name(Some(subsection)) .map_err(|message| validate_assignment::Error::Name { message })?; key.push(b'='); key.push_str(value); Ok(key) } } impl gix_config::AsKey for &dyn Key { fn as_key(&self) -> gix_config::KeyRef<'_> { self.try_as_key().expect("infallible") } fn try_as_key(&self) -> Option> { let section_name = self .section() .parent() .map_or_else(|| self.section().name(), Section::name); let subsection_name = if self.section().parent().is_some() { Some(self.section().name().into()) } else { None }; let value_name = self.name(); gix_config::KeyRef { section_name, subsection_name, value_name, } .into() } } gix-0.69.1/src/create.rs000064400000000000000000000216631046102023000131200ustar 00000000000000use std::{ fs::{self, OpenOptions}, io::Write, path::{Path, PathBuf}, }; use gix_config::parse::section; use gix_discover::DOT_GIT_DIR; /// The error used in [`into()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not obtain the current directory")] CurrentDir(#[from] std::io::Error), #[error("Could not open data at '{}'", .path.display())] IoOpen { source: std::io::Error, path: PathBuf }, #[error("Could not write data at '{}'", .path.display())] IoWrite { source: std::io::Error, path: PathBuf }, #[error("Refusing to initialize the existing '{}' directory", .path.display())] DirectoryExists { path: PathBuf }, #[error("Refusing to initialize the non-empty directory as '{}'", .path.display())] DirectoryNotEmpty { path: PathBuf }, #[error("Could not create directory at '{}'", .path.display())] CreateDirectory { source: std::io::Error, path: PathBuf }, } /// The kind of repository to create. #[derive(Debug, Copy, Clone)] pub enum Kind { /// An empty repository with a `.git` folder, setup to contain files in its worktree. WithWorktree, /// A bare repository without a worktree. Bare, } const TPL_INFO_EXCLUDE: &[u8] = include_bytes!("assets/init/info/exclude"); const TPL_HOOKS_APPLYPATCH_MSG: &[u8] = include_bytes!("assets/init/hooks/applypatch-msg.sample"); const TPL_HOOKS_COMMIT_MSG: &[u8] = include_bytes!("assets/init/hooks/commit-msg.sample"); const TPL_HOOKS_FSMONITOR_WATCHMAN: &[u8] = include_bytes!("assets/init/hooks/fsmonitor-watchman.sample"); const TPL_HOOKS_POST_UPDATE: &[u8] = include_bytes!("assets/init/hooks/post-update.sample"); const TPL_HOOKS_PRE_APPLYPATCH: &[u8] = include_bytes!("assets/init/hooks/pre-applypatch.sample"); const TPL_HOOKS_PRE_COMMIT: &[u8] = include_bytes!("assets/init/hooks/pre-commit.sample"); const TPL_HOOKS_PRE_MERGE_COMMIT: &[u8] = include_bytes!("assets/init/hooks/pre-merge-commit.sample"); const TPL_HOOKS_PRE_PUSH: &[u8] = include_bytes!("assets/init/hooks/pre-push.sample"); const TPL_HOOKS_PRE_REBASE: &[u8] = include_bytes!("assets/init/hooks/pre-rebase.sample"); const TPL_HOOKS_PREPARE_COMMIT_MSG: &[u8] = 
include_bytes!("assets/init/hooks/prepare-commit-msg.sample"); const TPL_HOOKS_DOCS_URL: &[u8] = include_bytes!("assets/init/hooks/docs.url"); const TPL_DESCRIPTION: &[u8] = include_bytes!("assets/init/description"); const TPL_HEAD: &[u8] = include_bytes!("assets/init/HEAD"); struct PathCursor<'a>(&'a mut PathBuf); struct NewDir<'a>(&'a mut PathBuf); impl PathCursor<'_> { fn at(&mut self, component: &str) -> &Path { self.0.push(component); self.0.as_path() } } impl NewDir<'_> { fn at(self, component: &str) -> Result { self.0.push(component); create_dir(self.0)?; Ok(self) } fn as_mut(&mut self) -> &mut PathBuf { self.0 } } impl Drop for NewDir<'_> { fn drop(&mut self) { self.0.pop(); } } impl Drop for PathCursor<'_> { fn drop(&mut self) { self.0.pop(); } } fn write_file(data: &[u8], path: &Path) -> Result<(), Error> { let mut file = OpenOptions::new() .write(true) .create(true) .truncate(true) .append(false) .open(path) .map_err(|e| Error::IoOpen { source: e, path: path.to_owned(), })?; file.write_all(data).map_err(|e| Error::IoWrite { source: e, path: path.to_owned(), }) } fn create_dir(p: &Path) -> Result<(), Error> { fs::create_dir_all(p).map_err(|e| Error::CreateDirectory { source: e, path: p.to_owned(), }) } /// Options for use in [`into()`]; #[derive(Copy, Clone, Default)] pub struct Options { /// If true, and the kind of repository to create has a worktree, then the destination directory must be empty. /// /// By default repos with worktree can be initialized into a non-empty repository as long as there is no `.git` directory. pub destination_must_be_empty: bool, /// If set, use these filesystem capabilities to populate the respective git-config fields. /// If `None`, the directory will be probed. pub fs_capabilities: Option, } /// Create a new `.git` repository of `kind` within the possibly non-existing `directory` /// and return its path. 
/// Note that this is a simple template-based initialization routine which should be accompanied with additional corrections /// to respect git configuration, which is accomplished by [its callers][crate::ThreadSafeRepository::init_opts()] /// that return a [Repository][crate::Repository]. pub fn into( directory: impl Into, kind: Kind, Options { fs_capabilities, destination_must_be_empty, }: Options, ) -> Result { let mut dot_git = directory.into(); let bare = matches!(kind, Kind::Bare); if bare || destination_must_be_empty { let num_entries_in_dot_git = fs::read_dir(&dot_git) .or_else(|err| { if err.kind() == std::io::ErrorKind::NotFound { fs::create_dir(&dot_git).and_then(|_| fs::read_dir(&dot_git)) } else { Err(err) } }) .map_err(|err| Error::IoOpen { source: err, path: dot_git.clone(), })? .count(); if num_entries_in_dot_git != 0 { return Err(Error::DirectoryNotEmpty { path: dot_git }); } } if !bare { dot_git.push(DOT_GIT_DIR); if dot_git.is_dir() { return Err(Error::DirectoryExists { path: dot_git }); } }; create_dir(&dot_git)?; { let mut cursor = NewDir(&mut dot_git).at("info")?; write_file(TPL_INFO_EXCLUDE, PathCursor(cursor.as_mut()).at("exclude"))?; } { let mut cursor = NewDir(&mut dot_git).at("hooks")?; for (tpl, filename) in &[ (TPL_HOOKS_DOCS_URL, "docs.url"), (TPL_HOOKS_PREPARE_COMMIT_MSG, "prepare-commit-msg.sample"), (TPL_HOOKS_PRE_REBASE, "pre-rebase.sample"), (TPL_HOOKS_PRE_PUSH, "pre-push.sample"), (TPL_HOOKS_PRE_COMMIT, "pre-commit.sample"), (TPL_HOOKS_PRE_MERGE_COMMIT, "pre-merge-commit.sample"), (TPL_HOOKS_PRE_APPLYPATCH, "pre-applypatch.sample"), (TPL_HOOKS_POST_UPDATE, "post-update.sample"), (TPL_HOOKS_FSMONITOR_WATCHMAN, "fsmonitor-watchman.sample"), (TPL_HOOKS_COMMIT_MSG, "commit-msg.sample"), (TPL_HOOKS_APPLYPATCH_MSG, "applypatch-msg.sample"), ] { write_file(tpl, PathCursor(cursor.as_mut()).at(filename))?; } } { let mut cursor = NewDir(&mut dot_git).at("objects")?; create_dir(PathCursor(cursor.as_mut()).at("info"))?; 
create_dir(PathCursor(cursor.as_mut()).at("pack"))?; } { let mut cursor = NewDir(&mut dot_git).at("refs")?; create_dir(PathCursor(cursor.as_mut()).at("heads"))?; create_dir(PathCursor(cursor.as_mut()).at("tags"))?; } for (tpl, filename) in &[(TPL_HEAD, "HEAD"), (TPL_DESCRIPTION, "description")] { write_file(tpl, PathCursor(&mut dot_git).at(filename))?; } let caps = { let mut config = gix_config::File::default(); let caps = { let caps = fs_capabilities.unwrap_or_else(|| gix_fs::Capabilities::probe(&dot_git)); let mut core = config.new_section("core", None).expect("valid section name"); core.push(key("repositoryformatversion"), Some("0".into())); core.push(key("filemode"), Some(bool(caps.executable_bit).into())); core.push(key("bare"), Some(bool(bare).into())); core.push(key("logallrefupdates"), Some(bool(!bare).into())); core.push(key("symlinks"), Some(bool(caps.symlink).into())); core.push(key("ignorecase"), Some(bool(caps.ignore_case).into())); core.push(key("precomposeunicode"), Some(bool(caps.precompose_unicode).into())); caps }; let mut cursor = PathCursor(&mut dot_git); let config_path = cursor.at("config"); std::fs::write(config_path, config.to_bstring()).map_err(|err| Error::IoWrite { source: err, path: config_path.to_owned(), })?; caps }; Ok(gix_discover::repository::Path::from_dot_git_dir( dot_git, if bare { gix_discover::repository::Kind::PossiblyBare } else { gix_discover::repository::Kind::WorkTree { linked_git_dir: None } }, &gix_fs::current_dir(caps.precompose_unicode)?, ) .expect("by now the `dot_git` dir is valid as we have accessed it")) } fn key(name: &'static str) -> section::ValueName<'static> { section::ValueName::try_from(name).expect("valid key name") } fn bool(v: bool) -> &'static str { match v { true => "true", false => "false", } } gix-0.69.1/src/diff.rs000064400000000000000000000220221046102023000125530ustar 00000000000000use gix_diff::tree::recorder::Location; pub use gix_diff::*; /// pub mod options { /// pub mod init { /// The error 
returned when instantiating [diff options](crate::diff::Options). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[cfg(feature = "blob-diff")] #[error(transparent)] RewritesConfiguration(#[from] crate::diff::new_rewrites::Error), } } } /// General diff-related options for configuring rename-tracking and blob diffs. #[derive(Debug, Copy, Clone)] pub struct Options { location: Option, #[cfg(feature = "blob-diff")] rewrites: Option, } impl Default for Options { fn default() -> Self { Options { location: Some(Location::Path), #[cfg(feature = "blob-diff")] rewrites: None, } } } #[cfg(feature = "blob-diff")] impl From for gix_diff::tree_with_rewrites::Options { fn from(opts: Options) -> Self { gix_diff::tree_with_rewrites::Options { location: opts.location, #[cfg(feature = "blob-diff")] rewrites: opts.rewrites, } } } /// Lifecycle impl Options { #[cfg(feature = "blob-diff")] pub(crate) fn from_configuration(config: &crate::config::Cache) -> Result { Ok(Options { location: Some(Location::Path), rewrites: { let (rewrites, is_configured) = config.diff_renames()?; if is_configured { rewrites } else { Some(Default::default()) } }, }) } } /// Setters impl Options { /// Do not keep track of filepaths at all, which will leave all `location` fields empty. pub fn no_locations(&mut self) -> &mut Self { self.location = Some(Location::FileName); self } /// Keep track of file-names, which makes `location` fields usable with the filename of the changed item. pub fn track_filename(&mut self) -> &mut Self { self.location = Some(Location::FileName); self } /// Keep track of the entire path of a change, relative to the repository. (default). /// /// This makes the `location` field fully usable. pub fn track_path(&mut self) -> &mut Self { self.location = Some(Location::Path); self } /// Provide `None` to disable rewrite tracking entirely, or pass `Some()` to control to /// what extent rename and copy tracking is performed. 
/// /// Note that by default, the git configuration determines rewrite tracking and git defaults are used /// if nothing is configured, which turns rename tracking with 50% similarity on, while not tracking copies at all. #[cfg(feature = "blob-diff")] pub fn track_rewrites(&mut self, renames: Option) -> &mut Self { self.rewrites = renames; self } } /// Builder impl Options { /// Provide `None` to disable rewrite tracking entirely, or pass `Some()` to control to /// what extent rename and copy tracking is performed. /// /// Note that by default, the git configuration determines rewrite tracking and git defaults are used /// if nothing is configured, which turns rename tracking with 50% similarity on, while not tracking copies at all. #[cfg(feature = "blob-diff")] pub fn with_rewrites(mut self, renames: Option) -> Self { self.rewrites = renames; self } } /// pub mod rename { /// Determine how to do rename tracking. #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum Tracking { /// Do not track renames at all, the fastest option. Disabled, /// Track renames. Renames, /// Track renames and copies. /// /// This is the most expensive option. RenamesAndCopies, } } /// #[cfg(feature = "blob-diff")] pub(crate) mod utils { use gix_diff::{rewrites::Copies, Rewrites}; use crate::{ config::{cache::util::ApplyLeniency, tree::Diff}, diff::rename::Tracking, Repository, }; /// pub mod new_rewrites { /// The error returned by [`new_rewrites()`](super::new_rewrites()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigDiffRenames(#[from] crate::config::key::GenericError), #[error(transparent)] ConfigDiffRenameLimit(#[from] crate::config::unsigned_integer::Error), } } /// pub mod resource_cache { /// The error returned by [`resource_cache()`](super::resource_cache()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] DiffAlgorithm(#[from] crate::config::diff::algorithm::Error), #[error(transparent)] WorktreeFilterOptions(#[from] crate::filter::pipeline::options::Error), #[error(transparent)] DiffDrivers(#[from] crate::config::diff::drivers::Error), #[error(transparent)] DiffPipelineOptions(#[from] crate::config::diff::pipeline_options::Error), #[error(transparent)] CommandContext(#[from] crate::config::command_context::Error), } } /// Create an instance by reading all relevant information from the `config`uration, while being `lenient` or not. /// Returns `Ok((None, false))` if nothing is configured, or `Ok((None, true))` if it's configured and disabled. /// /// Note that missing values will be defaulted similar to what git does. #[allow(clippy::result_large_err)] pub fn new_rewrites( config: &gix_config::File<'static>, lenient: bool, ) -> Result<(Option, bool), new_rewrites::Error> { new_rewrites_inner(config, lenient, &Diff::RENAMES, &Diff::RENAME_LIMIT) } pub(crate) fn new_rewrites_inner( config: &gix_config::File<'static>, lenient: bool, renames: &'static crate::config::tree::diff::Renames, rename_limit: &'static crate::config::tree::keys::UnsignedInteger, ) -> Result<(Option, bool), new_rewrites::Error> { let copies = match config .boolean(renames) .map(|value| renames.try_into_renames(value)) .transpose() .with_leniency(lenient)? { Some(renames) => match renames { Tracking::Disabled => return Ok((None, true)), Tracking::Renames => None, Tracking::RenamesAndCopies => Some(Copies::default()), }, None => return Ok((None, false)), }; let default = Rewrites::default(); Ok(( Rewrites { copies, limit: config .integer(rename_limit) .map(|value| rename_limit.try_into_usize(value)) .transpose() .with_leniency(lenient)? 
.unwrap_or(default.limit), ..default } .into(), true, )) } /// Return a low-level utility to efficiently prepare a blob-level diff operation between two resources, /// and cache these diffable versions so that matrix-like MxN diffs are efficient. /// /// `repo` is used to obtain the needed configuration values. /// `mode` determines how the diffable files will look like, and also how fast, in average, these conversions are. /// `attr_stack` is for accessing `.gitattributes` for knowing how to apply filters. Know that it's typically adjusted based on the /// `roots` - if there are no worktree roots, `.gitattributes` are also not usually read from worktrees. /// `roots` provide information about where to get diffable data from, so source and destination can either be sourced from /// a worktree, or from the object database, or both. pub fn resource_cache( repo: &Repository, mode: gix_diff::blob::pipeline::Mode, attr_stack: gix_worktree::Stack, roots: gix_diff::blob::pipeline::WorktreeRoots, ) -> Result { let diff_algo = repo.config.diff_algorithm()?; let diff_cache = gix_diff::blob::Platform::new( gix_diff::blob::platform::Options { algorithm: Some(diff_algo), skip_internal_diff_if_external_is_configured: false, }, gix_diff::blob::Pipeline::new( roots, gix_filter::Pipeline::new(repo.command_context()?, crate::filter::Pipeline::options(repo)?), repo.config.diff_drivers()?, repo.config.diff_pipeline_options()?, ), mode, attr_stack, ); Ok(diff_cache) } } #[cfg(feature = "blob-diff")] pub use utils::{new_rewrites, resource_cache}; gix-0.69.1/src/dirwalk/iter.rs000064400000000000000000000151121046102023000142450ustar 00000000000000use super::Iter; use crate::bstr::BString; use crate::util::OwnedOrStaticAtomicBool; use crate::worktree::IndexPersistedOrInMemory; use crate::{dirwalk, PathspecDetached, Repository}; use std::path::PathBuf; /// An entry of the directory walk as returned by the [iterator](Iter). pub struct Item { /// The directory entry. 
pub entry: gix_dir::Entry, /// `collapsed_directory_status` is `Some(dir_status)` if this entry was part of a directory with the given /// `dir_status` that wasn't the same as the one of `entry` and if [gix_dir::walk::Options::emit_collapsed] was /// [gix_dir::walk::CollapsedEntriesEmissionMode::OnStatusMismatch]. It will also be `Some(dir_status)` if that option /// was [gix_dir::walk::CollapsedEntriesEmissionMode::All]. pub collapsed_directory_status: Option, } impl Item { fn new(entry: gix_dir::EntryRef<'_>, collapsed_directory_status: Option) -> Self { Item { entry: entry.to_owned(), collapsed_directory_status, } } } /// The outcome of fully consumed [dirwalk iterator](Iter). pub struct Outcome { /// The index originally passed in to create the iterator. pub index: IndexPersistedOrInMemory, /// The excludes stack used for the dirwalk, for access of `.gitignore` information. pub excludes: gix_worktree::Stack, /// The pathspecs used to guide the operation, pub pathspec: PathspecDetached, /// The root actually being used for the traversal, and useful to transform the paths returned for the user. /// It's always within the [`work-dir`](Repository::work_dir). pub traversal_root: PathBuf, /// The actual result of the dirwalk. pub dirwalk: gix_dir::walk::Outcome, } /// The error returned by [Repository::dirwalk_iter()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Failed to spawn producer thread")] #[cfg(feature = "parallel")] SpawnThread(#[from] std::io::Error), #[error(transparent)] #[cfg(not(feature = "parallel"))] Dirwalk(#[from] dirwalk::Error), #[error(transparent)] #[cfg(not(feature = "parallel"))] DetachPathSpec(#[from] std::io::Error), } /// Lifecycle impl Iter { pub(crate) fn new( repo: &Repository, index: IndexPersistedOrInMemory, patterns: Vec, should_interrupt: OwnedOrStaticAtomicBool, options: dirwalk::Options, ) -> Result { #[cfg(feature = "parallel")] { let repo = repo.clone().into_sync(); let (tx, rx) = std::sync::mpsc::channel(); let handle = std::thread::Builder::new() .name("gix::dirwalk::iter::producer".into()) .spawn({ let should_interrupt = should_interrupt.clone(); move || -> Result { let repo: Repository = repo.into(); let mut collect = Collect { tx }; let out = repo.dirwalk(&index, patterns, &should_interrupt, options, &mut collect)?; Ok(Outcome { index, excludes: out.excludes.detach(), pathspec: out.pathspec.detach().map_err(|err| { dirwalk::Error::Walk(gix_dir::walk::Error::ReadDir { path: repo.git_dir().to_owned(), source: err, }) })?, traversal_root: out.traversal_root, dirwalk: out.dirwalk, }) } })?; Ok(Iter { rx_and_join: Some((rx, handle)), should_interrupt, out: None, }) } #[cfg(not(feature = "parallel"))] { let mut collect = Collect { items: Vec::new() }; let out = repo.dirwalk(&index, patterns, &should_interrupt, options, &mut collect)?; let out = Outcome { index, excludes: out.excludes.detach(), pathspec: out.pathspec.detach()?, traversal_root: out.traversal_root, dirwalk: out.dirwalk, }; Ok(Iter { items: collect.items.into_iter(), out: Some(out), }) } } } /// Access impl Iter { /// Return the outcome of the iteration, or `None` if the iterator isn't fully consumed. 
pub fn outcome_mut(&mut self) -> Option<&mut Outcome> { self.out.as_mut() } /// Turn the iterator into the iteration outcome, which is `None` on error or if the iteration /// isn't complete. pub fn into_outcome(mut self) -> Option { self.out.take() } } impl Iterator for Iter { type Item = Result; fn next(&mut self) -> Option { #[cfg(feature = "parallel")] { let (rx, _join) = self.rx_and_join.as_ref()?; match rx.recv().ok() { Some(item) => Some(Ok(item)), None => { let (_rx, handle) = self.rx_and_join.take()?; match handle.join().expect("no panic") { Ok(out) => { self.out = Some(out); None } Err(err) => Some(Err(err)), } } } } #[cfg(not(feature = "parallel"))] self.items.next().map(Ok) } } #[cfg(feature = "parallel")] impl Drop for Iter { fn drop(&mut self) { crate::util::parallel_iter_drop(self.rx_and_join.take(), &self.should_interrupt); } } struct Collect { #[cfg(feature = "parallel")] tx: std::sync::mpsc::Sender, #[cfg(not(feature = "parallel"))] items: Vec, } impl gix_dir::walk::Delegate for Collect { fn emit( &mut self, entry: gix_dir::EntryRef<'_>, collapsed_directory_status: Option, ) -> gix_dir::walk::Action { // NOTE: we assume that the receiver triggers interruption so the operation will stop if the receiver is down. let item = Item::new(entry, collapsed_directory_status); #[cfg(feature = "parallel")] self.tx.send(item).ok(); #[cfg(not(feature = "parallel"))] self.items.push(item); gix_dir::walk::Action::Continue } } gix-0.69.1/src/dirwalk/mod.rs000064400000000000000000000071611046102023000140660ustar 00000000000000use gix_dir::walk::{CollapsedEntriesEmissionMode, EmissionMode, ForDeletionMode}; use crate::{config, AttributeStack, Pathspec}; use std::path::PathBuf; mod options; /// pub mod iter; /// An iterator for entries in a directory walk. 
/// /// ### Parallel Operation /// /// Note that without the `parallel` feature, the iterator becomes 'serial', which means that all entries will be traversed /// in advance and it cannot be interrupted unless the interrupt flag is set from another thread. /// /// It's a crutch that is just there to make single-threaded applications possible at all, as it's not really an iterator /// anymore. If this matters, better run [Repository::dirwalk()](crate::Repository::dirwalk) by hand as it provides all /// control one would need, just not as an iterator. /// /// Also, even with `parallel` set, the first call to `next()` will block until there is an item available, without a chance /// to interrupt unless the interrupt flag is set from another thread. pub struct Iter { #[cfg(feature = "parallel")] #[allow(clippy::type_complexity)] rx_and_join: Option<( std::sync::mpsc::Receiver, std::thread::JoinHandle>, )>, #[cfg(feature = "parallel")] should_interrupt: crate::util::OwnedOrStaticAtomicBool, /// Without parallelization, the iterator has to buffer all changes in advance. #[cfg(not(feature = "parallel"))] items: std::vec::IntoIter, /// The outcome of the operation, only available once the operation has ended. out: Option, } /// The error returned by [dirwalk()](crate::Repository::dirwalk()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Walk(#[from] gix_dir::walk::Error), #[error("A working tree is required to perform a directory walk")] MissingWorkDir, #[error(transparent)] Excludes(#[from] config::exclude_stack::Error), #[error(transparent)] Pathspec(#[from] crate::pathspec::init::Error), #[error(transparent)] Prefix(#[from] gix_path::realpath::Error), #[error(transparent)] FilesystemOptions(#[from] config::boolean::Error), #[error("Could not list worktrees to assure they are no candidates for deletion")] ListWorktrees(#[from] std::io::Error), } /// The outcome of the [dirwalk()](crate::Repository::dirwalk). 
pub struct Outcome<'repo> { /// The excludes stack used for the dirwalk, for access of `.gitignore` information. pub excludes: AttributeStack<'repo>, /// The pathspecs used to guide the operation, pub pathspec: Pathspec<'repo>, /// The root actually being used for the traversal, and useful to transform the paths returned for the user. /// It's always within the [`work-dir`](crate::Repository::work_dir). pub traversal_root: PathBuf, /// The actual result of the dirwalk. pub dirwalk: gix_dir::walk::Outcome, } /// Options for use in the [`Repository::dirwalk()`](crate::Repository::dirwalk()) function. /// /// Note that all values start out disabled. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] pub struct Options { precompose_unicode: bool, ignore_case: bool, recurse_repositories: bool, emit_pruned: bool, emit_ignored: Option, for_deletion: Option, emit_tracked: bool, emit_untracked: EmissionMode, emit_empty_directories: bool, classify_untracked_bare_repositories: bool, emit_collapsed: Option, symlinks_to_directories_are_ignored_like_directories: bool, pub(crate) empty_patterns_match_prefix: bool, } gix-0.69.1/src/dirwalk/options.rs000064400000000000000000000216711046102023000150040ustar 00000000000000use crate::dirwalk::Options; use gix_dir::walk::{CollapsedEntriesEmissionMode, EmissionMode, ForDeletionMode}; /// Construction impl Options { pub(crate) fn from_fs_caps(caps: gix_fs::Capabilities) -> Self { Self { precompose_unicode: caps.precompose_unicode, ignore_case: caps.ignore_case, recurse_repositories: false, emit_pruned: false, emit_ignored: None, for_deletion: None, emit_tracked: false, emit_untracked: Default::default(), emit_empty_directories: false, classify_untracked_bare_repositories: false, emit_collapsed: None, empty_patterns_match_prefix: false, symlinks_to_directories_are_ignored_like_directories: false, } } } impl From for gix_dir::walk::Options<'static> { fn from(v: Options) -> Self { gix_dir::walk::Options { 
precompose_unicode: v.precompose_unicode, ignore_case: v.ignore_case, recurse_repositories: v.recurse_repositories, emit_pruned: v.emit_pruned, emit_ignored: v.emit_ignored, for_deletion: v.for_deletion, emit_tracked: v.emit_tracked, emit_untracked: v.emit_untracked, emit_empty_directories: v.emit_empty_directories, classify_untracked_bare_repositories: v.classify_untracked_bare_repositories, emit_collapsed: v.emit_collapsed, symlinks_to_directories_are_ignored_like_directories: v .symlinks_to_directories_are_ignored_like_directories, worktree_relative_worktree_dirs: None, } } } impl Options { /// If `true`, default `false`, pathspecs and the directory walk itself will be setup to use the [prefix](crate::Repository::prefix) /// if patterns are empty. /// /// This means that the directory walk will be limited to only what's inside the [repository prefix](crate::Repository::prefix). /// By default, the directory walk will see everything. pub fn empty_patterns_match_prefix(mut self, toggle: bool) -> Self { self.empty_patterns_match_prefix = toggle; self } /// Like [`empty_patterns_match_prefix()`](Self::empty_patterns_match_prefix), but only requires a mutably borrowed instance. pub fn set_empty_patterns_match_prefix(&mut self, toggle: bool) -> &mut Self { self.empty_patterns_match_prefix = toggle; self } /// If `toggle` is `true`, we will stop figuring out if any directory that is a candidate for recursion is also a nested repository, /// which saves time but leads to recurse into it. If `false`, nested repositories will not be traversed. pub fn recurse_repositories(mut self, toggle: bool) -> Self { self.recurse_repositories = toggle; self } /// Like [`recurse_repositories()`](Self::recurse_repositories), but only requires a mutably borrowed instance. 
pub fn set_recurse_repositories(&mut self, toggle: bool) -> &mut Self { self.recurse_repositories = toggle; self } /// If `toggle` is `true`, entries that are pruned and whose [Kind](gix_dir::entry::Kind) is known will be emitted. pub fn emit_pruned(mut self, toggle: bool) -> Self { self.emit_pruned = toggle; self } /// Like [`emit_pruned()`](Self::emit_pruned), but only requires a mutably borrowed instance. pub fn set_emit_pruned(&mut self, toggle: bool) -> &mut Self { self.emit_pruned = toggle; self } /// If `value` is `Some(mode)`, entries that are ignored will be emitted according to the given `mode`. /// If `None`, ignored entries will not be emitted at all. pub fn emit_ignored(mut self, value: Option) -> Self { self.emit_ignored = value; self } /// Like [`emit_ignored()`](Self::emit_ignored), but only requires a mutably borrowed instance. pub fn set_emit_ignored(&mut self, value: Option) -> &mut Self { self.emit_ignored = value; self } /// When the walk is for deletion, `value` must be `Some(_)` to assure we don't collapse directories that have precious files in /// them, and otherwise assure that no entries are observable that shouldn't be deleted. /// If `None`, precious files are treated like expendable files, which is usually what you want when displaying them /// for addition to the repository, and the collapse of folders can be more generous in relation to ignored files. pub fn for_deletion(mut self, value: Option) -> Self { self.for_deletion = value; self } /// Like [`for_deletion()`](Self::for_deletion), but only requires a mutably borrowed instance. pub fn set_for_deletion(&mut self, value: Option) -> &mut Self { self.for_deletion = value; self } /// If `toggle` is `true`, we will also emit entries for tracked items. Otherwise these will remain 'hidden', /// even if a pathspec directly refers to it. 
pub fn emit_tracked(mut self, toggle: bool) -> Self { self.emit_tracked = toggle; self } /// Like [`emit_tracked()`](Self::emit_tracked), but only requires a mutably borrowed instance. pub fn set_emit_tracked(&mut self, toggle: bool) -> &mut Self { self.emit_tracked = toggle; self } /// Controls the way untracked files are emitted. By default, this is happening immediately and without any simplification. pub fn emit_untracked(mut self, mode: EmissionMode) -> Self { self.emit_untracked = mode; self } /// Like [`emit_untracked()`](Self::emit_untracked), but only requires a mutably borrowed instance. pub fn set_emit_untracked(&mut self, mode: EmissionMode) -> &mut Self { self.emit_untracked = mode; self } /// If `toggle` is `true`, emit empty directories as well. Note that a directory also counts as empty if it has any /// amount or depth of nested subdirectories, as long as none of them includes a file. /// Thus, this makes leaf-level empty directories visible, as those don't have any content. pub fn emit_empty_directories(mut self, toggle: bool) -> Self { self.emit_empty_directories = toggle; self } /// Like [`emit_empty_directories()`](Self::emit_empty_directories), but only requires a mutably borrowed instance. pub fn set_emit_empty_directories(&mut self, toggle: bool) -> &mut Self { self.emit_empty_directories = toggle; self } /// If `toggle` is `true`, we will not only find non-bare repositories in untracked directories, but also bare ones. /// /// Note that this is very costly, but without it, bare repositories will appear like untracked directories when collapsed, /// and they will be recursed into. pub fn classify_untracked_bare_repositories(mut self, toggle: bool) -> Self { self.classify_untracked_bare_repositories = toggle; self } /// Like [`classify_untracked_bare_repositories()`](Self::classify_untracked_bare_repositories), but only requires a mutably borrowed instance. 
pub fn set_classify_untracked_bare_repositories(&mut self, toggle: bool) -> &mut Self { self.classify_untracked_bare_repositories = toggle; self } /// Control whether entries that are in an about-to-be collapsed directory will be emitted. The default is `None`, /// so entries in a collapsed directory are not observable. pub fn emit_collapsed(mut self, value: Option) -> Self { self.emit_collapsed = value; self } /// Like [`emit_collapsed()`](Self::emit_collapsed), but only requires a mutably borrowed instance. pub fn set_emit_collapsed(&mut self, value: Option) -> &mut Self { self.emit_collapsed = value; self } /// This is a `libgit2` compatibility flag, and if enabled, symlinks that point to directories will be considered a directory /// when checking for exclusion. /// /// This is relevant if `src2` points to `src`, and is excluded with `src2/`. If `false`, `src2` will not be excluded, /// if `true` it will be excluded as the symlink is considered a directory. /// /// In other words, for Git compatibility this flag should be `false`, the default, for `git2` compatibility it should be `true`. pub fn symlinks_to_directories_are_ignored_like_directories(mut self, toggle: bool) -> Self { self.symlinks_to_directories_are_ignored_like_directories = toggle; self } /// Like [`symlinks_to_directories_are_ignored_like_directories()`](Self::symlinks_to_directories_are_ignored_like_directories), /// but only requires a mutably borrowed instance. pub fn set_symlinks_to_directories_are_ignored_like_directories(&mut self, toggle: bool) -> &mut Self { self.symlinks_to_directories_are_ignored_like_directories = toggle; self } } gix-0.69.1/src/discover.rs000064400000000000000000000111701046102023000134630ustar 00000000000000#![allow(clippy::result_large_err)] use std::path::Path; pub use gix_discover::*; use crate::{bstr::BString, ThreadSafeRepository}; /// The error returned by [`crate::discover()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Discover(#[from] upwards::Error), #[error(transparent)] Open(#[from] crate::open::Error), } impl ThreadSafeRepository { /// Try to open a git repository in `directory` and search upwards through its parents until one is found, /// using default trust options which matters in case the found repository isn't owned by the current user. pub fn discover(directory: impl AsRef) -> Result { Self::discover_opts(directory, Default::default(), Default::default()) } /// Try to open a git repository in `directory` and search upwards through its parents until one is found, /// while applying `options`. Then use the `trust_map` to determine which of our own repository options to use /// for instantiations. /// /// Note that [trust overrides](crate::open::Options::with()) in the `trust_map` are not effective here and we will /// always override it with the determined trust value. This is a precaution as the API user is unable to actually know /// if the directory that is discovered can indeed be trusted (or else they'd have to implement the discovery themselves /// and be sure that no attacker ever gets access to a directory structure. The cost of this is a permission check, which /// seems acceptable). pub fn discover_opts( directory: impl AsRef, options: upwards::Options<'_>, trust_map: gix_sec::trust::Mapping, ) -> Result { let _span = gix_trace::coarse!("ThreadSafeRepository::discover()"); let (path, trust) = upwards_opts(directory.as_ref(), options)?; let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories(); let mut options = trust_map.into_value_by_level(trust); options.git_dir_trust = trust.into(); // Note that we will adjust the `current_dir` later so it matches the value of `core.precomposeUnicode`. 
options.current_dir = Some(gix_fs::current_dir(false).map_err(upwards::Error::CurrentDir)?); Self::open_from_paths(git_dir, worktree_dir, options).map_err(Into::into) } /// Try to open a git repository directly from the environment. /// If that fails, discover upwards from `directory` until one is found, /// while applying discovery options from the environment. pub fn discover_with_environment_overrides(directory: impl AsRef) -> Result { Self::discover_with_environment_overrides_opts(directory, Default::default(), Default::default()) } /// Try to open a git repository directly from the environment, which reads `GIT_DIR` /// if it is set. If unset, discover upwards from `directory` until one is found, /// while applying `options` with overrides from the environment which includes: /// /// - `GIT_DISCOVERY_ACROSS_FILESYSTEM` /// - `GIT_CEILING_DIRECTORIES` /// /// Finally, use the `trust_map` to determine which of our own repository options to use /// based on the trust level of the effective repository directory. /// /// ### Note /// /// Consider to set [`match_ceiling_dir_or_error = false`](gix_discover::upwards::Options::match_ceiling_dir_or_error) /// to allow discovery if an outside environment variable sets non-matching ceiling directories for greater /// compatibility with Git. 
pub fn discover_with_environment_overrides_opts( directory: impl AsRef, mut options: upwards::Options<'_>, trust_map: gix_sec::trust::Mapping, ) -> Result { fn apply_additional_environment(mut opts: upwards::Options<'_>) -> upwards::Options<'_> { use crate::bstr::ByteVec; if let Some(cross_fs) = std::env::var_os("GIT_DISCOVERY_ACROSS_FILESYSTEM") .and_then(|v| Vec::from_os_string(v).ok().map(BString::from)) { if let Ok(b) = gix_config::Boolean::try_from(cross_fs.as_ref()) { opts.cross_fs = b.into(); } } opts } if std::env::var_os("GIT_DIR").is_some() { return Self::open_with_environment_overrides(directory.as_ref(), trust_map).map_err(Error::Open); } options = apply_additional_environment(options.apply_environment()); Self::discover_opts(directory, options, trust_map) } } gix-0.69.1/src/env.rs000064400000000000000000000143601046102023000124410ustar 00000000000000//! Utilities to handle program arguments and other values of interest. use std::ffi::{OsStr, OsString}; use crate::bstr::{BString, ByteVec}; /// Returns the name of the agent for identification towards a remote server as statically known when compiling the crate. /// Suitable for both `git` servers and HTTP servers, and used unless configured otherwise. /// /// Note that it's meant to be used in conjunction with [`protocol::agent()`][crate::protocol::agent()] which /// prepends `git/`. pub fn agent() -> &'static str { concat!("oxide-", env!("CARGO_PKG_VERSION")) } /// Equivalent to `std::env::args_os()`, but with precomposed unicode on MacOS and other apple platforms. /// It does not change the input arguments on any other platform. /// /// Note that this ignores `core.precomposeUnicode` as git-config isn't available yet. It's default enabled in modern git though, /// and generally decomposed unicode is nothing one would want in a git repository. 
pub fn args_os() -> impl Iterator { args_os_opt(cfg!(target_vendor = "apple")) } /// Like [`args_os()`], but with the `precompose_unicode` parameter akin to `core.precomposeUnicode` in the Git configuration. pub fn args_os_opt(precompose_unicode: bool) -> impl Iterator { std::env::args_os().map(move |arg| { if precompose_unicode { gix_utils::str::precompose_os_string(arg.into()).into_owned() } else { arg } }) } /// Convert the given `input` into a `BString`, useful for usage in `clap`. pub fn os_str_to_bstring(input: &OsStr) -> Option { Vec::from_os_string(input.into()).map(Into::into).ok() } /// Utilities to collate errors of common operations into one error type. /// /// This is useful as this type can present an API to answer common questions, like whether a network request seems to have failed /// spuriously or if the underlying repository seems to be corrupted. /// Error collation supports all operations, including opening the repository. /// /// ### Usage /// /// The caller may define a function that specifies the result type as `Result` to collect /// errors into a well-known error type which provides an API for simple queries. pub mod collate { /// pub mod fetch { /// An error which combines all possible errors when opening a repository, finding remotes and using them to fetch. /// /// It can be used to detect if the repository is likely be corrupted in some way, or if the fetch failed spuriously /// and thus can be retried. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Open(#[from] crate::open::Error), #[error(transparent)] FindExistingReference(#[from] crate::reference::find::existing::Error), #[error(transparent)] RemoteInit(#[from] crate::remote::init::Error), #[error(transparent)] FindExistingRemote(#[from] crate::remote::find::existing::Error), #[error(transparent)] #[cfg(feature = "credentials")] CredentialHelperConfig(#[from] crate::config::credential_helpers::Error), #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] #[error(transparent)] Connect(#[from] crate::remote::connect::Error), #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] #[error(transparent)] PrepareFetch(#[from] crate::remote::fetch::prepare::Error), #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] #[error(transparent)] Fetch(#[from] crate::remote::fetch::Error), #[error(transparent)] Other(E), } #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] impl crate::protocol::transport::IsSpuriousError for Error where E: std::error::Error + Send + Sync + 'static, { fn is_spurious(&self) -> bool { match self { Error::Open(_) | Error::CredentialHelperConfig(_) | Error::RemoteInit(_) | Error::FindExistingReference(_) | Error::FindExistingRemote(_) | Error::Other(_) => false, Error::Connect(err) => err.is_spurious(), Error::PrepareFetch(err) => err.is_spurious(), Error::Fetch(err) => err.is_spurious(), } } } /// Queries impl Error where E: std::error::Error + Send + Sync + 'static, { /// Return true if repository corruption caused the failure. pub fn is_corrupted(&self) -> bool { match self { Error::Open(crate::open::Error::NotARepository { .. 
} | crate::open::Error::Config(_)) => true, #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] Error::PrepareFetch(crate::remote::fetch::prepare::Error::RefMap( // Configuration couldn't be accessed or was incomplete. crate::remote::ref_map::Error::GatherTransportConfig { .. } | crate::remote::ref_map::Error::ConfigureCredentials(_), )) => true, // Maybe the value of the configuration was corrupted, or a file couldn't be removed. #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] Error::Fetch( crate::remote::fetch::Error::PackThreads(_) | crate::remote::fetch::Error::PackIndexVersion(_) | crate::remote::fetch::Error::RemovePackKeepFile { .. } | crate::remote::fetch::Error::Fetch(gix_protocol::fetch::Error::Negotiate(_)), ) => true, _ => false, } } } } } gix-0.69.1/src/ext/mod.rs000064400000000000000000000005131046102023000132230ustar 00000000000000pub use object_id::ObjectIdExt; pub use reference::ReferenceExt; #[cfg(feature = "revision")] pub use rev_spec::RevSpecExt; #[cfg(feature = "blob-diff")] pub use tree::TreeDiffChangeExt; pub use tree::{TreeEntryExt, TreeEntryRefExt, TreeIterExt}; mod object_id; mod reference; #[cfg(feature = "revision")] mod rev_spec; mod tree; gix-0.69.1/src/ext/object_id.rs000064400000000000000000000016231046102023000143710ustar 00000000000000use gix_hash::ObjectId; pub trait Sealed {} pub type AncestorsIter = gix_traverse::commit::Simple bool>; /// An extension trait to add functionality to [`ObjectId`]s. pub trait ObjectIdExt: Sealed { /// Create an iterator over the ancestry of the commits reachable from this id, which must be a commit. fn ancestors(self, find: Find) -> AncestorsIter where Find: gix_object::Find; /// Infuse this object id `repo` access. 
fn attach(self, repo: &crate::Repository) -> crate::Id<'_>; } impl Sealed for ObjectId {} impl ObjectIdExt for ObjectId { fn ancestors(self, find: Find) -> AncestorsIter where Find: gix_object::Find, { gix_traverse::commit::Simple::new(Some(self), find) } fn attach(self, repo: &crate::Repository) -> crate::Id<'_> { crate::Id::from_id(self, repo) } } gix-0.69.1/src/ext/reference.rs000064400000000000000000000006671046102023000144140ustar 00000000000000/// Extensions for [references][gix_ref::Reference]. pub trait ReferenceExt { /// Attach [`Repository`][crate::Repository] to the given reference. It can be detached later with [`detach()]`. fn attach(self, repo: &crate::Repository) -> crate::Reference<'_>; } impl ReferenceExt for gix_ref::Reference { fn attach(self, repo: &crate::Repository) -> crate::Reference<'_> { crate::Reference::from_ref(self, repo) } } gix-0.69.1/src/ext/rev_spec.rs000064400000000000000000000010261046102023000142520ustar 00000000000000/// Extensions for [revision specifications][gix_revision::Spec]. pub trait RevSpecExt { /// Attach [`Repository`][crate::Repository] to the given rev-spec. fn attach(self, repo: &crate::Repository) -> crate::revision::Spec<'_>; } impl RevSpecExt for gix_revision::Spec { fn attach(self, repo: &crate::Repository) -> crate::revision::Spec<'_> { crate::revision::Spec { inner: self, path: None, first_ref: None, second_ref: None, repo, } } } gix-0.69.1/src/ext/tree.rs000064400000000000000000000054221046102023000134070ustar 00000000000000use std::borrow::BorrowMut; use gix_object::TreeRefIter; use gix_traverse::tree::breadthfirst; pub trait Sealed {} /// An extension trait for tree iterators pub trait TreeIterExt: Sealed { /// Traverse this tree with `state` being provided to potentially reuse allocations, and `find` being a function to lookup trees /// and turn them into iterators. /// /// The `delegate` implements a way to store details about the traversal to allow paying only for what's actually used. 
/// Since it is expected to store the operation result, _unit_ is returned. fn traverse( &self, state: StateMut, objects: Find, delegate: &mut V, ) -> Result<(), breadthfirst::Error> where Find: gix_object::Find, StateMut: BorrowMut, V: gix_traverse::tree::Visit; } impl Sealed for TreeRefIter<'_> {} impl TreeIterExt for TreeRefIter<'_> { fn traverse( &self, state: StateMut, objects: Find, delegate: &mut V, ) -> Result<(), breadthfirst::Error> where Find: gix_object::Find, StateMut: BorrowMut, V: gix_traverse::tree::Visit, { breadthfirst(*self, state, objects, delegate) } } /// Extensions for [EntryRef](gix_object::tree::EntryRef). pub trait TreeEntryRefExt<'a>: 'a { /// Attach [`repo`](crate::Repository) to the given tree entry. It can be detached later with `detach()`. fn attach<'repo>(self, repo: &'repo crate::Repository) -> crate::object::tree::EntryRef<'repo, 'a>; } impl<'a> TreeEntryRefExt<'a> for gix_object::tree::EntryRef<'a> { fn attach<'repo>(self, repo: &'repo crate::Repository) -> crate::object::tree::EntryRef<'repo, 'a> { crate::object::tree::EntryRef { inner: self, repo } } } /// Extensions for [Entry](gix_object::tree::Entry). pub trait TreeEntryExt { /// Attach [`repo`](crate::Repository) to the given tree entry. It can be detached later with `detach()`. fn attach(self, repo: &crate::Repository) -> crate::object::tree::Entry<'_>; } impl TreeEntryExt for gix_object::tree::Entry { fn attach(self, repo: &crate::Repository) -> crate::object::tree::Entry<'_> { crate::object::tree::Entry { inner: self, repo } } } /// Extensions for [Change](gix_diff::tree_with_rewrites::Change). #[cfg(feature = "blob-diff")] pub trait TreeDiffChangeExt { /// Attach [`old_repo`](crate::Repository) and `new_repo` to current instance. It can be detached later with `detach()`. /// Note that both repositories are usually the same. 
fn attach<'old, 'new>( &self, old_repo: &'old crate::Repository, new_repo: &'new crate::Repository, ) -> crate::object::tree::diff::Change<'_, 'old, 'new>; } gix-0.69.1/src/filter.rs000064400000000000000000000224011046102023000131310ustar 00000000000000//! lower-level access to filters which are applied to create working tree checkouts or to 'clean' working tree contents for storage in git. use std::borrow::Cow; pub use gix_filter as plumbing; use gix_object::Find; use crate::{ bstr::BStr, config::{ cache::util::{ApplyLeniency, ApplyLeniencyDefaultValue}, tree::Core, }, Repository, }; /// pub mod pipeline { /// pub mod options { use crate::{bstr::BString, config}; /// The error returned by [Pipeline::options()][crate::filter::Pipeline::options()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] CheckRoundTripEncodings(#[from] config::encoding::Error), #[error(transparent)] SafeCrlf(#[from] config::key::GenericErrorWithValue), #[error("Could not interpret 'filter.{name}.required' configuration")] Driver { name: BString, source: gix_config::value::Error, }, #[error(transparent)] CommandContext(#[from] config::command_context::Error), } } /// pub mod convert_to_git { /// The error returned by [Pipeline::convert_to_git()][crate::filter::Pipeline::convert_to_git()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Failed to prime attributes to the path at which the data resides")] WorktreeCacheAtPath(#[from] std::io::Error), #[error(transparent)] Convert(#[from] gix_filter::pipeline::convert::to_git::Error), } } /// pub mod convert_to_worktree { /// The error returned by [Pipeline::convert_to_worktree()][crate::filter::Pipeline::convert_to_worktree()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Failed to prime attributes to the path at which the data resides")] WorktreeCacheAtPath(#[from] std::io::Error), #[error(transparent)] Convert(#[from] gix_filter::pipeline::convert::to_worktree::Error), } } } /// A git pipeline for transforming data *to-git* and *to-worktree*, based /// [on git configuration and attributes](https://git-scm.com/docs/gitattributes). #[derive(Clone)] pub struct Pipeline<'repo> { inner: gix_filter::Pipeline, cache: gix_worktree::Stack, repo: &'repo Repository, } /// Lifecycle impl<'repo> Pipeline<'repo> { /// Extract options from `repo` that are needed to properly drive a standard git filter pipeline. pub fn options(repo: &'repo Repository) -> Result { let config = &repo.config.resolved; let encodings = Core::CHECK_ROUND_TRIP_ENCODING.try_into_encodings(config.string("core.checkRoundtripEncoding"))?; let safe_crlf = config .string("core.safecrlf") .map(|value| Core::SAFE_CRLF.try_into_safecrlf(value)) .transpose() .map(Option::unwrap_or_default) .with_lenient_default_value( repo.config.lenient_config, // in lenient mode, we prefer the safe option, instead of just (trying) to output warnings. gix_filter::pipeline::CrlfRoundTripCheck::Fail, )?; let auto_crlf = config .string("core.autocrlf") .map(|value| Core::AUTO_CRLF.try_into_autocrlf(value)) .transpose() .with_leniency(repo.config.lenient_config)? .unwrap_or_default(); let eol = config .string("core.eol") .map(|value| Core::EOL.try_into_eol(value)) .transpose()?; let drivers = extract_drivers(repo)?; Ok(gix_filter::pipeline::Options { drivers, eol_config: gix_filter::eol::Configuration { auto_crlf, eol }, encodings_with_roundtrip_check: encodings, crlf_roundtrip_check: safe_crlf, object_hash: repo.object_hash(), }) } /// Create a new instance by extracting all necessary information and configuration from a `repo` along with `cache` for accessing /// attributes. 
The `index` is used for some filters which may access it under very specific circumstances. pub fn new(repo: &'repo Repository, cache: gix_worktree::Stack) -> Result { let pipeline = gix_filter::Pipeline::new(repo.command_context()?, Self::options(repo)?); Ok(Pipeline { inner: pipeline, cache, repo, }) } /// Detach the repository and obtain the individual functional parts. pub fn into_parts(self) -> (gix_filter::Pipeline, gix_worktree::Stack) { (self.inner, self.cache) } } /// Conversions impl Pipeline<'_> { /// Convert a `src` stream (to be found at `rela_path`, a repo-relative path) to a representation suitable for storage in `git` /// by using all attributes at `rela_path` and configuration of the repository to know exactly which filters apply. /// `index` is used in particularly rare cases where the CRLF filter in auto-mode tries to determine whether to apply itself, /// and it should match the state used when [instantiating this instance][Self::new()]. /// Note that the return-type implements [`std::io::Read`]. pub fn convert_to_git( &mut self, src: R, rela_path: &std::path::Path, index: &gix_index::State, ) -> Result, pipeline::convert_to_git::Error> where R: std::io::Read, { let entry = self.cache.at_path(rela_path, None, &self.repo.objects)?; Ok(self.inner.convert_to_git( src, rela_path, &mut |_, attrs| { entry.matching_attributes(attrs); }, &mut |buf| -> Result<_, gix_object::find::Error> { let entry = match index.entry_by_path(gix_path::into_bstr(rela_path).as_ref()) { None => return Ok(None), Some(entry) => entry, }; let obj = self.repo.objects.try_find(&entry.id, buf)?; Ok(obj.filter(|obj| obj.kind == gix_object::Kind::Blob).map(|_| ())) }, )?) } /// Convert a `src` buffer located at `rela_path` (in the index) from what's in `git` to the worktree representation. /// This method will obtain all attributes and configuration necessary to know exactly which filters to apply. /// Note that the return-type implements [`std::io::Read`]. 
/// /// Use `can_delay` to tell driver processes that they may delay the return of data. Doing this will require the caller to specifically /// handle delayed files by keeping state and using [`Self::into_parts()`] to get access to the driver state to follow the delayed-files /// protocol. For simplicity, most will want to disallow delayed processing. pub fn convert_to_worktree<'input>( &mut self, src: &'input [u8], rela_path: &BStr, can_delay: gix_filter::driver::apply::Delay, ) -> Result, pipeline::convert_to_worktree::Error> { let entry = self.cache.at_entry(rela_path, None, &self.repo.objects)?; Ok(self.inner.convert_to_worktree( src, rela_path, &mut |_, attrs| { entry.matching_attributes(attrs); }, can_delay, )?) } /// Retrieve the static context that is made available to the process filters. /// /// The context set here is relevant for the [`convert_to_git()`][Self::convert_to_git()] and /// [`convert_to_worktree()`][Self::convert_to_worktree()] methods. pub fn driver_context_mut(&mut self) -> &mut gix_filter::pipeline::Context { self.inner.driver_context_mut() } } /// Obtain a list of all configured driver, but ignore those in sections that we don't trust enough. fn extract_drivers(repo: &Repository) -> Result, pipeline::options::Error> { repo.config .resolved .sections_by_name("filter") .into_iter() .flatten() .filter(|s| repo.filter_config_section()(s.meta())) .filter_map(|s| { s.header().subsection_name().map(|name| { Ok(gix_filter::Driver { name: name.to_owned(), clean: s.value("clean").map(Cow::into_owned), smudge: s.value("smudge").map(Cow::into_owned), process: s.value("process").map(Cow::into_owned), required: s .value("required") .map(|value| gix_config::Boolean::try_from(value.as_ref())) .transpose() .map_err(|err| pipeline::options::Error::Driver { name: name.to_owned(), source: err, })? 
.unwrap_or_default() .into(), }) }) }) .collect::, pipeline::options::Error>>() } gix-0.69.1/src/head/log.rs000064400000000000000000000024631046102023000133340ustar 00000000000000use gix_hash::ObjectId; use crate::{ bstr::{BString, ByteSlice}, Head, }; impl<'repo> Head<'repo> { /// Return a platform for obtaining iterators on the reference log associated with the `HEAD` reference. pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'static, 'repo> { gix_ref::file::log::iter::Platform { store: &self.repo.refs, name: "HEAD".try_into().expect("HEAD is always valid"), buf: Vec::new(), } } /// Return a list of all branch names that were previously checked out with the first-ever checked out branch /// being the first entry of the list, and the most recent is the last, along with the commit they were pointing to /// at the time. pub fn prior_checked_out_branches(&self) -> std::io::Result>> { Ok(self.log_iter().all()?.map(|log| { log.filter_map(Result::ok) .filter_map(|line| { line.message .strip_prefix(b"checkout: moving from ") .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos])) .map(|from_branch| (from_branch.as_bstr().to_owned(), line.previous_oid())) }) .collect() })) } } gix-0.69.1/src/head/mod.rs000064400000000000000000000106051046102023000133270ustar 00000000000000//! #![allow(clippy::empty_docs)] use gix_hash::ObjectId; use gix_ref::FullNameRef; use crate::{ ext::{ObjectIdExt, ReferenceExt}, Head, }; /// Represents the kind of `HEAD` reference. #[derive(Clone)] pub enum Kind { /// The existing reference the symbolic HEAD points to. /// /// This is the common case. Symbolic(gix_ref::Reference), /// The yet-to-be-created reference the symbolic HEAD refers to. /// /// This is the case in a newly initialized repository. Unborn(gix_ref::FullName), /// The head points to an object directly, not to a symbolic reference. /// /// This state is less common and can occur when checking out commits directly. 
Detached { /// The object to which the head points to target: ObjectId, /// Possibly the final destination of `target` after following the object chain from tag objects to commits. peeled: Option, }, } impl Kind { /// Attach this instance to a `repo` to produce a [`Head`]. pub fn attach(self, repo: &crate::Repository) -> Head<'_> { Head { kind: self, repo } } } /// Access impl<'repo> Head<'repo> { /// Returns the name of this references, always `HEAD`. pub fn name(&self) -> &'static FullNameRef { // TODO: use a statically checked version of this when available. "HEAD".try_into().expect("HEAD is valid") } /// Returns the full reference name of this head if it is not detached, or `None` otherwise. pub fn referent_name(&self) -> Option<&FullNameRef> { Some(match &self.kind { Kind::Symbolic(r) => r.name.as_ref(), Kind::Unborn(name) => name.as_ref(), Kind::Detached { .. } => return None, }) } /// Returns true if this instance is detached, and points to an object directly. pub fn is_detached(&self) -> bool { matches!(self.kind, Kind::Detached { .. }) } /// Returns true if this instance is not yet born, hence it points to a ref that doesn't exist yet. /// /// This is the case in a newly initialized repository. pub fn is_unborn(&self) -> bool { matches!(self.kind, Kind::Unborn(_)) } // TODO: tests /// Returns the id the head points to, which isn't possible on unborn heads. pub fn id(&self) -> Option> { match &self.kind { Kind::Symbolic(r) => r.target.try_id().map(|oid| oid.to_owned().attach(self.repo)), Kind::Detached { peeled, target } => { (*peeled).unwrap_or_else(|| target.to_owned()).attach(self.repo).into() } Kind::Unborn(_) => None, } } /// Try to transform this instance into the symbolic reference that it points to, or return `None` if head is detached or unborn. 
pub fn try_into_referent(self) -> Option> { match self.kind { Kind::Symbolic(r) => r.attach(self.repo).into(), _ => None, } } } mod remote { use super::Head; use crate::{remote, Remote}; /// Remote impl<'repo> Head<'repo> { /// Return the remote with which the currently checked our reference can be handled as configured by `branch..remote|pushRemote` /// or fall back to the non-branch specific remote configuration. `None` is returned if the head is detached or unborn, so there is /// no branch specific remote. /// /// This is equivalent to calling [`Reference::remote(…)`][crate::Reference::remote()] and /// [`Repository::remote_default_name()`][crate::Repository::remote_default_name()] in order. /// /// Combine it with [`Repository::find_default_remote()`][crate::Repository::find_default_remote()] as fallback to /// handle detached heads, i.e. obtain a remote even in case of detached heads, /// or call [`Repository::find_fetch_remote(…)`](crate::Repository::find_fetch_remote()) for the highest-level way of finding /// the right remote, just like `git fetch` does. pub fn into_remote( self, direction: remote::Direction, ) -> Option, remote::find::existing::Error>> { let repo = self.repo; self.try_into_referent()? .remote(direction) .or_else(|| repo.find_default_remote(direction)) } } } /// pub mod log; /// pub mod peel; gix-0.69.1/src/head/peel.rs000064400000000000000000000144601046102023000135000ustar 00000000000000use crate::{ ext::{ObjectIdExt, ReferenceExt}, head::Kind, Head, }; mod error { use crate::{object, reference}; /// The error returned by [`Head::peel_to_id_in_place()`](super::Head::try_peel_to_id_in_place()) /// and [`Head::into_fully_peeled_id()`](super::Head::try_into_peeled_id()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindExistingObject(#[from] object::find::existing::Error), #[error(transparent)] PeelReference(#[from] reference::peel::Error), } } pub use error::Error; /// pub mod into_id { use crate::object; /// The error returned by [`Head::into_peeled_id()`](super::Head::into_peeled_id()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Peel(#[from] super::Error), #[error("Branch '{name}' does not have any commits")] Unborn { name: gix_ref::FullName }, #[error(transparent)] ObjectKind(#[from] object::try_into::Error), } } /// pub mod to_commit { use crate::object; /// The error returned by [`Head::peel_to_commit_in_place()`](super::Head::peel_to_commit_in_place()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] PeelToObject(#[from] super::to_object::Error), #[error(transparent)] ObjectKind(#[from] object::try_into::Error), } } /// pub mod to_object { /// The error returned by [`Head::peel_to_object_in_place()`](super::Head::peel_to_object_in_place()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Peel(#[from] super::Error), #[error("Branch '{name}' does not have any commits")] Unborn { name: gix_ref::FullName }, } } impl<'repo> Head<'repo> { /// Peel this instance and consume it to make obtaining its final target id possible, while returning an error on unborn heads. /// /// The final target is obtained by following symbolic references and peeling tags to their final destination, which /// typically is a commit, but can be any object. pub fn into_peeled_id(mut self) -> Result, into_id::Error> { self.try_peel_to_id_in_place()?; self.id().ok_or_else(|| match self.kind { Kind::Symbolic(gix_ref::Reference { name, .. }) | Kind::Unborn(name) => into_id::Error::Unborn { name }, Kind::Detached { .. 
} => unreachable!("id can be returned after peeling"), }) } /// Peel this instance and consume it to make obtaining its final target object possible, while returning an error on unborn heads. /// /// The final target is obtained by following symbolic references and peeling tags to their final destination, which /// typically is a commit, but can be any object as well. pub fn into_peeled_object(mut self) -> Result, to_object::Error> { self.peel_to_object_in_place() } /// Consume this instance and transform it into the final object that it points to, or `Ok(None)` if the `HEAD` /// reference is yet to be born. /// /// The final target is obtained by following symbolic references and peeling tags to their final destination, which /// typically is a commit, but can be any object. pub fn try_into_peeled_id(mut self) -> Result>, Error> { self.try_peel_to_id_in_place() } /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no /// more object to follow, and return that object id. /// /// Returns `Ok(None)` if the head is unborn. /// /// The final target is obtained by following symbolic references and peeling tags to their final destination, which /// typically is a commit, but can be any object. pub fn try_peel_to_id_in_place(&mut self) -> Result>, Error> { Ok(Some(match &mut self.kind { Kind::Unborn(_name) => return Ok(None), Kind::Detached { peeled: Some(peeled), .. } => (*peeled).attach(self.repo), Kind::Detached { peeled: None, target } => { let id = target.attach(self.repo); if id.header()?.kind() == gix_object::Kind::Commit { id } else { match id.object()?.peel_tags_to_end() { Ok(obj) => { self.kind = Kind::Detached { peeled: Some(obj.id), target: *target, }; obj.id() } Err(err) => return Err(err.into()), } } } Kind::Symbolic(r) => { let mut nr = r.clone().attach(self.repo); let peeled = nr.peel_to_id_in_place(); *r = nr.detach(); peeled? 
} })) } /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no /// more object to follow, transform the id into a commit if possible and return that. /// /// Returns an error if the head is unborn or if it doesn't point to a commit. pub fn peel_to_object_in_place(&mut self) -> Result, to_object::Error> { let id = self .try_peel_to_id_in_place()? .ok_or_else(|| to_object::Error::Unborn { name: self.referent_name().expect("unborn").to_owned(), })?; id.object() .map_err(|err| to_object::Error::Peel(Error::FindExistingObject(err))) } /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no /// more object to follow, transform the id into a commit if possible and return that. /// /// Returns an error if the head is unborn or if it doesn't point to a commit. pub fn peel_to_commit_in_place(&mut self) -> Result, to_commit::Error> { Ok(self.peel_to_object_in_place()?.try_into_commit()?) } } gix-0.69.1/src/id.rs000064400000000000000000000142351046102023000122460ustar 00000000000000//! #![allow(clippy::empty_docs)] use std::ops::Deref; use gix_hash::{oid, ObjectId}; use crate::{object::find, Id, Object}; /// An [object id][ObjectId] infused with a [`Repository`][crate::Repository]. impl<'repo> Id<'repo> { /// Find the [`Object`] associated with this object id, and consider it an error if it doesn't exist. /// /// # Note /// /// There can only be one `ObjectRef` per `Easy`. To increase that limit, clone the `Easy`. pub fn object(&self) -> Result, find::existing::Error> { self.repo.find_object(self.inner) } /// Find the [`header`][gix_odb::find::Header] associated with this object id, or an error if it doesn't exist. /// /// Use this method if there is no interest in the contents of the object, which generally is much faster to obtain. 
pub fn header(&self) -> Result { self.repo.find_header(self.inner) } /// Try to find the [`Object`] associated with this object id, and return `None` if it's not available locally. /// /// # Note /// /// There can only be one `ObjectRef` per `Easy`. To increase that limit, clone the `Easy`. pub fn try_object(&self) -> Result>, find::Error> { self.repo.try_find_object(self.inner) } /// Find the [`header`][gix_odb::find::Header] associated with this object id, or return `None` if it doesn't exist. /// /// Use this method if there is no interest in the contents of the object, which generally is much faster to obtain. pub fn try_header(&self) -> Result, find::Error> { self.repo.try_find_header(self.inner) } /// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`. pub fn shorten(&self) -> Result { let hex_len = self.repo.config.hex_len.map_or_else( || self.repo.objects.packed_object_count().map(calculate_auto_hex_len), Ok, )?; let prefix = gix_odb::store::prefix::disambiguate::Candidate::new(self.inner, hex_len) .expect("BUG: internal hex-len must always be valid"); self.repo .objects .disambiguate_prefix(prefix)? .ok_or(shorten::Error::NotFound { oid: self.inner }) } /// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`, or default /// to a prefix which equals our id in the unlikely error case. pub fn shorten_or_id(&self) -> gix_hash::Prefix { self.shorten().unwrap_or_else(|_| self.inner.into()) } } fn calculate_auto_hex_len(num_packed_objects: u64) -> usize { let mut len = 64 - num_packed_objects.leading_zeros(); len = (len + 1) / 2; len.max(7) as usize } /// pub mod shorten { /// Returned by [`Id::prefix()`][super::Id::shorten()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] PackedObjectsCount(#[from] gix_odb::store::load_index::Error), #[error(transparent)] DisambiguatePrefix(#[from] gix_odb::store::prefix::disambiguate::Error), #[error("Id could not be shortened as the object with id {} could not be found", .oid)] NotFound { oid: gix_hash::ObjectId }, } } impl Deref for Id<'_> { type Target = oid; fn deref(&self) -> &Self::Target { &self.inner } } impl<'repo> Id<'repo> { pub(crate) fn from_id(id: impl Into, repo: &'repo crate::Repository) -> Self { Id { inner: id.into(), repo } } /// Turn this instance into its bare [`ObjectId`]. pub fn detach(self) -> ObjectId { self.inner } } impl<'repo> Id<'repo> { /// Obtain a platform for traversing ancestors of this commit. pub fn ancestors(&self) -> crate::revision::walk::Platform<'repo> { crate::revision::walk::Platform::new(Some(self.inner), self.repo) } } mod impls { use std::{cmp::Ordering, hash::Hasher}; use gix_hash::{oid, ObjectId}; use crate::{Id, Object, ObjectDetached}; // Eq, Hash, Ord, PartialOrd, impl std::hash::Hash for Id<'_> { fn hash(&self, state: &mut H) { self.inner.hash(state); } } impl<'a> PartialOrd> for Id<'a> { fn partial_cmp(&self, other: &Id<'a>) -> Option { self.inner.partial_cmp(&other.inner) } } impl<'repo> PartialEq> for Id<'repo> { fn eq(&self, other: &Id<'repo>) -> bool { self.inner == other.inner } } impl PartialEq for Id<'_> { fn eq(&self, other: &ObjectId) -> bool { &self.inner == other } } impl<'repo> PartialEq> for ObjectId { fn eq(&self, other: &Id<'repo>) -> bool { self == &other.inner } } impl PartialEq for Id<'_> { fn eq(&self, other: &oid) -> bool { self.inner == other } } impl<'repo> PartialEq> for Id<'repo> { fn eq(&self, other: &Object<'repo>) -> bool { self.inner == other.id } } impl PartialEq for Id<'_> { fn eq(&self, other: &ObjectDetached) -> bool { self.inner == other.id } } impl std::fmt::Debug for Id<'_> { fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { self.inner.fmt(f) } } impl std::fmt::Display for Id<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.inner.fmt(f) } } impl AsRef for Id<'_> { fn as_ref(&self) -> &oid { &self.inner } } impl<'repo> From> for ObjectId { fn from(v: Id<'repo>) -> Self { v.inner } } } #[cfg(test)] mod tests { use super::*; #[test] fn size_of_oid() { let actual = std::mem::size_of::>(); let ceiling = 32; assert!( actual <= ceiling, "size of oid shouldn't change without notice: {actual} <= {ceiling}" ); } } gix-0.69.1/src/init.rs000064400000000000000000000075731046102023000126240ustar 00000000000000#![allow(clippy::result_large_err)] use std::{borrow::Cow, path::Path}; use gix_ref::{ store::WriteReflog, transaction::{PreviousValue, RefEdit}, FullName, Target, }; use crate::{bstr::BString, config::tree::Init, ThreadSafeRepository}; /// The name of the branch to use if non is configured via git configuration. /// /// # Deviation /// /// We use `main` instead of `master`. pub const DEFAULT_BRANCH_NAME: &str = "main"; /// The error returned by [`crate::init()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not obtain the current directory")] CurrentDir(#[from] std::io::Error), #[error(transparent)] Init(#[from] crate::create::Error), #[error(transparent)] Open(#[from] crate::open::Error), #[error("Invalid default branch name: {name:?}")] InvalidBranchName { name: BString, source: gix_validate::reference::name::Error, }, #[error("Could not edit HEAD reference with new default name")] EditHeadForDefaultBranch(#[from] crate::reference::edit::Error), } impl ThreadSafeRepository { /// Create a repository with work-tree within `directory`, creating intermediate directories as needed. /// /// Fails without action if there is already a `.git` repository inside of `directory`, but /// won't mind if the `directory` otherwise is non-empty. 
pub fn init( directory: impl AsRef, kind: crate::create::Kind, options: crate::create::Options, ) -> Result { use gix_sec::trust::DefaultForLevel; let open_options = crate::open::Options::default_for_level(gix_sec::Trust::Full); Self::init_opts(directory, kind, options, open_options) } /// Similar to [`init`][Self::init()], but allows to determine how exactly to open the newly created repository. /// /// # Deviation /// /// Instead of naming the default branch `master`, we name it `main` unless configured explicitly using the `init.defaultBranch` /// configuration key. pub fn init_opts( directory: impl AsRef, kind: crate::create::Kind, create_options: crate::create::Options, mut open_options: crate::open::Options, ) -> Result { let path = crate::create::into(directory.as_ref(), kind, create_options)?; let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories(); open_options.git_dir_trust = Some(gix_sec::Trust::Full); // The repo will use `core.precomposeUnicode` to adjust the value as needed. 
open_options.current_dir = gix_fs::current_dir(false)?.into(); let repo = ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, open_options)?; let branch_name = repo .config .resolved .string(Init::DEFAULT_BRANCH) .unwrap_or_else(|| Cow::Borrowed(DEFAULT_BRANCH_NAME.into())); if branch_name.as_ref() != DEFAULT_BRANCH_NAME { let sym_ref: FullName = format!("refs/heads/{branch_name}") .try_into() .map_err(|err| Error::InvalidBranchName { name: branch_name.into_owned(), source: err, })?; let mut repo = repo.to_thread_local(); let prev_write_reflog = repo.refs.write_reflog; repo.refs.write_reflog = WriteReflog::Disable; repo.edit_reference(RefEdit { change: gix_ref::transaction::Change::Update { log: Default::default(), expected: PreviousValue::Any, new: Target::Symbolic(sym_ref), }, name: "HEAD".try_into().expect("valid"), deref: false, })?; repo.refs.write_reflog = prev_write_reflog; } Ok(repo) } } gix-0.69.1/src/interrupt.rs000064400000000000000000000223111046102023000137000ustar 00000000000000//! Process-global interrupt handling //! //! This module contains facilities to globally request an interrupt, which will cause supporting computations to //! abort once it is observed. //! Such checks for interrupts are provided in custom implementations of various traits to transparently add interrupt //! support to methods who wouldn't otherwise by injecting it. see [`Read`]. 
#[cfg(feature = "interrupt")] mod init { use std::{ io, sync::atomic::{AtomicUsize, Ordering}, }; static DEREGISTER_COUNT: AtomicUsize = AtomicUsize::new(0); static REGISTERED_HOOKS: once_cell::sync::Lazy>> = once_cell::sync::Lazy::new(Default::default); static DEFAULT_BEHAVIOUR_HOOKS: once_cell::sync::Lazy>> = once_cell::sync::Lazy::new(Default::default); /// A type to help deregistering hooks registered with [`init_handler`](super::init_handler()); #[derive(Default)] pub struct Deregister { do_reset: bool, } pub struct AutoDeregister(Deregister); impl Deregister { /// Remove all previously registered handlers, and assure the default behaviour is reinstated, if this is the last available instance. /// /// Note that only the instantiation of the default behaviour can fail. pub fn deregister(self) -> std::io::Result<()> { let mut hooks = REGISTERED_HOOKS.lock(); let count = DEREGISTER_COUNT.fetch_sub(1, Ordering::SeqCst); if count > 1 || hooks.is_empty() { return Ok(()); } if self.do_reset { super::reset(); } for (_, hook_id) in hooks.iter() { signal_hook::low_level::unregister(*hook_id); } let hooks = hooks.drain(..); let mut default_hooks = DEFAULT_BEHAVIOUR_HOOKS.lock(); // Even if dropped, `drain(..)` clears the vec which is a must. for (sig, _) in hooks { // # SAFETY // * we only register a handler that is specifically designed to run in this environment. #[allow(unsafe_code)] unsafe { default_hooks.push(signal_hook::low_level::register(sig, move || { signal_hook::low_level::emulate_default_handler(sig).ok(); })?); } } Ok(()) } /// If called with `toggle` being `true`, when actually deregistering, we will also reset the trigger by /// calling [`reset()`](super::reset()). pub fn with_reset(mut self, toggle: bool) -> Self { self.do_reset = toggle; self } /// Return a type that deregisters all installed signal handlers on drop. 
pub fn auto_deregister(self) -> AutoDeregister { AutoDeregister(self) } } impl Drop for AutoDeregister { fn drop(&mut self) { std::mem::take(&mut self.0).deregister().ok(); } } /// Initialize a signal handler to listen to SIGINT and SIGTERM and trigger our [`trigger()`](super::trigger()) that way. /// Also trigger `interrupt()` which promises to never use a Mutex, allocate or deallocate, or do anything else that's blocking. /// Use `grace_count` to determine how often the termination signal can be received before it's terminal, e.g. 1 would only terminate /// the application the second time the signal is received. /// Note that only the `grace_count` and `interrupt` of the first call are effective, all others will be ignored. /// /// Use the returned `Deregister` type to explicitly deregister hooks, or to do so automatically. /// /// # Note /// /// It will abort the process on second press and won't inform the user about this behaviour either as we are unable to do so without /// deadlocking even when trying to write to stderr directly. /// /// SAFETY: `interrupt()` will be called from a signal handler. See [`signal_hook::low_level::register()`] for details about. #[allow(unsafe_code, clippy::missing_safety_doc)] pub unsafe fn init_handler( grace_count: usize, interrupt: impl Fn() + Send + Sync + Clone + 'static, ) -> io::Result { let prev_count = DEREGISTER_COUNT.fetch_add(1, Ordering::SeqCst); if prev_count != 0 { // Try to obtain the lock before we return just to wait for the signals to actually be registered. 
let _guard = REGISTERED_HOOKS.lock(); return Ok(Deregister::default()); } let mut guard = REGISTERED_HOOKS.lock(); if !guard.is_empty() { return Ok(Deregister::default()); } let mut hooks = Vec::with_capacity(signal_hook::consts::TERM_SIGNALS.len()); for sig in signal_hook::consts::TERM_SIGNALS { // # SAFETY // * we only set atomics or call functions that do // * there is no use of the heap let interrupt = interrupt.clone(); let action = move || { static INTERRUPT_COUNT: AtomicUsize = AtomicUsize::new(0); if !super::is_triggered() { INTERRUPT_COUNT.store(0, Ordering::SeqCst); } let msg_idx = INTERRUPT_COUNT.fetch_add(1, Ordering::SeqCst); if msg_idx == grace_count { gix_tempfile::registry::cleanup_tempfiles_signal_safe(); signal_hook::low_level::emulate_default_handler(*sig).ok(); } interrupt(); super::trigger(); }; #[allow(unsafe_code)] unsafe { let hook_id = signal_hook::low_level::register(*sig, action)?; hooks.push((*sig, hook_id)); } } for hook_id in DEFAULT_BEHAVIOUR_HOOKS.lock().drain(..) { signal_hook::low_level::unregister(hook_id); } // This means that they won't setup a handler allowing us to call them right before we actually abort. gix_tempfile::signal::setup(gix_tempfile::signal::handler::Mode::None); *guard = hooks; Ok(Deregister::default()) } } use std::{ io, sync::atomic::{AtomicBool, Ordering}, }; #[cfg(feature = "interrupt")] pub use init::{init_handler, Deregister}; /// A wrapper for an inner iterator which will check for interruptions on each iteration. pub struct Iter { /// The actual iterator to yield elements from. inner: gix_features::interrupt::IterWithErr<'static, I, EFN>, } impl Iter where I: Iterator, EFN: FnOnce() -> E, { /// Create a new iterator over `inner` which checks for interruptions on each iteration and calls `make_err()` to /// signal an interruption happened, causing no further items to be iterated from that point on. 
pub fn new(inner: I, make_err: EFN) -> Self { Iter { inner: gix_features::interrupt::IterWithErr::new(inner, make_err, &IS_INTERRUPTED), } } /// Return the inner iterator pub fn into_inner(self) -> I { self.inner.inner } /// Return the inner iterator as reference pub fn inner(&self) -> &I { &self.inner.inner } } impl Iterator for Iter where I: Iterator, EFN: FnOnce() -> E, { type Item = Result; fn next(&mut self) -> Option { self.inner.next() } } /// A wrapper for implementers of [`std::io::Read`] or [`std::io::BufRead`] with interrupt support. /// /// It fails a [read][`std::io::Read::read`] while an interrupt was requested. pub struct Read { /// The actual implementor of [`std::io::Read`] to which interrupt support will be added. inner: gix_features::interrupt::Read<'static, R>, } impl Read where R: io::Read, { /// Create a new interruptible reader from `read`. pub fn new(read: R) -> Self { Read { inner: gix_features::interrupt::Read { inner: read, should_interrupt: &IS_INTERRUPTED, }, } } /// Return the inner reader pub fn into_inner(self) -> R { self.inner.inner } } impl io::Read for Read where R: io::Read, { fn read(&mut self, buf: &mut [u8]) -> io::Result { self.inner.read(buf) } } impl io::BufRead for Read where R: io::BufRead, { fn fill_buf(&mut self) -> io::Result<&[u8]> { self.inner.fill_buf() } fn consume(&mut self, amt: usize) { self.inner.consume(amt); } } /// The flag behind all utility functions in this module. pub static IS_INTERRUPTED: AtomicBool = AtomicBool::new(false); /// Returns true if an interrupt is requested. pub fn is_triggered() -> bool { IS_INTERRUPTED.load(Ordering::Relaxed) } /// Trigger an interrupt, signalling to those checking for [`is_triggered()`] to stop what they are doing. pub fn trigger() { IS_INTERRUPTED.store(true, Ordering::SeqCst); } /// Sets the interrupt request to false, thus allowing those checking for [`is_triggered()`] to proceed. 
pub fn reset() { IS_INTERRUPTED.store(false, Ordering::SeqCst); } gix-0.69.1/src/lib.rs000064400000000000000000000313271046102023000124210ustar 00000000000000//! This crate provides the [`Repository`] abstraction which serves as a hub into all the functionality of git. //! //! It's powerful and won't sacrifice performance while still increasing convenience compared to using the sub-crates //! individually. Sometimes it may hide complexity under the assumption that the performance difference doesn't matter //! for all but the fewest tools out there, which would be using the underlying crates directly or file an issue. //! //! ### The Trust Model //! //! It is very simple - based on the ownership of the repository compared to the user of the current process [Trust](sec::Trust) //! is assigned. This can be [overridden](open::Options::with()) as well. Further, git configuration files track their trust level //! per section based on and sensitive values like paths to executables or certain values will be skipped if they are from a source //! that isn't [fully](sec::Trust::Full) trusted. //! //! That way, data can safely be obtained without risking to execute untrusted executables. //! //! Note that it's possible to let `gix` act like `git` or `git2` by setting the [open::Options::bail_if_untrusted()] option. //! //! ### The prelude and extensions //! //! With `use git_repository::prelude::*` you should be ready to go as it pulls in various extension traits to make functionality //! available on objects that may use it. //! //! The method signatures are still complex and may require various arguments for configuration and cache control. //! //! Most extensions to existing objects provide an `obj_with_extension.attach(&repo).an_easier_version_of_a_method()` for simpler //! call signatures. //! //! ### `ThreadSafe` Mode //! //! By default, the [`Repository`] isn't `Sync` and thus can't be used in certain contexts which require the `Sync` trait. //! //! 
To help with this, convert it with [`.into_sync()`][Repository::into_sync()] into a [`ThreadSafeRepository`]. //! //! ### Object-Access Performance //! //! Accessing objects quickly is the bread-and-butter of working with git, right after accessing references. Hence it's vital //! to understand which cache levels exist and how to leverage them. //! //! When accessing an object, the first cache that's queried is a memory-capped LRU object cache, mapping their id to data and kind. //! It has to be specifically enabled a [`Repository`]. //! On miss, the object is looked up and if a pack is hit, there is a small fixed-size cache for delta-base objects. //! //! In scenarios where the same objects are accessed multiple times, the object cache can be useful and is to be configured specifically //! using the [`object_cache_size(…)`][crate::Repository::object_cache_size()] method. //! //! Use the `cache-efficiency-debug` cargo feature to learn how efficient the cache actually is - it's easy to end up with lowered //! performance if the cache is not hit in 50% of the time. //! //! ### Terminology //! //! #### `WorkingTree` and `WorkTree` //! //! When reading the documentation of the canonical gix-worktree program one gets the impression work tree and working tree are used //! interchangeably. We use the term _work tree_ only and try to do so consistently as its shorter and assumed to be the same. //! //! ### Plumbing Crates //! //! To make using _sub-crates_ and their types easier, these are re-exported into the root of this crate. Here we list how to access nested plumbing //! crates which are otherwise harder to discover: //! //! **`git_repository::`** //! * [`odb`] //! * [`pack`][odb::pack] //! * [`protocol`] //! * [`transport`][protocol::transport] //! * [`packetline`][protocol::transport::packetline] //! //! ### `libgit2` API to `gix` //! //! This doc-aliases are used to help finding methods under a possibly changed name. Just search in the docs. //! 
Entering `git2` into the search field will also surface all methods with such annotations. //! //! What follows is a list of methods you might be missing, along with workarounds if available. //! * [`git2::Repository::open_bare()`](https://docs.rs/git2/*/git2/struct.Repository.html#method.open_bare) ➡ ❌ - use [`open()`] and discard if it is not bare. //! * [`git2::build::CheckoutBuilder::disable_filters()`](https://docs.rs/git2/*/git2/build/struct.CheckoutBuilder.html#method.disable_filters) ➡ ❌ *(filters are always applied during checkouts)* //! * [`git2::Repository::submodule_status()`](https://docs.rs/git2/*/git2/struct.Repository.html#method.submodule_status) ➡ [`Submodule::state()`] - status provides more information and conveniences though, and an actual worktree status isn't performed. //! //! #### Integrity checks //! //! `git2` by default performs integrity checks via [`strict_hash_verification()`](https://docs.rs/git2/latest/git2/opts/fn.strict_hash_verification.html) and //! [`strict_object_creation`](https://docs.rs/git2/latest/git2/opts/fn.strict_object_creation.html) which `gitoxide` *currently* **does not have**. //! //! ### Feature Flags #![cfg_attr( all(doc, feature = "document-features"), doc = ::document_features::document_features!() )] #![cfg_attr(all(doc, feature = "document-features"), feature(doc_cfg, doc_auto_cfg))] #![deny(missing_docs, rust_2018_idioms, unsafe_code)] #![allow(clippy::result_large_err)] // Re-exports to make this a potential one-stop shop crate avoiding people from having to reference various crates themselves. // This also means that their major version changes affect our major version, but that's alright as we directly expose their // APIs/instances anyway. 
pub use gix_actor as actor; #[cfg(feature = "attributes")] pub use gix_attributes as attrs; #[cfg(feature = "command")] pub use gix_command as command; pub use gix_commitgraph as commitgraph; #[cfg(feature = "credentials")] pub use gix_credentials as credentials; pub use gix_date as date; #[cfg(feature = "dirwalk")] pub use gix_dir as dir; pub use gix_features as features; use gix_features::threading::OwnShared; pub use gix_features::{ parallel, progress::{Count, DynNestedProgress, NestedProgress, Progress}, threading, }; pub use gix_fs as fs; pub use gix_glob as glob; pub use gix_hash as hash; pub use gix_hashtable as hashtable; #[cfg(feature = "excludes")] pub use gix_ignore as ignore; #[doc(inline)] #[cfg(feature = "index")] pub use gix_index as index; pub use gix_lock as lock; #[cfg(feature = "credentials")] pub use gix_negotiate as negotiate; pub use gix_object as objs; pub use gix_object::bstr; pub use gix_odb as odb; #[cfg(feature = "credentials")] pub use gix_prompt as prompt; pub use gix_protocol as protocol; pub use gix_ref as refs; pub use gix_refspec as refspec; pub use gix_revwalk as revwalk; pub use gix_sec as sec; pub use gix_tempfile as tempfile; pub use gix_trace as trace; pub use gix_traverse as traverse; pub use gix_url as url; #[doc(inline)] pub use gix_url::Url; pub use gix_utils as utils; pub use gix_validate as validate; pub use hash::{oid, ObjectId}; pub mod interrupt; mod ext; /// pub mod prelude; #[cfg(feature = "excludes")] mod attribute_stack; /// pub mod path; /// The standard type for a store to handle git references. pub type RefStore = gix_ref::file::Store; /// A handle for finding objects in an object database, abstracting away caches for thread-local use. pub type OdbHandle = gix_odb::memory::Proxy; /// A handle for finding objects in an object database, abstracting away caches for moving across threads. 
pub type OdbHandleArc = gix_odb::memory::Proxy; /// A way to access git configuration pub(crate) type Config = OwnShared>; mod types; #[cfg(any(feature = "excludes", feature = "attributes"))] pub use types::AttributeStack; pub use types::{ Blob, Commit, Head, Id, Object, ObjectDetached, Reference, Remote, Repository, Tag, ThreadSafeRepository, Tree, Worktree, }; #[cfg(feature = "attributes")] pub use types::{Pathspec, PathspecDetached, Submodule}; /// pub mod clone; pub mod commit; /// #[cfg(feature = "dirwalk")] pub mod dirwalk; pub mod head; pub mod id; pub mod object; #[cfg(feature = "attributes")] pub mod pathspec; pub mod reference; pub mod repository; #[cfg(feature = "attributes")] pub mod submodule; pub mod tag; #[cfg(any(feature = "dirwalk", feature = "status"))] pub(crate) mod util; /// pub mod progress; /// pub mod push; /// pub mod diff; /// #[cfg(feature = "merge")] pub mod merge; /// See [`ThreadSafeRepository::discover()`], but returns a [`Repository`] instead. /// /// # Note /// /// **The discovered repository might not be suitable for any operation that requires authentication with remotes** /// as it doesn't see the relevant git configuration. /// /// To achieve that, one has to [enable `git_binary` configuration](https://github.com/GitoxideLabs/gitoxide/blob/9723e1addf52cc336d59322de039ea0537cdca36/src/plumbing/main.rs#L86) /// in the open-options and use [`ThreadSafeRepository::discover_opts()`] instead. Alternatively, it might be well-known /// that the tool is going to run in a neatly configured environment without relying on bundled configuration. #[allow(clippy::result_large_err)] pub fn discover(directory: impl AsRef) -> Result { ThreadSafeRepository::discover(directory).map(Into::into) } /// See [`ThreadSafeRepository::init()`], but returns a [`Repository`] instead. 
#[allow(clippy::result_large_err)] pub fn init(directory: impl AsRef) -> Result { ThreadSafeRepository::init(directory, create::Kind::WithWorktree, create::Options::default()).map(Into::into) } /// See [`ThreadSafeRepository::init()`], but returns a [`Repository`] instead. #[allow(clippy::result_large_err)] pub fn init_bare(directory: impl AsRef) -> Result { ThreadSafeRepository::init(directory, create::Kind::Bare, create::Options::default()).map(Into::into) } /// Create a platform for configuring a bare clone from `url` to the local `path`, using default options for opening it (but /// amended with using configuration from the git installation to ensure all authentication options are honored). /// /// See [`clone::PrepareFetch::new()`] for a function to take full control over all options. #[allow(clippy::result_large_err)] pub fn prepare_clone_bare( url: Url, path: impl AsRef, ) -> Result where Url: std::convert::TryInto, gix_url::parse::Error: From, { clone::PrepareFetch::new( url, path, create::Kind::Bare, create::Options::default(), open_opts_with_git_binary_config(), ) } /// Create a platform for configuring a clone with main working tree from `url` to the local `path`, using default options for opening it /// (but amended with using configuration from the git installation to ensure all authentication options are honored). /// /// See [`clone::PrepareFetch::new()`] for a function to take full control over all options. 
#[allow(clippy::result_large_err)] pub fn prepare_clone(url: Url, path: impl AsRef) -> Result where Url: std::convert::TryInto, gix_url::parse::Error: From, { clone::PrepareFetch::new( url, path, create::Kind::WithWorktree, create::Options::default(), open_opts_with_git_binary_config(), ) } fn open_opts_with_git_binary_config() -> open::Options { use gix_sec::trust::DefaultForLevel; let mut opts = open::Options::default_for_level(gix_sec::Trust::Full); opts.permissions.config.git_binary = true; opts } /// See [`ThreadSafeRepository::open()`], but returns a [`Repository`] instead. #[allow(clippy::result_large_err)] #[doc(alias = "git2")] pub fn open(directory: impl Into) -> Result { ThreadSafeRepository::open(directory).map(Into::into) } /// See [`ThreadSafeRepository::open_opts()`], but returns a [`Repository`] instead. #[allow(clippy::result_large_err)] #[doc(alias = "open_ext", alias = "git2")] pub fn open_opts(directory: impl Into, options: open::Options) -> Result { ThreadSafeRepository::open_opts(directory, options).map(Into::into) } /// pub mod create; /// pub mod open; /// pub mod config; /// #[cfg(feature = "mailmap")] pub mod mailmap; /// pub mod worktree; pub mod revision; #[cfg(feature = "attributes")] pub mod filter; /// pub mod remote; /// pub mod init; /// Not to be confused with 'status'. pub mod state; /// #[cfg(feature = "status")] pub mod status; /// pub mod shallow; /// pub mod discover; pub mod env; #[cfg(feature = "attributes")] fn is_dir_to_mode(is_dir: bool) -> gix_index::entry::Mode { if is_dir { gix_index::entry::Mode::DIR } else { gix_index::entry::Mode::FILE } } gix-0.69.1/src/mailmap.rs000064400000000000000000000013061046102023000132650ustar 00000000000000pub use gix_mailmap::*; /// pub mod load { /// The error returned by [`crate::Repository::open_mailmap_into()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The mailmap file declared in `mailmap.file` could not be read")] Io(#[from] std::io::Error), #[error("The configured mailmap.blob could not be parsed")] BlobSpec(#[from] crate::revision::spec::parse::single::Error), #[error(transparent)] PathInterpolate(#[from] gix_config::path::interpolate::Error), #[error("Could not find object configured in `mailmap.blob`")] FindExisting(#[from] crate::object::find::existing::Error), } } gix-0.69.1/src/merge.rs000064400000000000000000000305751046102023000127560ustar 00000000000000pub use gix_merge as plumbing; pub use gix_merge::blob; /// pub mod virtual_merge_base { use crate::Id; /// The outcome produced by [`Repository::virtual_merge_base()`](crate::Repository::virtual_merge_base()). pub struct Outcome<'repo> { /// The commit ids of all the virtual merge bases we have produced in the process of recursively merging the merge-bases. /// As they have been written to the object database, they are still available until they are garbage collected. /// The last one is the most recently produced and the one returned as `commit_id`. /// If this list is empty, this means that there was only one merge-base, which itself is already suitable the final merge-base. pub virtual_merge_bases: Vec>, /// The id of the commit that was created to hold the merged tree. pub commit_id: Id<'repo>, /// The hash of the merged tree. pub tree_id: Id<'repo>, } } /// pub mod commit { /// The outcome produced by [`Repository::merge_commits()`](crate::Repository::merge_commits()). #[derive(Clone)] pub struct Outcome<'a> { /// The outcome of the actual tree-merge, with the tree editor to write to obtain the actual tree id. pub tree_merge: crate::merge::tree::Outcome<'a>, /// The tree id of the base commit we used. This is either… /// * the single merge-base we found /// * the first of multiple merge-bases if [Options::with_use_first_merge_base()] was `true`. 
/// * the merged tree of all merge-bases, which then isn't linked to an actual commit. /// * an empty tree, if [Options::with_allow_missing_merge_base()] is enabled. pub merge_base_tree_id: gix_hash::ObjectId, /// The object ids of all the commits which were found to be merge-bases, or `None` if there was no merge-base. pub merge_bases: Option>, /// A list of virtual commits that were created to merge multiple merge-bases into one, the last one being /// the one we used as merge-base for the merge. /// As they are not reachable by anything they will be garbage collected, but knowing them provides options. /// Would be empty if no virtual commit was needed at all as there was only a single merge-base. /// Otherwise, the last commit id is the one with the `merge_base_tree_id`. pub virtual_merge_bases: Vec, } /// A way to configure [`Repository::merge_commits()`](crate::Repository::merge_commits()). #[derive(Default, Debug, Clone)] pub struct Options { allow_missing_merge_base: bool, tree_merge: crate::merge::tree::Options, use_first_merge_base: bool, } impl From for Options { fn from(value: gix_merge::tree::Options) -> Self { Options { tree_merge: value.into(), use_first_merge_base: false, allow_missing_merge_base: false, } } } impl From for Options { fn from(value: crate::merge::tree::Options) -> Self { Options { tree_merge: value, use_first_merge_base: false, allow_missing_merge_base: false, } } } impl From for gix_merge::commit::Options { fn from( Options { allow_missing_merge_base, tree_merge, use_first_merge_base, }: Options, ) -> Self { gix_merge::commit::Options { allow_missing_merge_base, tree_merge: tree_merge.into(), use_first_merge_base, } } } /// Builder impl Options { /// If `true`, merging unrelated commits is allowed, with the merge-base being assumed as empty tree. 
pub fn with_allow_missing_merge_base(mut self, allow_missing_merge_base: bool) -> Self { self.allow_missing_merge_base = allow_missing_merge_base; self } /// If `true`, do not merge multiple merge-bases into one. Instead, just use the first one. #[doc(alias = "no_recursive", alias = "git2")] pub fn with_use_first_merge_base(mut self, use_first_merge_base: bool) -> Self { self.use_first_merge_base = use_first_merge_base; self } } } /// pub mod tree { use gix_merge::blob::builtin_driver; pub use gix_merge::tree::{ apply_index_entries, treat_as_unresolved, Conflict, ContentMerge, Resolution, ResolutionFailure, TreatAsUnresolved, }; /// The outcome produced by [`Repository::merge_trees()`](crate::Repository::merge_trees()). #[derive(Clone)] pub struct Outcome<'repo> { /// The ready-made (but unwritten) *base* tree, including all non-conflicting changes, and the changes that had /// conflicts which could be resolved automatically. /// /// This means, if all of their changes were conflicting, this will be equivalent to the *base* tree. pub tree: crate::object::tree::Editor<'repo>, /// The set of conflicts we encountered. Can be empty to indicate there was no conflict. /// Note that conflicts might have been auto-resolved, but they are listed here for completeness. /// Use [`has_unresolved_conflicts()`](Outcome::has_unresolved_conflicts()) to see if any action is needed /// before using [`tree`](Outcome::tree). pub conflicts: Vec, /// `true` if `conflicts` contains only a single *unresolved* conflict in the last slot, but possibly more resolved ones. /// This also makes this outcome a very partial merge that cannot be completed. pub failed_on_first_unresolved_conflict: bool, } impl Outcome<'_> { /// Return `true` if there is any conflict that would still need to be resolved as they would yield undesirable trees. /// This is based on `how` to determine what should be considered unresolved. 
pub fn has_unresolved_conflicts(&self, how: TreatAsUnresolved) -> bool { self.conflicts.iter().any(|c| c.is_unresolved(how)) } /// Returns `true` if `index` changed as we applied conflicting stages to it, using `how` to determine if a /// conflict should be considered unresolved. /// /// `removal_mode` decides how unconflicted entries should be removed if they are superseded by /// their conflicted counterparts. /// /// It's important that `index` is at the state of [`Self::tree`]. /// Note that in practice, whenever there is a single [conflict](Conflict), this function will return `true`. pub fn index_changed_after_applying_conflicts( &self, index: &mut gix_index::State, how: TreatAsUnresolved, removal_mode: apply_index_entries::RemovalMode, ) -> bool { apply_index_entries(&self.conflicts, how, index, removal_mode) } } /// A way to configure [`Repository::merge_trees()`](crate::Repository::merge_trees()). #[derive(Default, Debug, Clone)] pub struct Options { inner: gix_merge::tree::Options, file_favor: Option, tree_favor: Option, } impl From for Options { fn from(opts: gix_merge::tree::Options) -> Self { Options { inner: opts, file_favor: None, tree_favor: None, } } } impl From for gix_merge::tree::Options { fn from(value: Options) -> Self { let mut opts = value.inner; if let Some(file_favor) = value.file_favor { let (resolve_binary, resolve_text) = match file_favor { FileFavor::Ours => ( builtin_driver::binary::ResolveWith::Ours, builtin_driver::text::Conflict::ResolveWithOurs, ), FileFavor::Theirs => ( builtin_driver::binary::ResolveWith::Theirs, builtin_driver::text::Conflict::ResolveWithTheirs, ), }; opts.symlink_conflicts = Some(resolve_binary); opts.blob_merge.resolve_binary_with = Some(resolve_binary); opts.blob_merge.text.conflict = resolve_text; } opts.tree_conflicts = value.tree_favor.map(Into::into); opts } } /// Identify how files should be resolved in case of conflicts. 
/// /// This works for… /// /// * content merges /// * binary files /// * symlinks (a form of file after all) /// /// Note that *union* merges aren't available as they aren't available for binaries or symlinks. #[derive(Debug, Copy, Clone)] pub enum FileFavor { /// Choose *our* side in case of a conflict. /// Note that this choice is precise, so *ours* hunk will only be chosen if they conflict with *theirs*, /// so *their* hunks may still show up in the merged result. Ours, /// Choose *their* side in case of a conflict. /// Note that this choice is precise, so *ours* hunk will only be chosen if they conflict with *theirs*, /// so *their* hunks may still show up in the merged result. Theirs, } /// Control how irreconcilable changes to trees should be resolved. /// /// Examples for such issues are: /// /// * *we*: delete, *they*: modify /// * *we*: rename, *they*: rename to something else /// * *we*: delete, *they*: rename /// /// Use this to control which entries are visible to in the resulting tree. /// Also note that this does not apply to the many tree-related changes are reconcilable. #[derive(Debug, Copy, Clone)] pub enum TreeFavor { /// Choose *our* side in case of a conflict. /// Note that content-merges are *still* performed according to the [FileFavor]. Ours, /// Choose the state of the shared common ancestor, dropping both *ours* and *their* changes. /// Content merges are not performed here. Ancestor, } impl From for gix_merge::tree::ResolveWith { fn from(value: TreeFavor) -> Self { match value { TreeFavor::Ours => gix_merge::tree::ResolveWith::Ours, TreeFavor::Ancestor => gix_merge::tree::ResolveWith::Ancestor, } } } /// Builder impl Options { /// If *not* `None`, rename tracking will be performed when determining the changes of each side of the merge. pub fn with_rewrites(mut self, rewrites: Option) -> Self { self.inner.rewrites = rewrites; self } /// If `Some(what-is-unresolved)`, the first unresolved conflict will cause the entire merge to stop. 
/// This is useful to see if there is any conflict, without performing the whole operation, something /// that can be very relevant during merges that would cause a lot of blob-diffs. pub fn with_fail_on_conflict(mut self, fail_on_conflict: Option) -> Self { self.inner.fail_on_conflict = fail_on_conflict; self } /// When `None`, the default, both sides will be treated equally, and in case of conflict an unbiased representation /// is chosen both for content and for trees, causing a conflict. /// /// With `Some(favor)` one can choose a side to prefer in order to forcefully resolve an otherwise irreconcilable conflict, /// loosing information in the process. pub fn with_file_favor(mut self, file_favor: Option) -> Self { self.file_favor = file_favor; self } /// When `None`, the default, both sides will be treated equally, trying to keep both conflicting changes in the tree, possibly /// by renaming one side to move it out of the way. /// /// With `Some(favor)` one can choose a side to prefer in order to forcefully resolve an otherwise irreconcilable conflict, /// loosing information in the process. pub fn with_tree_favor(mut self, tree_favor: Option) -> Self { self.tree_favor = tree_favor; self } } } gix-0.69.1/src/object/blob.rs000064400000000000000000000154221046102023000140350ustar 00000000000000use crate::{Blob, ObjectDetached}; /// #[cfg(feature = "blob-diff")] pub mod diff { use std::ops::Range; use gix_diff::blob::platform::prepare_diff::Operation; use crate::bstr::ByteSlice; /// A platform to keep temporary information to perform line diffs on modified blobs. /// pub struct Platform<'a> { /// The cache holding diffable data related to our blobs. pub resource_cache: &'a mut gix_diff::blob::Platform, } /// pub mod init { /// The error returned by [`object::tree::diff::Change::diff`](crate::object::tree::diff::Change::diff()). 
pub type Error = gix_diff::blob::platform::set_resource::Error; } /// pub mod lines { use crate::bstr::BStr; /// The error returned by [Platform::lines()](super::Platform::lines()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error where E: std::error::Error + Send + Sync + 'static, { #[error(transparent)] ProcessHunk(E), #[error(transparent)] PrepareDiff(#[from] gix_diff::blob::platform::prepare_diff::Error), } /// A change to a hunk of lines. pub enum Change<'a, 'data> { /// Lines were added. Addition { /// The lines themselves without terminator. lines: &'a [&'data BStr], }, /// Lines were removed. Deletion { /// The lines themselves without terminator. lines: &'a [&'data BStr], }, /// Lines have been replaced. Modification { /// The replaced lines without terminator. lines_before: &'a [&'data BStr], /// The new lines without terminator. lines_after: &'a [&'data BStr], }, } } impl Platform<'_> { /// Perform a diff on lines between the old and the new version of a blob, passing each hunk of lines to `process_hunk`. /// The diffing algorithm is determined by the `diff.algorithm` configuration, or individual diff drivers. /// Note that `process_hunk` is not called if one of the involved resources are binary, but that can be determined /// by introspecting the outcome. 
// TODO: more tests (only tested insertion right now) pub fn lines( &mut self, mut process_hunk: FnH, ) -> Result, lines::Error> where FnH: FnMut(lines::Change<'_, '_>) -> Result<(), E>, E: std::error::Error + Send + Sync + 'static, { self.resource_cache.options.skip_internal_diff_if_external_is_configured = false; let prep = self.resource_cache.prepare_diff()?; match prep.operation { Operation::InternalDiff { algorithm } => { let input = prep.interned_input(); let mut err = None; let mut lines = Vec::new(); gix_diff::blob::diff(algorithm, &input, |before: Range, after: Range| { if err.is_some() { return; } lines.clear(); lines.extend( input.before[before.start as usize..before.end as usize] .iter() .map(|&line| input.interner[line].as_bstr()), ); let end_of_before = lines.len(); lines.extend( input.after[after.start as usize..after.end as usize] .iter() .map(|&line| input.interner[line].as_bstr()), ); let hunk_before = &lines[..end_of_before]; let hunk_after = &lines[end_of_before..]; if hunk_after.is_empty() { err = process_hunk(lines::Change::Deletion { lines: hunk_before }).err(); } else if hunk_before.is_empty() { err = process_hunk(lines::Change::Addition { lines: hunk_after }).err(); } else { err = process_hunk(lines::Change::Modification { lines_before: hunk_before, lines_after: hunk_after, }) .err(); } }); if let Some(err) = err { return Err(lines::Error::ProcessHunk(err)); } } Operation::ExternalCommand { .. } => { unreachable!("we disabled that") } Operation::SourceOrDestinationIsBinary => {} }; Ok(prep) } /// Count the amount of removed and inserted lines efficiently. /// Note that nothing will happen if one of the inputs is binary, and `None` will be returned. 
pub fn line_counts( &mut self, ) -> Result>, gix_diff::blob::platform::prepare_diff::Error> { self.resource_cache.options.skip_internal_diff_if_external_is_configured = false; let prep = self.resource_cache.prepare_diff()?; match prep.operation { Operation::InternalDiff { algorithm } => { let tokens = prep.interned_input(); let counter = gix_diff::blob::diff(algorithm, &tokens, gix_diff::blob::sink::Counter::default()); Ok(Some(counter)) } Operation::ExternalCommand { .. } => { unreachable!("we disabled that") } Operation::SourceOrDestinationIsBinary => Ok(None), } } } } /// Remove Lifetime impl Blob<'_> { /// Create an owned instance of this object, copying our data in the process. pub fn detached(&self) -> ObjectDetached { ObjectDetached { id: self.id, kind: gix_object::Kind::Blob, data: self.data.clone(), } } /// Sever the connection to the `Repository` and turn this instance into a standalone object. pub fn detach(self) -> ObjectDetached { self.into() } /// Retrieve this instance's data, leaving its own data empty. /// /// This method works around the immovability of members of this type. pub fn take_data(&mut self) -> Vec { std::mem::take(&mut self.data) } } gix-0.69.1/src/object/commit.rs000064400000000000000000000147661046102023000144210ustar 00000000000000use crate::{bstr, bstr::BStr, Commit, ObjectDetached, Tree}; mod error { use crate::object; #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindExistingObject(#[from] object::find::existing::Error), #[error("The commit could not be decoded fully or partially")] Decode(#[from] gix_object::decode::Error), #[error("Expected object of type {}, but got {}", .expected, .actual)] ObjectKind { expected: gix_object::Kind, actual: gix_object::Kind, }, } } pub use error::Error; /// Remove Lifetime impl Commit<'_> { /// Create an owned instance of this object, copying our data in the process. 
pub fn detached(&self) -> ObjectDetached { ObjectDetached { id: self.id, kind: gix_object::Kind::Commit, data: self.data.clone(), } } /// Sever the connection to the `Repository` and turn this instance into a standalone object. pub fn detach(self) -> ObjectDetached { self.into() } /// Retrieve this instance's encoded data, leaving its own data empty. /// /// This method works around the immovability of members of this type. pub fn take_data(&mut self) -> Vec { std::mem::take(&mut self.data) } } impl<'repo> Commit<'repo> { /// Turn this objects id into a shortened id with a length in hex as configured by `core.abbrev`. pub fn short_id(&self) -> Result { use crate::ext::ObjectIdExt; self.id.attach(self.repo).shorten() } /// Parse the commits message into a [`MessageRef`][gix_object::commit::MessageRef] pub fn message(&self) -> Result, gix_object::decode::Error> { Ok(gix_object::commit::MessageRef::from_bytes(self.message_raw()?)) } /// Decode the commit object until the message and return it. pub fn message_raw(&self) -> Result<&'_ BStr, gix_object::decode::Error> { gix_object::CommitRefIter::from_bytes(&self.data).message() } /// Obtain the message by using intricate knowledge about the encoding, which is fastest and /// can't fail at the expense of error handling. pub fn message_raw_sloppy(&self) -> &BStr { use bstr::ByteSlice; self.data .find(b"\n\n") .map(|pos| &self.data[pos + 2..]) .unwrap_or_default() .as_bstr() } /// Decode the commit and obtain the time at which the commit was created. /// /// For the time at which it was authored, refer to `.decode()?.author.time`. pub fn time(&self) -> Result { Ok(self.committer()?.time) } /// Decode the entire commit object and return it for accessing all commit information. /// /// It will allocate only if there are more than 2 parents. /// /// Note that the returned commit object does make lookup easy and should be /// used for successive calls to string-ish information to avoid decoding the object /// more than once. 
pub fn decode(&self) -> Result, gix_object::decode::Error> { gix_object::CommitRef::from_bytes(&self.data) } /// Return an iterator over tokens, representing this commit piece by piece. pub fn iter(&self) -> gix_object::CommitRefIter<'_> { gix_object::CommitRefIter::from_bytes(&self.data) } /// Return the commits author, with surrounding whitespace trimmed. pub fn author(&self) -> Result, gix_object::decode::Error> { gix_object::CommitRefIter::from_bytes(&self.data) .author() .map(|s| s.trim()) } /// Return the commits committer. with surrounding whitespace trimmed. pub fn committer(&self) -> Result, gix_object::decode::Error> { gix_object::CommitRefIter::from_bytes(&self.data) .committer() .map(|s| s.trim()) } /// Decode this commits parent ids on the fly without allocating. // TODO: tests pub fn parent_ids(&self) -> impl Iterator> + '_ { use crate::ext::ObjectIdExt; let repo = self.repo; gix_object::CommitRefIter::from_bytes(&self.data) .parent_ids() .map(move |id| id.attach(repo)) } /// Parse the commit and return the tree object it points to. pub fn tree(&self) -> Result, Error> { match self.tree_id()?.object()?.try_into_tree() { Ok(tree) => Ok(tree), Err(crate::object::try_into::Error { actual, expected, .. }) => Err(Error::ObjectKind { actual, expected }), } } /// Parse the commit and return the tree id it points to. pub fn tree_id(&self) -> Result, gix_object::decode::Error> { gix_object::CommitRefIter::from_bytes(&self.data) .tree_id() .map(|id| crate::Id::from_id(id, self.repo)) } /// Return our id own id with connection to this repository. pub fn id(&self) -> crate::Id<'repo> { use crate::ext::ObjectIdExt; self.id.attach(self.repo) } /// Obtain a platform for traversing ancestors of this commit. pub fn ancestors(&self) -> crate::revision::walk::Platform<'repo> { self.id().ancestors() } /// Create a platform to further configure a `git describe` operation to find a name for this commit by looking /// at the closest annotated tags (by default) in its past. 
#[cfg(feature = "revision")] pub fn describe(&self) -> crate::commit::describe::Platform<'repo> { crate::commit::describe::Platform { id: self.id, repo: self.repo, select: Default::default(), first_parent: false, id_as_fallback: false, max_candidates: 10, } } /// Extracts the PGP signature and the data that was used to create the signature, or `None` if it wasn't signed. // TODO: make it possible to verify the signature, probably by wrapping `SignedData`. It's quite some work to do it properly. pub fn signature( &self, ) -> Result, gix_object::commit::SignedData<'_>)>, gix_object::decode::Error> { gix_object::CommitRefIter::signature(&self.data) } } impl std::fmt::Debug for Commit<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Commit({})", self.id) } } gix-0.69.1/src/object/errors.rs000064400000000000000000000031201046102023000144230ustar 00000000000000/// pub mod conversion { /// The error returned by [`crate::object::try_to_()`][crate::Object::try_to_commit_ref()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Decode(#[from] gix_object::decode::Error), #[error("Expected object type {}, but got {}", .expected, .actual)] UnexpectedType { expected: gix_object::Kind, actual: gix_object::Kind, }, } } /// pub mod find { /// Indicate that an error occurred when trying to find an object. #[derive(Debug, thiserror::Error)] #[error(transparent)] pub struct Error(#[from] pub gix_object::find::Error); /// pub mod existing { /// An object could not be found in the database, or an error occurred when trying to obtain it. pub type Error = gix_object::find::existing::Error; /// pub mod with_conversion { /// The error returned by [Repository::find_commit()](crate::Repository::find_commit). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Find(#[from] crate::object::find::existing::Error), #[error(transparent)] Convert(#[from] crate::object::try_into::Error), } } } } /// pub mod write { /// An error to indicate writing to the loose object store failed. #[derive(Debug, thiserror::Error)] #[error(transparent)] pub struct Error(#[from] pub gix_object::write::Error); } gix-0.69.1/src/object/impls.rs000064400000000000000000000100551046102023000142400ustar 00000000000000use crate::{object, Blob, Commit, Object, ObjectDetached, Tag, Tree}; impl<'repo> From> for ObjectDetached { fn from(mut v: Object<'repo>) -> Self { ObjectDetached { id: v.id, kind: v.kind, data: steal_from_freelist(&mut v.data), } } } impl<'repo> From> for ObjectDetached { fn from(mut v: Commit<'repo>) -> Self { ObjectDetached { id: v.id, kind: gix_object::Kind::Commit, data: steal_from_freelist(&mut v.data), } } } impl<'repo> From> for ObjectDetached { fn from(mut v: Tag<'repo>) -> Self { ObjectDetached { id: v.id, kind: gix_object::Kind::Tag, data: steal_from_freelist(&mut v.data), } } } impl<'repo> From> for ObjectDetached { fn from(mut v: Blob<'repo>) -> Self { ObjectDetached { id: v.id, kind: gix_object::Kind::Blob, data: steal_from_freelist(&mut v.data), } } } impl<'repo> From> for ObjectDetached { fn from(mut v: Tree<'repo>) -> Self { ObjectDetached { id: v.id, kind: gix_object::Kind::Tree, data: steal_from_freelist(&mut v.data), } } } impl<'repo> From> for Object<'repo> { fn from(mut v: Commit<'repo>) -> Self { Object { id: v.id, kind: gix_object::Kind::Commit, data: steal_from_freelist(&mut v.data), repo: v.repo, } } } impl AsRef<[u8]> for Object<'_> { fn as_ref(&self) -> &[u8] { &self.data } } impl AsRef<[u8]> for ObjectDetached { fn as_ref(&self) -> &[u8] { &self.data } } impl<'repo> TryFrom> for Commit<'repo> { type Error = Object<'repo>; fn try_from(mut value: Object<'repo>) -> Result { let repo = value.repo; match value.kind { 
object::Kind::Commit => Ok(Commit { id: value.id, repo, data: steal_from_freelist(&mut value.data), }), _ => Err(value), } } } impl<'repo> TryFrom> for Tag<'repo> { type Error = Object<'repo>; fn try_from(mut value: Object<'repo>) -> Result { let repo = value.repo; match value.kind { object::Kind::Tag => Ok(Tag { id: value.id, repo, data: steal_from_freelist(&mut value.data), }), _ => Err(value), } } } impl<'repo> TryFrom> for Tree<'repo> { type Error = Object<'repo>; fn try_from(mut value: Object<'repo>) -> Result { let repo = value.repo; match value.kind { object::Kind::Tree => Ok(Tree { id: value.id, repo, data: steal_from_freelist(&mut value.data), }), _ => Err(value), } } } impl<'repo> TryFrom> for Blob<'repo> { type Error = Object<'repo>; fn try_from(mut value: Object<'repo>) -> Result { let repo = value.repo; match value.kind { object::Kind::Blob => Ok(Blob { id: value.id, repo, data: steal_from_freelist(&mut value.data), }), _ => Err(value), } } } impl std::fmt::Debug for Object<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { use gix_object::Kind::*; let type_name = match self.kind { Blob => "Blob", Commit => "Commit", Tree => "Tree", Tag => "Tag", }; write!(f, "{}({})", type_name, self.id) } } /// In conjunction with the handles free list, leaving an empty Vec in place of the original causes it to not be /// returned to the free list. fn steal_from_freelist(data: &mut Vec) -> Vec { std::mem::take(data) } gix-0.69.1/src/object/mod.rs000064400000000000000000000170361046102023000137010ustar 00000000000000//! 
#![allow(clippy::empty_docs)] use gix_hash::ObjectId; pub use gix_object::Kind; use crate::{Blob, Commit, Id, Object, ObjectDetached, Tag, Tree}; mod errors; pub(crate) mod cache { pub use gix_pack::cache::object::MemoryCappedHashmap; } pub use errors::{conversion, find, write}; /// pub mod blob; /// pub mod commit; mod impls; pub mod peel; mod tag; /// pub mod tree; /// pub mod try_into { #[derive(thiserror::Error, Debug)] #[allow(missing_docs)] #[error("Object named {id} was supposed to be of kind {expected}, but was kind {actual}.")] pub struct Error { pub actual: gix_object::Kind, pub expected: gix_object::Kind, pub id: gix_hash::ObjectId, } } impl ObjectDetached { /// Infuse this owned object with `repo` access. pub fn attach(self, repo: &crate::Repository) -> Object<'_> { Object { id: self.id, kind: self.kind, data: self.data, repo, } } } impl std::fmt::Debug for ObjectDetached { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { use gix_object::Kind::*; let type_name = match self.kind { Blob => "Blob", Commit => "Commit", Tree => "Tree", Tag => "Tag", }; write!(f, "{}({})", type_name, self.id) } } /// Consuming conversions to attached object kinds. impl<'repo> Object<'repo> { pub(crate) fn from_data( id: impl Into, kind: Kind, data: Vec, repo: &'repo crate::Repository, ) -> Self { Object { id: id.into(), kind, data, repo, } } /// Transform this object into a blob, or panic if it is none. pub fn into_blob(self) -> Blob<'repo> { match self.try_into() { Ok(blob) => blob, Err(this) => panic!("Tried to use {} as blob, but was {}", this.id, this.kind), } } /// Transform this object into a tree, or panic if it is none. pub fn into_tree(self) -> Tree<'repo> { match self.try_into() { Ok(tree) => tree, Err(this) => panic!("Tried to use {} as tree, but was {}", this.id, this.kind), } } /// Transform this object into a commit, or panic if it is none. 
pub fn into_commit(self) -> Commit<'repo> { match self.try_into() { Ok(commit) => commit, Err(this) => panic!("Tried to use {} as commit, but was {}", this.id, this.kind), } } /// Transform this object into a tag, or panic if it is none. pub fn into_tag(self) -> Tag<'repo> { match self.try_into() { Ok(tag) => tag, Err(this) => panic!("Tried to use {} as tag, but was {}", this.id, this.kind), } } /// Transform this object into a commit, or return it as part of the `Err` if it is no commit. pub fn try_into_commit(self) -> Result, try_into::Error> { self.try_into().map_err(|this: Self| try_into::Error { id: this.id, actual: this.kind, expected: gix_object::Kind::Commit, }) } /// Transform this object into a tag, or return it as part of the `Err` if it is no commit. pub fn try_into_tag(self) -> Result, try_into::Error> { self.try_into().map_err(|this: Self| try_into::Error { id: this.id, actual: this.kind, expected: gix_object::Kind::Commit, }) } /// Transform this object into a tree, or return it as part of the `Err` if it is no tree. pub fn try_into_tree(self) -> Result, try_into::Error> { self.try_into().map_err(|this: Self| try_into::Error { id: this.id, actual: this.kind, expected: gix_object::Kind::Tree, }) } /// Transform this object into a blob, or return it as part of the `Err` if it is no blob. pub fn try_into_blob(self) -> Result, try_into::Error> { self.try_into().map_err(|this: Self| try_into::Error { id: this.id, actual: this.kind, expected: gix_object::Kind::Blob, }) } } impl Object<'_> { /// Create an owned instance of this object, copying our data in the process. pub fn detached(&self) -> ObjectDetached { ObjectDetached { id: self.id, kind: self.kind, data: self.data.clone(), } } /// Sever the connection to the `Repository` and turn this instance into a standalone object. pub fn detach(self) -> ObjectDetached { self.into() } } /// Conversions to detached, lower-level object types. 
impl<'repo> Object<'repo> { /// Obtain a fully parsed commit whose fields reference our data buffer, /// /// # Panic /// /// - this object is not a commit /// - the commit could not be decoded pub fn to_commit_ref(&self) -> gix_object::CommitRef<'_> { self.try_to_commit_ref().expect("BUG: need a commit") } /// Obtain a fully parsed commit whose fields reference our data buffer. pub fn try_to_commit_ref(&self) -> Result, conversion::Error> { gix_object::Data::new(self.kind, &self.data) .decode()? .into_commit() .ok_or(conversion::Error::UnexpectedType { expected: gix_object::Kind::Commit, actual: self.kind, }) } /// Obtain an iterator over commit tokens like in [`to_commit_iter()`][Object::try_to_commit_ref_iter()]. /// /// # Panic /// /// - this object is not a commit pub fn to_commit_ref_iter(&self) -> gix_object::CommitRefIter<'_> { gix_object::Data::new(self.kind, &self.data) .try_into_commit_iter() .expect("BUG: This object must be a commit") } /// Obtain a commit token iterator from the data in this instance, if it is a commit. pub fn try_to_commit_ref_iter(&self) -> Option> { gix_object::Data::new(self.kind, &self.data).try_into_commit_iter() } /// Obtain a tag token iterator from the data in this instance. /// /// # Panic /// /// - this object is not a tag pub fn to_tag_ref_iter(&self) -> gix_object::TagRefIter<'_> { gix_object::Data::new(self.kind, &self.data) .try_into_tag_iter() .expect("BUG: this object must be a tag") } /// Obtain a tag token iterator from the data in this instance. /// /// # Panic /// /// - this object is not a tag pub fn try_to_tag_ref_iter(&self) -> Option> { gix_object::Data::new(self.kind, &self.data).try_into_tag_iter() } /// Obtain a tag object from the data in this instance. 
/// /// # Panic /// /// - this object is not a tag /// - the tag could not be decoded pub fn to_tag_ref(&self) -> gix_object::TagRef<'_> { self.try_to_tag_ref().expect("BUG: need tag") } /// Obtain a fully parsed tag object whose fields reference our data buffer. pub fn try_to_tag_ref(&self) -> Result, conversion::Error> { gix_object::Data::new(self.kind, &self.data) .decode()? .into_tag() .ok_or(conversion::Error::UnexpectedType { expected: gix_object::Kind::Tag, actual: self.kind, }) } /// Return the attached id of this object. pub fn id(&self) -> Id<'repo> { Id::from_id(self.id, self.repo) } } gix-0.69.1/src/object/peel.rs000064400000000000000000000072321046102023000140440ustar 00000000000000//! #![allow(clippy::empty_docs)] use crate::{ object, object::{peel, Kind}, Commit, Object, Tree, }; /// pub mod to_kind { mod error { use crate::object; /// The error returned by [`Object::peel_to_kind()`][crate::Object::peel_to_kind()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindExistingObject(#[from] object::find::existing::Error), #[error("Last encountered object {oid} was {actual} while trying to peel to {expected}")] NotFound { oid: gix_hash::Prefix, actual: object::Kind, expected: object::Kind, }, } } pub use error::Error; } impl<'repo> Object<'repo> { // TODO: tests /// Follow tags to their target and commits to trees until the given `kind` of object is encountered. /// /// Note that this object doesn't necessarily have to be the end of the chain. /// Typical values are [`Kind::Commit`] or [`Kind::Tree`]. 
pub fn peel_to_kind(mut self, kind: Kind) -> Result { loop { match self.kind { our_kind if kind == our_kind => { return Ok(self); } Kind::Commit => { let tree_id = self .try_to_commit_ref_iter() .expect("commit") .tree_id() .expect("valid commit"); let repo = self.repo; drop(self); self = repo.find_object(tree_id)?; } Kind::Tag => { let target_id = self.to_tag_ref_iter().target_id().expect("valid tag"); let repo = self.repo; drop(self); self = repo.find_object(target_id)?; } Kind::Tree | Kind::Blob => { return Err(peel::to_kind::Error::NotFound { oid: self.id().shorten().unwrap_or_else(|_| self.id.into()), actual: self.kind, expected: kind, }) } } } } /// Peel this object into a tree and return it, if this is possible. /// /// This will follow tag objects and commits until their tree is reached. pub fn peel_to_tree(self) -> Result, peel::to_kind::Error> { Ok(self.peel_to_kind(gix_object::Kind::Tree)?.into_tree()) } /// Peel this object into a commit and return it, if this is possible. /// /// This will follow tag objects until a commit is reached. pub fn peel_to_commit(self) -> Result, peel::to_kind::Error> { Ok(self.peel_to_kind(gix_object::Kind::Commit)?.into_commit()) } // TODO: tests /// Follow all tag object targets until a commit, tree or blob is reached. /// /// Note that this method is different from [`peel_to_kind(…)`][Object::peel_to_kind()] as it won't /// peel commits to their tree, but handles tags only. pub fn peel_tags_to_end(mut self) -> Result { loop { match self.kind { Kind::Commit | Kind::Tree | Kind::Blob => break Ok(self), Kind::Tag => { let target_id = self.to_tag_ref_iter().target_id().expect("valid tag"); let repo = self.repo; drop(self); self = repo.find_object(target_id)?; } } } } } gix-0.69.1/src/object/tag.rs000064400000000000000000000034251046102023000136720ustar 00000000000000use crate::{ext::ObjectIdExt, ObjectDetached, Tag}; impl<'repo> Tag<'repo> { /// Decode the entire tag object and return it for accessing all tag information. 
/// /// This never allocates. /// /// Note that the returned commit object does make lookup easy and should be /// used for successive calls to string-ish information to avoid decoding the object /// more than once. pub fn decode(&self) -> Result, gix_object::decode::Error> { gix_object::TagRef::from_bytes(&self.data) } /// Decode this tag partially and return the id of its target. pub fn target_id(&self) -> Result, gix_object::decode::Error> { gix_object::TagRefIter::from_bytes(&self.data) .target_id() .map(|id| id.attach(self.repo)) } /// Decode this tag partially and return the tagger, if the field exists. pub fn tagger(&self) -> Result>, gix_object::decode::Error> { gix_object::TagRefIter::from_bytes(&self.data).tagger() } } /// Remove Lifetime impl Tag<'_> { /// Create an owned instance of this object, copying our data in the process. pub fn detached(&self) -> ObjectDetached { ObjectDetached { id: self.id, kind: gix_object::Kind::Tag, data: self.data.clone(), } } /// Sever the connection to the `Repository` and turn this instance into a standalone object. pub fn detach(self) -> ObjectDetached { self.into() } /// Retrieve this instance's encoded data, leaving its own data empty. /// /// This method works around the immovability of members of this type. pub fn take_data(&mut self) -> Vec { std::mem::take(&mut self.data) } } gix-0.69.1/src/object/tree/diff/change.rs000064400000000000000000000244471046102023000162220ustar 00000000000000use super::ChangeDetached; use crate::bstr::{BStr, ByteSlice}; use crate::ext::ObjectIdExt; use crate::object::tree::diff::Change; use crate::Repository; impl Change<'_, '_, '_> { /// Produce a platform for performing a line-diff no matter whether the underlying [Change] is an addition, modification, /// deletion or rewrite. /// Use `resource_cache` to store the diffable data and possibly reuse previously stored data, usually obtained with /// [Repository::diff_resource_cache()]. 
/// Afterward the platform, which holds on to `resource_cache`, can be used to perform ready-made operations on the /// pre-set resources. /// /// ### Warning about Memory Consumption /// /// `resource_cache` only grows, so one should call [`gix_diff::blob::Platform::clear_resource_cache`] occasionally. pub fn diff<'b>( &self, resource_cache: &'b mut gix_diff::blob::Platform, ) -> Result, crate::object::blob::diff::init::Error> { resource_cache.set_resource_by_change((*self).into(), &self.id().repo.objects)?; Ok(crate::object::blob::diff::Platform { resource_cache }) } } impl<'a> From> for gix_diff::tree_with_rewrites::ChangeRef<'a> { fn from(value: Change<'a, '_, '_>) -> Self { use gix_diff::tree_with_rewrites::ChangeRef; match value { Change::Addition { location, entry_mode, relation, id, } => ChangeRef::Addition { location, entry_mode, relation, id: id.detach(), }, Change::Deletion { location, entry_mode, relation, id, } => ChangeRef::Deletion { location, entry_mode, relation, id: id.detach(), }, Change::Modification { location, previous_entry_mode, previous_id, entry_mode, id, } => ChangeRef::Modification { location, previous_entry_mode, previous_id: previous_id.detach(), entry_mode, id: id.detach(), }, Change::Rewrite { source_location, source_relation, source_entry_mode, source_id, diff, entry_mode, location, id, relation, copy, } => ChangeRef::Rewrite { source_location, source_entry_mode, source_relation, source_id: source_id.detach(), diff, entry_mode, id: id.detach(), location, relation, copy, }, } } } impl<'a, 'old, 'new> Change<'a, 'old, 'new> { /// Convert `change` into this instance type, attaching the `old_repo` and `new_repo` to each side respectively. /// Note that both repos are typically the same. 
pub fn from_change_ref( change: gix_diff::tree_with_rewrites::ChangeRef<'a>, old_repo: &'old Repository, new_repo: &'new Repository, ) -> Self { use gix_diff::tree_with_rewrites::ChangeRef; match change { ChangeRef::Addition { location, entry_mode, relation, id, } => Change::Addition { location, entry_mode, relation, id: id.attach(new_repo), }, ChangeRef::Deletion { location, entry_mode, relation, id, } => Change::Deletion { location, entry_mode, relation, id: id.attach(old_repo), }, ChangeRef::Modification { location, previous_entry_mode, previous_id, entry_mode, id, } => Change::Modification { location, previous_entry_mode, entry_mode, previous_id: previous_id.attach(old_repo), id: id.attach(new_repo), }, ChangeRef::Rewrite { source_location, source_entry_mode, source_relation, source_id, diff, entry_mode, id, location, relation, copy, } => Change::Rewrite { source_location, source_relation, source_entry_mode, source_id: source_id.attach(old_repo), diff, entry_mode, location, id: id.attach(new_repo), relation, copy, }, } } } /// Lifecycle impl Change<'_, '_, '_> { /// Detach the repository instance to obtain a fully-owned version pub fn detach(self) -> ChangeDetached { match self { Change::Addition { entry_mode, id, location, relation, } => ChangeDetached::Addition { entry_mode, id: id.detach(), location: location.to_owned(), relation, }, Change::Deletion { entry_mode, id, location, relation, } => ChangeDetached::Deletion { entry_mode, id: id.detach(), location: location.to_owned(), relation, }, Change::Modification { previous_entry_mode, previous_id, entry_mode, id, location, } => ChangeDetached::Modification { previous_entry_mode, previous_id: previous_id.detach(), entry_mode, id: id.detach(), location: location.to_owned(), }, Change::Rewrite { source_location, source_relation, source_entry_mode, source_id, diff, entry_mode, id, relation, copy, location, } => ChangeDetached::Rewrite { source_location: source_location.to_owned(), source_entry_mode, 
source_relation, source_id: source_id.detach(), diff, entry_mode, id: id.detach(), copy, location: location.to_owned(), relation, }, } } } impl crate::ext::TreeDiffChangeExt for gix_diff::tree_with_rewrites::Change { fn attach<'old, 'new>(&self, old_repo: &'old Repository, new_repo: &'new Repository) -> Change<'_, 'old, 'new> { match self { ChangeDetached::Addition { entry_mode, id, location, relation, } => Change::Addition { entry_mode: *entry_mode, id: id.attach(new_repo), location: location.as_bstr(), relation: *relation, }, ChangeDetached::Deletion { entry_mode, id, location, relation, } => Change::Deletion { entry_mode: *entry_mode, id: id.attach(old_repo), location: location.as_bstr(), relation: *relation, }, ChangeDetached::Modification { previous_entry_mode, previous_id, entry_mode, id, location, } => Change::Modification { previous_entry_mode: *previous_entry_mode, previous_id: previous_id.attach(old_repo), entry_mode: *entry_mode, id: id.attach(new_repo), location: location.as_bstr(), }, ChangeDetached::Rewrite { source_location, source_relation, source_entry_mode, source_id, diff, entry_mode, id, copy, location, relation, } => Change::Rewrite { source_location: source_location.as_ref(), source_relation: *source_relation, source_entry_mode: *source_entry_mode, source_id: source_id.attach(old_repo), diff: *diff, entry_mode: *entry_mode, id: id.attach(new_repo), copy: *copy, relation: *relation, location: location.as_bstr(), }, } } } impl Change<'_, '_, '_> { /// Return the current ID of the change. pub fn id(&self) -> crate::Id<'_> { match self { Change::Addition { id, .. } | Change::Deletion { id, .. } | Change::Modification { id, .. } | Change::Rewrite { id, .. } => *id, } } /// Return the location of this instance. pub fn location(&self) -> &BStr { match self { Change::Addition { location, .. } | Change::Deletion { location, .. } | Change::Modification { location, .. } | Change::Rewrite { location, .. 
} => location.as_bstr(), } } /// Return the current mode of this instance. pub fn entry_mode(&self) -> gix_object::tree::EntryMode { match self { Change::Addition { entry_mode, .. } | Change::Deletion { entry_mode, .. } | Change::Modification { entry_mode, .. } | Change::Rewrite { entry_mode, .. } => *entry_mode, } } } gix-0.69.1/src/object/tree/diff/for_each.rs000064400000000000000000000076301046102023000165360ustar 00000000000000use gix_object::TreeRefIter; use super::{Action, Change, Platform}; use crate::{diff::rewrites::tracker, Tree}; /// The error return by methods on the [diff platform][Platform]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Diff(#[from] gix_diff::tree_with_rewrites::Error), #[error("The user-provided callback failed")] ForEach(#[source] Box), #[error(transparent)] ResourceCache(#[from] crate::repository::diff_resource_cache::Error), #[error("Failure during rename tracking")] RenameTracking(#[from] tracker::emit::Error), } /// Add the item to compare to. impl<'old> Platform<'_, 'old> { /// Call `for_each` repeatedly with all changes that are needed to convert the source of the diff to the tree to `other`. /// /// `other` could also be created with the [`empty_tree()`][crate::Repository::empty_tree()] method to handle the first commit /// in a repository - it doesn't have a parent, equivalent to compare 'nothing' to something. pub fn for_each_to_obtain_tree<'new, E>( &mut self, other: &Tree<'new>, for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result, ) -> Result, Error> where E: Into>, { self.for_each_to_obtain_tree_inner(other, for_each, None) } /// Like [`Self::for_each_to_obtain_tree()`], but with a reusable `resource_cache` which is used to perform /// diffs fast. /// /// Reusing it between multiple invocations saves a lot of IOps as it avoids the creation /// of a temporary `resource_cache` that triggers reading or checking for multiple gitattribute files. 
/// Note that it's recommended to call [`gix_diff::blob::Platform::clear_resource_cache()`] between the calls /// to avoid runaway memory usage, as the cache isn't limited. /// /// Note that to do rename tracking like `git` does, one has to configure the `resource_cache` with /// a conversion pipeline that uses [`gix_diff::blob::pipeline::Mode::ToGit`]. pub fn for_each_to_obtain_tree_with_cache<'new, E>( &mut self, other: &Tree<'new>, resource_cache: &mut gix_diff::blob::Platform, for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result, ) -> Result, Error> where E: Into>, { self.for_each_to_obtain_tree_inner(other, for_each, Some(resource_cache)) } fn for_each_to_obtain_tree_inner<'new, E>( &mut self, other: &Tree<'new>, mut for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result, resource_cache: Option<&mut gix_diff::blob::Platform>, ) -> Result, Error> where E: Into>, { let repo = self.lhs.repo; let mut storage; let cache = match resource_cache { None => { storage = repo.diff_resource_cache(gix_diff::blob::pipeline::Mode::ToGit, Default::default())?; &mut storage } Some(cache) => cache, }; let opts = self.options.into(); Ok(gix_diff::tree_with_rewrites( TreeRefIter::from_bytes(&self.lhs.data), TreeRefIter::from_bytes(&other.data), cache, &mut self.state, &repo.objects, |change| { for_each(Change::from_change_ref(change, repo, other.repo)).map(|action| match action { Action::Continue => gix_diff::tree_with_rewrites::Action::Continue, Action::Cancel => gix_diff::tree_with_rewrites::Action::Cancel, }) }, opts, )?) } } gix-0.69.1/src/object/tree/diff/mod.rs000064400000000000000000000237521046102023000155520ustar 00000000000000use gix_diff::tree; use crate::{bstr::BStr, Id, Tree}; /// Returned by the `for_each` function to control flow. #[derive(Default, Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)] pub enum Action { /// Continue the traversal of changes. #[default] Continue, /// Stop the traversal of changes and stop calling this function. 
Cancel, } pub use gix_diff::tree_with_rewrites::Change as ChangeDetached; /// Represents any possible change in order to turn one tree into another. #[derive(Debug, Clone, Copy)] pub enum Change<'a, 'old, 'new> { /// An entry was added, like the addition of a file or directory. Addition { /// The location of the file or directory, if [tracking](crate::diff::Options::track_path) was enabled. /// /// It may be empty if neither [file names](crate::diff::Options::track_filename()) nor [file paths](crate::diff::Options::track_path()) /// are tracked. location: &'a BStr, /// The mode of the added entry. entry_mode: gix_object::tree::EntryMode, /// Identifies a relationship between this instance and another one, /// making it easy to reconstruct the top-level of directory changes. relation: Option, /// The object id of the added entry. id: Id<'new>, }, /// An entry was deleted, like the deletion of a file or directory. Deletion { /// The location of the file or directory, if [tracking](crate::diff::Options::track_path) was enabled. /// /// Otherwise, this value is always an empty path. location: &'a BStr, /// The mode of the deleted entry. entry_mode: gix_object::tree::EntryMode, /// Identifies a relationship between this instance and another one, /// making it easy to reconstruct the top-level of directory changes. relation: Option, /// The object id of the deleted entry. id: Id<'old>, }, /// An entry was modified, e.g. changing the contents of a file adjusts its object id and turning /// a file into a symbolic link adjusts its mode. Modification { /// The location of the file or directory, if [tracking](crate::diff::Options::track_path) was enabled. /// /// It may be empty if neither [file names](crate::diff::Options::track_filename()) nor [file paths](crate::diff::Options::track_path()) /// are tracked. location: &'a BStr, /// The mode of the entry before the modification. 
previous_entry_mode: gix_object::tree::EntryMode, /// The object id of the entry before the modification. previous_id: Id<'old>, /// The mode of the entry after the modification. entry_mode: gix_object::tree::EntryMode, /// The object id after the modification. id: Id<'new>, }, /// Entries are considered rewritten if they are not trees and they, according to some understanding of identity, were renamed /// or copied. /// In case of renames, this means they originally appeared as [`Deletion`](Change::Deletion) signalling their source as well as an /// [`Addition`](Change::Addition) acting as destination. /// /// In case of copies, the `copy` flag is true and typically represents a perfect copy of a source was made. /// /// This variant can only be encountered if [rewrite tracking](crate::diff::Options::track_rewrites()) is enabled. /// /// Note that mode changes may have occurred as well, i.e. changes from executable to non-executable or vice-versa. Rewrite { /// The location of the source of the rename operation. /// /// It may be empty if neither [file names](crate::diff::Options::track_filename()) nor [file paths](crate::diff::Options::track_path()) /// are tracked. source_location: &'a BStr, /// Identifies a relationship between the source and another source, /// making it easy to reconstruct the top-level of directory changes. source_relation: Option, /// The mode of the entry before the rename. source_entry_mode: gix_object::tree::EntryMode, /// The object id of the entry before the rename. /// /// Note that this is the same as `id` if we require the [similarity to be 100%](gix_diff::Rewrites::percentage), but may /// be different otherwise. source_id: Id<'old>, /// Information about the diff we performed to detect similarity and match the `source_id` with the current state at `id`. /// It's `None` if `source_id` is equal to `id`, as identity made an actual diff computation unnecessary. diff: Option, /// The mode of the entry after the rename. 
/// It could differ but still be considered a rename as we are concerned only about content. entry_mode: gix_object::tree::EntryMode, /// The location of the destination file or directory, if [tracking](crate::diff::Options::track_path) was enabled. /// /// It may be empty if neither [file names](crate::diff::Options::track_filename()) nor [file paths](crate::diff::Options::track_path()) /// are tracked. location: &'a BStr, /// The object id after the rename. id: Id<'new>, /// Identifies a relationship between this destination and another destination, /// making it easy to reconstruct the top-level of directory changes. relation: Option, /// If true, this rewrite is created by copy, and `source_id` is pointing to its source. Otherwise, it's a rename, and `source_id` /// points to a deleted object, as renames are tracked as deletions and additions of the same or similar content. copy: bool, }, } /// pub mod change; /// Diffing impl<'repo> Tree<'repo> { /// Return a platform to see the changes needed to create other trees, for instance. /// /// # Performance /// /// It's highly recommended to [set an object cache](crate::Repository::compute_object_cache_size_for_tree_diffs) /// to avoid extracting the same object multiple times. /// By default, similar to `git diff`, rename tracking will be enabled if it is not configured. /// /// Note that if a clone with `--filter=blob=none` was created, rename tracking may fail as it might /// try to access blobs to compute a similarity metric. Thus, it's more compatible to turn rewrite tracking off /// using [`Options::track_rewrites()`](crate::diff::Options::track_rewrites()). #[allow(clippy::result_large_err)] #[doc(alias = "diff_tree_to_tree", alias = "git2")] pub fn changes<'a>(&'a self) -> Result, crate::diff::options::init::Error> { Ok(Platform { state: Default::default(), lhs: self, options: crate::diff::Options::from_configuration(&self.repo.config)?, }) } } /// The diffing platform returned by [`Tree::changes()`]. 
#[derive(Clone)] pub struct Platform<'a, 'repo> { state: gix_diff::tree::State, lhs: &'a Tree<'repo>, options: crate::diff::Options, } impl Platform<'_, '_> { /// Adjust diff options with `change_opts`. pub fn options(&mut self, change_opts: impl FnOnce(&mut crate::diff::Options)) -> &mut Self { change_opts(&mut self.options); self } } /// Provide aggregated information of a diff between two trees. #[derive(Default, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] #[doc(alias = "DiffStats", alias = "git2")] pub struct Stats { /// The total amount of lines added in the between blobs of the two trees. #[doc(alias = "insertions", alias = "git2")] pub lines_added: u64, /// The total amount of lines removed in the between blobs of the two trees. #[doc(alias = "deletions", alias = "git2")] pub lines_removed: u64, /// The number of files that contributed to these statistics as they were added, removed or modified. pub files_changed: u64, } /// pub mod stats { /// The error returned by [`stats()`](super::Platform::stats()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] CreateResourceCache(#[from] crate::repository::diff_resource_cache::Error), #[error(transparent)] ForEachChange(#[from] crate::object::tree::diff::for_each::Error), } } /// Convenience impl Platform<'_, '_> { /// Calculate statistics about the lines of the diff between our current and the `other` tree. /// /// ### Performance Notes /// /// Be sure to forcefully disable [`track_rewrites(None)`](crate::diff::Options::track_rewrites) to avoid /// rename tracking, an operation that doesn't affect the statistics currently. /// As diffed resources aren't cached, if highly repetitive blobs are expected, performance /// may be diminished. In real-world scenarios where blobs are mostly unique, that's not an issue though. 
pub fn stats(&mut self, other: &Tree<'_>) -> Result { // let (mut number_of_files, mut lines_added, mut lines_removed) = (0, 0, 0); let mut resource_cache = self.lhs.repo.diff_resource_cache_for_tree_diff()?; let (mut files_changed, mut lines_added, mut lines_removed) = (0, 0, 0); self.for_each_to_obtain_tree(other, |change| { if let Some(counts) = change .diff(&mut resource_cache) .ok() .and_then(|mut platform| platform.line_counts().ok()) .flatten() { files_changed += 1; lines_added += u64::from(counts.insertions); lines_removed += u64::from(counts.removals); } resource_cache.clear_resource_cache_keep_allocation(); Ok::<_, std::convert::Infallible>(Action::Continue) })?; Ok(Stats { files_changed, lines_added, lines_removed, }) } } /// pub mod for_each; gix-0.69.1/src/object/tree/editor.rs000064400000000000000000000245661046102023000153550ustar 00000000000000use crate::bstr::{BStr, BString}; use crate::prelude::ObjectIdExt; use crate::{Id, Repository}; use gix_hash::ObjectId; use gix_object::tree::EntryKind; /// pub mod init { /// The error returned by [`Editor::new()](crate::object::tree::Editor::new()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] DecodeTree(#[from] gix_object::decode::Error), #[error(transparent)] ValidationOptions(#[from] crate::config::boolean::Error), } } /// pub mod write { use crate::bstr::BString; /// The error returned by [`Editor::write()](crate::object::tree::Editor::write()) and [`Cursor::write()](super::Cursor::write). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] WriteTree(#[from] crate::object::write::Error), #[error("The object {} ({}) at '{}' could not be found", id, kind.as_octal_str(), filename)] MissingObject { filename: BString, kind: gix_object::tree::EntryKind, id: gix_hash::ObjectId, }, #[error("The object {} ({}) has an invalid filename: '{}'", id, kind.as_octal_str(), filename)] InvalidFilename { filename: BString, kind: gix_object::tree::EntryKind, id: gix_hash::ObjectId, source: gix_validate::path::component::Error, }, } } /// A cursor at a specific portion of a tree to [edit](super::Editor). pub struct Cursor<'a, 'repo> { inner: gix_object::tree::editor::Cursor<'a, 'repo>, validate: gix_validate::path::component::Options, repo: &'repo Repository, } /// Lifecycle impl<'repo> super::Editor<'repo> { /// Initialize a new editor from the given `tree`. pub fn new(tree: &crate::Tree<'repo>) -> Result { let tree_ref = tree.decode()?; let repo = tree.repo; let validate = repo.config.protect_options()?; Ok(super::Editor { inner: gix_object::tree::Editor::new(tree_ref.into(), &repo.objects, repo.object_hash()), validate, repo, }) } /// Detach all extras and return the underlying plumbing editor, which won't perform validation /// when writing the tree. pub fn detach(self) -> gix_object::tree::Editor<'repo> { self.inner } } /// Tree editing #[cfg(feature = "tree-editor")] impl<'repo> crate::Tree<'repo> { /// Start editing a new tree based on this one. #[doc(alias = "treebuilder", alias = "git2")] pub fn edit(&self) -> Result, init::Error> { super::Editor::new(self) } } /// Obtain an iterator over `BStr`-components. /// /// Note that the implementation is simple, and it's mainly meant for statically known strings /// or locations obtained during a merge. pub trait ToComponents { /// Return an iterator over the components of a path, without the separator. 
fn to_components(&self) -> impl Iterator; } impl ToComponents for &str { fn to_components(&self) -> impl Iterator { self.split('/').map(Into::into) } } impl ToComponents for String { fn to_components(&self) -> impl Iterator { self.split('/').map(Into::into) } } impl ToComponents for &String { fn to_components(&self) -> impl Iterator { self.split('/').map(Into::into) } } impl ToComponents for BString { fn to_components(&self) -> impl Iterator { self.split(|b| *b == b'/').map(Into::into) } } impl ToComponents for &BString { fn to_components(&self) -> impl Iterator { self.split(|b| *b == b'/').map(Into::into) } } impl ToComponents for &BStr { fn to_components(&self) -> impl Iterator { self.split(|b| *b == b'/').map(Into::into) } } /// Cursor Handling impl<'repo> super::Editor<'repo> { /// Turn ourselves as a cursor, which points to the same tree as the editor. /// /// This is useful if a method takes a [`Cursor`], not an [`Editor`](super::Editor). pub fn to_cursor(&mut self) -> Cursor<'_, 'repo> { Cursor { inner: self.inner.to_cursor(), validate: self.validate, repo: self.repo, } } /// Create a cursor at the given `rela_path`, which must be a tree or is turned into a tree as its own edit. /// /// The returned cursor will then allow applying edits to the tree at `rela_path` as root. /// If `rela_path` is a single empty string, it is equivalent to using the current instance itself. pub fn cursor_at( &mut self, rela_path: impl ToComponents, ) -> Result, gix_object::tree::editor::Error> { Ok(Cursor { inner: self.inner.cursor_at(rela_path.to_components())?, validate: self.validate, repo: self.repo, }) } } /// Operations impl<'repo> Cursor<'_, 'repo> { /// Like [`Editor::upsert()`](super::Editor::upsert()), but with the constraint of only editing in this cursor's tree. 
pub fn upsert( &mut self, rela_path: impl ToComponents, kind: EntryKind, id: impl Into, ) -> Result<&mut Self, gix_object::tree::editor::Error> { self.inner.upsert(rela_path.to_components(), kind, id.into())?; Ok(self) } /// Like [`Editor::remove()`](super::Editor::remove), but with the constraint of only editing in this cursor's tree. pub fn remove(&mut self, rela_path: impl ToComponents) -> Result<&mut Self, gix_object::tree::editor::Error> { self.inner.remove(rela_path.to_components())?; Ok(self) } /// Like [`Editor::write()`](super::Editor::write()), but will write only the subtree of the cursor. pub fn write(&mut self) -> Result, write::Error> { write_cursor(self) } } /// Operations impl<'repo> super::Editor<'repo> { /// Set the root tree of the modification to `root`, assuring it has a well-known state. /// /// Note that this erases all previous edits. /// /// This is useful if the same editor is re-used for various trees. pub fn set_root(&mut self, root: &crate::Tree<'repo>) -> Result<&mut Self, init::Error> { let new_editor = super::Editor::new(root)?; self.inner = new_editor.inner; self.repo = new_editor.repo; Ok(self) } /// Insert a new entry of `kind` with `id` at `rela_path`, an iterator over each path component in the tree, /// like `a/b/c`. Names are matched case-sensitively. /// /// Existing leaf-entries will be overwritten unconditionally, and it is assumed that `id` is available in the object database /// or will be made available at a later point to assure the integrity of the produced tree. /// /// Intermediate trees will be created if they don't exist in the object database, otherwise they will be loaded and entries /// will be inserted into them instead. /// /// Note that `id` can be [null](ObjectId::null()) to create a placeholder. These will not be written, and paths leading /// through them will not be considered a problem. 
/// /// `id` can also be an empty tree, along with [the respective `kind`](EntryKind::Tree), even though that's normally not allowed /// in Git trees. /// /// Validation of path-components will not be performed here, but when [writing the tree](Self::write()). pub fn upsert( &mut self, rela_path: impl ToComponents, kind: EntryKind, id: impl Into, ) -> Result<&mut Self, gix_object::tree::editor::Error> { self.inner.upsert(rela_path.to_components(), kind, id.into())?; Ok(self) } /// Remove the entry at `rela_path`, loading all trees on the path accordingly. /// It's no error if the entry doesn't exist, or if `rela_path` doesn't lead to an existing entry at all. pub fn remove(&mut self, rela_path: impl ToComponents) -> Result<&mut Self, gix_object::tree::editor::Error> { self.inner.remove(rela_path.to_components())?; Ok(self) } /// Write the entire in-memory state of all changed trees (and only changed trees) to the object database. /// Note that the returned object id *can* be the empty tree if everything was removed or if nothing /// was added to the tree. /// /// The last call to `out` will be the changed root tree, whose object-id will also be returned. /// `out` is free to do any kind of additional validation, like to assure that all entries in the tree exist. /// We don't assure that as there is no validation that inserted entries are valid object ids. /// /// Future calls to [`upsert`](Self::upsert) or similar will keep working on the last seen state of the /// just-written root-tree. /// If this is not desired, use [set_root()](Self::set_root()). /// /// Before writing a tree, all of its entries (not only added ones), will be validated to assure they are /// correct. The objects pointed to by entries also have to exist already. 
pub fn write(&mut self) -> Result, write::Error> { write_cursor(&mut self.to_cursor()) } } fn write_cursor<'repo>(cursor: &mut Cursor<'_, 'repo>) -> Result, write::Error> { cursor .inner .write(|tree| -> Result { for entry in &tree.entries { gix_validate::path::component( entry.filename.as_ref(), entry .mode .is_link() .then_some(gix_validate::path::component::Mode::Symlink), cursor.validate, ) .map_err(|err| write::Error::InvalidFilename { filename: entry.filename.clone(), kind: entry.mode.into(), id: entry.oid, source: err, })?; if !entry.mode.is_commit() && !cursor.repo.has_object(entry.oid) { return Err(write::Error::MissingObject { filename: entry.filename.clone(), kind: entry.mode.into(), id: entry.oid, }); } } Ok(cursor.repo.write_object(tree)?.detach()) }) .map(|id| id.attach(cursor.repo)) } gix-0.69.1/src/object/tree/iter.rs000064400000000000000000000041521046102023000150170ustar 00000000000000use super::Tree; use crate::Repository; /// An entry within a tree pub struct EntryRef<'repo, 'a> { /// The actual entry ref we are wrapping. pub inner: gix_object::tree::EntryRef<'a>, /// The owning repository. pub repo: &'repo Repository, } impl<'repo, 'a> EntryRef<'repo, 'a> { /// The kind of object to which [`id()`][Self::id()] is pointing. pub fn mode(&self) -> gix_object::tree::EntryMode { self.inner.mode } /// The name of the file in the parent tree. pub fn filename(&self) -> &gix_object::bstr::BStr { self.inner.filename } /// Return the entries id, connected to the underlying repository. pub fn id(&self) -> crate::Id<'repo> { crate::Id::from_id(self.inner.oid, self.repo) } /// Return the plain object id of this entry, without access to the repository. pub fn oid(&self) -> &gix_hash::oid { self.inner.oid } /// Return the object this entry points to. pub fn object(&self) -> Result, crate::object::find::existing::Error> { self.id().object() } /// Return the plain object id of this entry, without access to the repository. 
pub fn object_id(&self) -> gix_hash::ObjectId { self.inner.oid.to_owned() } /// Detach the repository from this instance. pub fn detach(&self) -> gix_object::tree::EntryRef<'a> { self.inner } } impl std::fmt::Display for EntryRef<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "{:06o} {:>6} {}\t{}", *self.mode(), self.mode().as_str(), self.id().shorten_or_id(), self.filename() ) } } impl<'repo> Tree<'repo> { /// Return an iterator over tree entries to obtain information about files and directories this tree contains. pub fn iter(&self) -> impl Iterator, gix_object::decode::Error>> { let repo = self.repo; gix_object::TreeRefIter::from_bytes(&self.data).map(move |e| e.map(|entry| EntryRef { inner: entry, repo })) } } gix-0.69.1/src/object/tree/mod.rs000064400000000000000000000235211046102023000146340ustar 00000000000000use gix_hash::ObjectId; pub use gix_object::tree::{EntryKind, EntryMode}; use gix_object::{bstr::BStr, FindExt, TreeRefIter}; use crate::{object::find, Id, ObjectDetached, Tree}; /// All state needed to conveniently edit a tree, using only [update-or-insert](Editor::upsert()) and [removals](Editor::remove()). #[cfg(feature = "tree-editor")] #[derive(Clone)] pub struct Editor<'repo> { pub(crate) inner: gix_object::tree::Editor<'repo>, pub(crate) validate: gix_validate::path::component::Options, /// The owning repository. pub repo: &'repo crate::Repository, } /// Initialization impl<'repo> Tree<'repo> { /// Obtain a tree instance by handing in all components that it is made up of. pub fn from_data(id: impl Into, data: Vec, repo: &'repo crate::Repository) -> Self { Tree { id: id.into(), data, repo, } } } /// Access impl<'repo> Tree<'repo> { /// Return this tree's identifier. pub fn id(&self) -> Id<'repo> { Id::from_id(self.id, self.repo) } /// Parse our tree data and return the parse tree for direct access to its entries. 
pub fn decode(&self) -> Result, gix_object::decode::Error> { gix_object::TreeRef::from_bytes(&self.data) } /// Find the entry named `name` by iteration, or return `None` if it wasn't found. pub fn find_entry(&self, name: impl PartialEq) -> Option> { TreeRefIter::from_bytes(&self.data) .filter_map(Result::ok) .find(|entry| name.eq(entry.filename)) .map(|entry| EntryRef { inner: entry, repo: self.repo, }) } /// Follow a sequence of `path` components starting from this instance, and look them up one by one until the last component /// is looked up and its tree entry is returned. /// /// # Performance Notes /// /// Searching tree entries is currently done in sequence, which allows to the search to be allocation free. It would be possible /// to reuse a vector and use a binary search instead, which might be able to improve performance over all. /// However, a benchmark should be created first to have some data and see which trade-off to choose here. /// pub fn lookup_entry(&self, path: I) -> Result>, find::existing::Error> where I: IntoIterator, P: PartialEq, { let mut buf = self.repo.empty_reusable_buffer(); buf.clear(); let mut path = path.into_iter().peekable(); buf.extend_from_slice(&self.data); while let Some(component) = path.next() { match TreeRefIter::from_bytes(&buf) .filter_map(Result::ok) .find(|entry| component.eq(entry.filename)) { Some(entry) => { if path.peek().is_none() { return Ok(Some(Entry { inner: entry.into(), repo: self.repo, })); } else { let next_id = entry.oid.to_owned(); let obj = self.repo.objects.find(&next_id, &mut buf)?; if !obj.kind.is_tree() { return Ok(None); } } } None => return Ok(None), } } Ok(None) } /// Follow a sequence of `path` components starting from this instance, and look them up one by one until the last component /// is looked up and its tree entry is returned, while changing this instance to point to the last seen tree. /// Note that if the lookup fails, it may be impossible to continue making lookups through this tree. 
/// It's useful to have this function to be able to reuse the internal buffer of the tree. /// /// # Performance Notes /// /// Searching tree entries is currently done in sequence, which allows to the search to be allocation free. It would be possible /// to reuse a vector and use a binary search instead, which might be able to improve performance over all. /// However, a benchmark should be created first to have some data and see which trade-off to choose here. /// pub fn peel_to_entry(&mut self, path: I) -> Result>, find::existing::Error> where I: IntoIterator, P: PartialEq, { let mut path = path.into_iter().peekable(); while let Some(component) = path.next() { match TreeRefIter::from_bytes(&self.data) .filter_map(Result::ok) .find(|entry| component.eq(entry.filename)) { Some(entry) => { if path.peek().is_none() { return Ok(Some(Entry { inner: entry.into(), repo: self.repo, })); } else { let next_id = entry.oid.to_owned(); let obj = self.repo.objects.find(&next_id, &mut self.data)?; self.id = next_id; if !obj.kind.is_tree() { return Ok(None); } } } None => return Ok(None), } } Ok(None) } /// Like [`Self::lookup_entry()`], but takes a `Path` directly via `relative_path`, a path relative to this tree. /// /// # Note /// /// If any path component contains illformed UTF-8 and thus can't be converted to bytes on platforms which can't do so natively, /// the returned component will be empty which makes the lookup fail. pub fn lookup_entry_by_path( &self, relative_path: impl AsRef, ) -> Result>, find::existing::Error> { use crate::bstr::ByteSlice; self.lookup_entry(relative_path.as_ref().components().map(|c: std::path::Component<'_>| { gix_path::os_str_into_bstr(c.as_os_str()) .unwrap_or_else(|_| "".into()) .as_bytes() })) } /// Like [`Self::peel_to_entry()`], but takes a `Path` directly via `relative_path`, a path relative to this tree. 
/// /// # Note /// /// If any path component contains illformed UTF-8 and thus can't be converted to bytes on platforms which can't do so natively, /// the returned component will be empty which makes the lookup fail. pub fn peel_to_entry_by_path( &mut self, relative_path: impl AsRef, ) -> Result>, find::existing::Error> { use crate::bstr::ByteSlice; self.peel_to_entry(relative_path.as_ref().components().map(|c: std::path::Component<'_>| { gix_path::os_str_into_bstr(c.as_os_str()) .unwrap_or_else(|_| "".into()) .as_bytes() })) } } /// #[cfg(feature = "tree-editor")] pub mod editor; /// #[cfg(feature = "blob-diff")] pub mod diff; /// pub mod traverse; /// mod iter; pub use iter::EntryRef; impl std::fmt::Debug for Tree<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Tree({})", self.id) } } /// An entry in a [`Tree`], similar to an entry in a directory. #[derive(PartialEq, Debug, Clone)] pub struct Entry<'repo> { pub(crate) inner: gix_object::tree::Entry, /// The owning repository. pub repo: &'repo crate::Repository, } mod entry { use crate::{bstr::BStr, ext::ObjectIdExt, object::tree::Entry}; /// Access impl<'repo> Entry<'repo> { /// The kind of object to which `oid` is pointing to. pub fn mode(&self) -> gix_object::tree::EntryMode { self.inner.mode } /// The name of the file in the parent tree. pub fn filename(&self) -> &BStr { self.inner.filename.as_ref() } /// Return the object id of the entry. pub fn id(&self) -> crate::Id<'repo> { self.inner.oid.attach(self.repo) } /// Return the object this entry points to. pub fn object(&self) -> Result, crate::object::find::existing::Error> { self.id().object() } /// Return the plain object id of this entry, without access to the repository. pub fn oid(&self) -> &gix_hash::oid { &self.inner.oid } /// Return the plain object id of this entry, without access to the repository. 
pub fn object_id(&self) -> gix_hash::ObjectId { self.inner.oid } } /// Consuming impl Entry<'_> { /// Return the contained object. pub fn detach(self) -> gix_object::tree::Entry { self.inner } } } mod _impls { use crate::Tree; impl TryFrom> for gix_object::Tree { type Error = gix_object::decode::Error; fn try_from(t: Tree<'_>) -> Result { t.decode().map(Into::into) } } } /// Remove Lifetime impl Tree<'_> { /// Create an owned instance of this object, copying our data in the process. pub fn detached(&self) -> ObjectDetached { ObjectDetached { id: self.id, kind: gix_object::Kind::Tree, data: self.data.clone(), } } /// Sever the connection to the `Repository` and turn this instance into a standalone object. pub fn detach(self) -> ObjectDetached { self.into() } /// Retrieve this instance's encoded data, leaving its own data empty. /// /// This method works around the immovability of members of this type. pub fn take_data(&mut self) -> Vec { std::mem::take(&mut self.data) } } gix-0.69.1/src/object/tree/traverse.rs000064400000000000000000000037401046102023000157110ustar 00000000000000use crate::Tree; /// Traversal impl<'repo> Tree<'repo> { /// Obtain a platform for initiating a variety of traversals. pub fn traverse(&self) -> Platform<'_, 'repo> { Platform { root: self, breadthfirst: BreadthFirstPresets { root: self }, } } } /// An intermediate object to start traversing the parent tree from. pub struct Platform<'a, 'repo> { root: &'a Tree<'repo>, /// Provides easy access to presets for common breadth-first traversal. pub breadthfirst: BreadthFirstPresets<'a, 'repo>, } /// Presets for common choices in breadth-first traversal. #[derive(Copy, Clone)] pub struct BreadthFirstPresets<'a, 'repo> { root: &'a Tree<'repo>, } impl BreadthFirstPresets<'_, '_> { /// Returns all entries and their file paths, recursively, as reachable from this tree. 
pub fn files(&self) -> Result, gix_traverse::tree::breadthfirst::Error> { let mut recorder = gix_traverse::tree::Recorder::default(); Platform { root: self.root, breadthfirst: *self, } .breadthfirst(&mut recorder)?; Ok(recorder.records) } } impl Platform<'_, '_> { /// Start a breadth-first, recursive traversal using `delegate`, for which a [`Recorder`][gix_traverse::tree::Recorder] can be used to get started. /// /// # Note /// /// - Results are returned in sort order according to tree-entry sorting rules, one level at a time. /// - for obtaining the direct children of the tree, use [.iter()][crate::Tree::iter()] instead. pub fn breadthfirst(&self, delegate: &mut V) -> Result<(), gix_traverse::tree::breadthfirst::Error> where V: gix_traverse::tree::Visit, { let root = gix_object::TreeRefIter::from_bytes(&self.root.data); let state = gix_traverse::tree::breadthfirst::State::default(); gix_traverse::tree::breadthfirst(root, state, &self.root.repo.objects, delegate) } } gix-0.69.1/src/open/mod.rs000064400000000000000000000057411046102023000133740ustar 00000000000000use std::path::PathBuf; use crate::{bstr::BString, config}; /// Permissions associated with various resources of a git repository #[derive(Debug, Clone)] pub struct Permissions { /// Control which environment variables may be accessed. pub env: permissions::Environment, /// Permissions related where git configuration should be loaded from. pub config: permissions::Config, /// Permissions related to where `gitattributes` should be loaded from. pub attributes: permissions::Attributes, } /// The options used in [`ThreadSafeRepository::open_opts()`][crate::ThreadSafeRepository::open_opts()]. /// /// ### Replacement Objects for the object database /// /// The environment variables `GIT_REPLACE_REF_BASE` and `GIT_NO_REPLACE_OBJECTS` are mapped to `gitoxide.objects.replaceRefBase` /// and `gitoxide.objects.noReplace` respectively and then interpreted exactly as their environment variable counterparts. 
/// /// Use [Permissions] to control which environment variables can be read, and config-overrides to control these values programmatically. #[derive(Clone)] pub struct Options { pub(crate) object_store_slots: gix_odb::store::init::Slots, /// Define what is allowed while opening a repository. pub permissions: Permissions, pub(crate) git_dir_trust: Option, /// Warning: this one is copied to config::Cache - don't change it after repo open or keep in sync. pub(crate) filter_config_section: Option bool>, pub(crate) lossy_config: Option, pub(crate) lenient_config: bool, pub(crate) bail_if_untrusted: bool, pub(crate) api_config_overrides: Vec, pub(crate) cli_config_overrides: Vec, pub(crate) open_path_as_is: bool, /// Internal to pass an already obtained CWD on to where it may also be used. This avoids the CWD being queried more than once per repo. pub(crate) current_dir: Option, } /// The error returned by [`crate::open()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Failed to load the git configuration")] Config(#[from] config::Error), #[error("\"{path}\" does not appear to be a git repository")] NotARepository { source: gix_discover::is_git::Error, path: PathBuf, }, #[error(transparent)] Io(#[from] std::io::Error), #[error("The git directory at '{}' is considered unsafe as it's not owned by the current user.", .path.display())] UnsafeGitDir { path: PathBuf }, #[error(transparent)] EnvironmentAccessDenied(#[from] gix_sec::permission::Error), } mod options; pub mod permissions; mod repository; #[cfg(test)] mod tests { use super::*; #[test] fn size_of_options() { let actual = std::mem::size_of::(); let limit = 160; assert!( actual <= limit, "{actual} <= {limit}: size shouldn't change without us knowing (on windows, it's bigger)" ); } } gix-0.69.1/src/open/options.rs000064400000000000000000000201501046102023000142770ustar 00000000000000use std::path::PathBuf; use super::{Error, Options}; use crate::{bstr::BString, config, 
open::Permissions, ThreadSafeRepository}; impl Default for Options { fn default() -> Self { Options { object_store_slots: Default::default(), permissions: Default::default(), git_dir_trust: None, filter_config_section: None, lossy_config: None, lenient_config: true, bail_if_untrusted: false, open_path_as_is: false, api_config_overrides: Vec::new(), cli_config_overrides: Vec::new(), current_dir: None, } } } /// Instantiation impl Options { /// Options configured to prevent accessing anything else than the repository configuration file, prohibiting /// accessing the environment or spreading beyond the git repository location. pub fn isolated() -> Self { Options::default().permissions(Permissions::isolated()) } } /// Generic modification impl Options { /// An adapter to allow calling any builder method on this instance despite only having a mutable reference. pub fn modify(&mut self, f: impl FnOnce(Self) -> Self) { *self = f(std::mem::take(self)); } } /// Builder methods impl Options { /// Apply the given configuration `values` like `init.defaultBranch=special` or `core.bool-implicit-true` in memory to as early /// as the configuration is initialized to allow affecting the repository instantiation phase, both on disk or when opening. /// The configuration is marked with [source API][gix_config::Source::Api]. pub fn config_overrides(mut self, values: impl IntoIterator>) -> Self { self.api_config_overrides = values.into_iter().map(Into::into).collect(); self } /// Set configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true` for application /// as CLI overrides to the repository configuration, marked with [source CLI][gix_config::Source::Cli]. /// These are equivalent to CLI overrides passed with `-c` in `git`, for example. 
pub fn cli_overrides(mut self, values: impl IntoIterator>) -> Self { self.cli_config_overrides = values.into_iter().map(Into::into).collect(); self } /// Set the amount of slots to use for the object database. It's a value that doesn't need changes on the client, typically, /// but should be controlled on the server. pub fn object_store_slots(mut self, slots: gix_odb::store::init::Slots) -> Self { self.object_store_slots = slots; self } // TODO: tests /// Set the given permissions, which are typically derived by a `Trust` level. pub fn permissions(mut self, permissions: Permissions) -> Self { self.permissions = permissions; self } /// If `true`, default `false`, we will not modify the incoming path to open to assure it is a `.git` directory. /// /// If `false`, we will try to open the input directory as is, even though it doesn't appear to be a `git` repository /// due to the lack of `.git` suffix or because its basename is not `.git` as in `worktree/.git`. pub fn open_path_as_is(mut self, enable: bool) -> Self { self.open_path_as_is = enable; self } /// Set the trust level of the `.git` directory we are about to open. /// /// This can be set manually to force trust even though otherwise it might /// not be fully trusted, leading to limitations in how configuration files /// are interpreted. /// /// If not called explicitly, it will be determined by looking at its /// ownership via [`gix_sec::Trust::from_path_ownership()`]. /// /// # Security Warning /// /// Use with extreme care and only if it's absolutely known that the repository /// is always controlled by the desired user. Using this capability _only_ saves /// a permission check and only so if the [`open()`][Self::open()] method is used, /// as opposed to discovery. 
pub fn with(mut self, trust: gix_sec::Trust) -> Self { self.git_dir_trust = trust.into(); self } /// If true, default false, and if the repository's trust level is not `Full` /// (see [`with()`][Self::with()] for more), then the open operation will fail. /// /// Use this to mimic `git`s way of handling untrusted repositories. Note that `gitoxide` solves /// this by not using configuration from untrusted sources and by generally being secured against /// doctored input files which at worst could cause out-of-memory at the time of writing. pub fn bail_if_untrusted(mut self, toggle: bool) -> Self { self.bail_if_untrusted = toggle; self } /// Set the filter which determines if a configuration section can be used to read values from, /// hence it returns true if it is eligible. /// /// The default filter selects sections whose trust level is [`full`][gix_sec::Trust::Full] or /// whose source is not [`repository-local`][gix_config::source::Kind::Repository]. pub fn filter_config_section(mut self, filter: fn(&gix_config::file::Metadata) -> bool) -> Self { self.filter_config_section = Some(filter); self } /// By default, in release mode configuration will be read without retaining non-essential information like /// comments or whitespace to optimize lookup performance. /// /// Some application might want to toggle this to false in they want to display or edit configuration losslessly /// with all whitespace and comments included. pub fn lossy_config(mut self, toggle: bool) -> Self { self.lossy_config = toggle.into(); self } /// If set, default is false, invalid configuration values will cause an error even if these can safely be defaulted. /// /// This is recommended for all applications that prefer correctness over usability. /// `git` itself defaults to strict configuration mode, flagging incorrect configuration immediately. 
/// /// Failure to read configuration files due to IO errors will also be a hard error if this mode is enabled, otherwise /// these errors will merely be logged. pub fn strict_config(mut self, toggle: bool) -> Self { self.lenient_config = !toggle; self } /// Open a repository at `path` with the options set so far. #[allow(clippy::result_large_err)] pub fn open(self, path: impl Into) -> Result { ThreadSafeRepository::open_opts(path, self) } } impl Options { pub(crate) fn current_dir_or_empty(&self) -> &std::path::Path { self.current_dir.as_deref().unwrap_or(std::path::Path::new("")) } } impl gix_sec::trust::DefaultForLevel for Options { fn default_for_level(level: gix_sec::Trust) -> Self { match level { gix_sec::Trust::Full => Options { object_store_slots: Default::default(), permissions: Permissions::default_for_level(level), git_dir_trust: gix_sec::Trust::Full.into(), filter_config_section: Some(config::section::is_trusted), lossy_config: None, bail_if_untrusted: false, lenient_config: true, open_path_as_is: false, api_config_overrides: Vec::new(), cli_config_overrides: Vec::new(), current_dir: None, }, gix_sec::Trust::Reduced => Options { object_store_slots: gix_odb::store::init::Slots::Given(32), // limit resource usage permissions: Permissions::default_for_level(level), git_dir_trust: gix_sec::Trust::Reduced.into(), filter_config_section: Some(config::section::is_trusted), bail_if_untrusted: false, lenient_config: true, open_path_as_is: false, lossy_config: None, api_config_overrides: Vec::new(), cli_config_overrides: Vec::new(), current_dir: None, }, } } } gix-0.69.1/src/open/permissions.rs000064400000000000000000000166451046102023000151750ustar 00000000000000//! Various permissions to define what can be done when operating a [`Repository`][crate::Repository]. use gix_sec::Trust; use crate::open::Permissions; /// Configure from which sources git configuration may be loaded. 
/// /// Note that configuration from inside of the repository is always loaded as it's definitely required for correctness. #[derive(Copy, Clone, Ord, PartialOrd, PartialEq, Eq, Debug, Hash)] pub struct Config { /// The git binary may come with configuration as part of its configuration, and if this is true (default false) /// we will load the configuration of the git binary, if present and not a duplicate of the ones below. /// /// It's disabled by default as it may involve executing the git binary once per execution of the application. pub git_binary: bool, /// Whether to use the system configuration. /// This is defined as `$(prefix)/etc/gitconfig` on unix. pub system: bool, /// Whether to use the git application configuration. /// /// A platform defined location for where a user's git application configuration should be located. /// If `$XDG_CONFIG_HOME` is not set or empty, `$HOME/.config/git/config` will be used /// on unix. pub git: bool, /// Whether to use the user configuration. /// This is usually `~/.gitconfig` on unix. pub user: bool, /// Whether to use the configuration from environment variables. pub env: bool, /// Whether to follow include files are encountered in loaded configuration, /// via `include` and `includeIf` sections. pub includes: bool, } impl Config { /// Allow everything which usually relates to a fully trusted environment pub fn all() -> Self { Config { git_binary: false, system: true, git: true, user: true, env: true, includes: true, } } /// Load only configuration local to the git repository. pub fn isolated() -> Self { Config { git_binary: false, system: false, git: false, user: false, env: false, includes: false, } } } impl Default for Config { fn default() -> Self { Self::all() } } /// Configure from which `gitattribute` files may be loaded. /// /// Note that `.gitattribute` files from within the repository are always loaded. 
#[derive(Copy, Clone, Ord, PartialOrd, PartialEq, Eq, Debug, Hash)] pub struct Attributes { /// The git binary may come with attribute configuration in its installation directory, and if this is true (default false) /// we will load the configuration of the git binary. /// /// It's disabled by default as it involves executing the git binary once per execution of the application. pub git_binary: bool, /// Whether to use the system configuration. /// This is typically defined as `$(prefix)/etc/gitconfig`. pub system: bool, /// Whether to use the git application configuration. /// /// A platform defined location for where a user's git application configuration should be located. /// If `$XDG_CONFIG_HOME` is not set or empty, `$HOME/.config/git/attributes` will be used /// on unix. pub git: bool, } impl Attributes { /// Allow everything which usually relates to a fully trusted environment pub fn all() -> Self { Attributes { git_binary: false, system: true, git: true, } } /// Allow loading attributes that are local to the git repository. pub fn isolated() -> Self { Attributes { git_binary: false, system: false, git: false, } } } impl Default for Attributes { fn default() -> Self { Self::all() } } /// Permissions related to the usage of environment variables #[derive(Debug, Clone, Copy)] pub struct Environment { /// Control whether resources pointed to by `XDG_CONFIG_HOME` can be used when looking up common configuration values. /// /// Note that [`gix_sec::Permission::Forbid`] will cause the operation to abort if a resource is set via the XDG config environment. pub xdg_config_home: gix_sec::Permission, /// Control the way resources pointed to by the home directory (similar to `xdg_config_home`) may be used. pub home: gix_sec::Permission, /// Control if environment variables to configure the HTTP transport, like `http_proxy` may be used. 
/// /// Note that http-transport related environment variables prefixed with `GIT_` may also be included here /// if they match this category like `GIT_HTTP_USER_AGENT`. pub http_transport: gix_sec::Permission, /// Control if the `EMAIL` environment variables may be read. /// /// Note that identity related environment variables prefixed with `GIT_` may also be included here /// if they match this category. pub identity: gix_sec::Permission, /// Control if environment variables related to the object database are handled. This includes features and performance /// options alike. pub objects: gix_sec::Permission, /// Control if resources pointed to by `GIT_*` prefixed environment variables can be used, **but only** if they /// are not contained in any other category. This is a catch-all section. pub git_prefix: gix_sec::Permission, /// Control if resources pointed to by `SSH_*` prefixed environment variables can be used (like `SSH_ASKPASS`) pub ssh_prefix: gix_sec::Permission, } impl Environment { /// Allow access to the entire environment. pub fn all() -> Self { let allow = gix_sec::Permission::Allow; Environment { xdg_config_home: allow, home: allow, git_prefix: allow, ssh_prefix: allow, http_transport: allow, identity: allow, objects: allow, } } /// Don't allow loading any environment variables. pub fn isolated() -> Self { let deny = gix_sec::Permission::Deny; Environment { xdg_config_home: deny, home: deny, ssh_prefix: deny, git_prefix: deny, http_transport: deny, identity: deny, objects: deny, } } } impl Permissions { /// Secure permissions are similar to `all()` pub fn secure() -> Self { Permissions { env: Environment::all(), config: Config::all(), attributes: Attributes::all(), } } /// Everything is allowed with this set of permissions, thus we read all configuration and do what git typically /// does with owned repositories. 
pub fn all() -> Self { Permissions { env: Environment::all(), config: Config::all(), attributes: Attributes::all(), } } /// Don't read any but the local git configuration and deny reading any environment variables. pub fn isolated() -> Self { Permissions { config: Config::isolated(), attributes: Attributes::isolated(), env: Environment::isolated(), } } } impl gix_sec::trust::DefaultForLevel for Permissions { fn default_for_level(level: Trust) -> Self { match level { Trust::Full => Permissions::all(), Trust::Reduced => Permissions::secure(), } } } impl Default for Permissions { fn default() -> Self { Permissions::secure() } } gix-0.69.1/src/open/repository.rs000064400000000000000000000454171046102023000150400ustar 00000000000000#![allow(clippy::result_large_err)] use gix_features::threading::OwnShared; use std::ffi::OsStr; use std::{borrow::Cow, path::PathBuf}; use super::{Error, Options}; use crate::{ config, config::{ cache::interpolate_context, tree::{gitoxide, Core, Key, Safe}, }, open::Permissions, ThreadSafeRepository, }; #[derive(Default, Clone)] pub(crate) struct EnvironmentOverrides { /// An override of the worktree typically from the environment, and overrides even worktree dirs set as parameter. /// /// This emulates the way git handles this override. worktree_dir: Option, /// An override for the .git directory, typically from the environment. /// /// If set, the passed in `git_dir` parameter will be ignored in favor of this one. 
git_dir: Option, } impl EnvironmentOverrides { fn from_env() -> Result> { let mut worktree_dir = None; if let Some(path) = std::env::var_os(Core::WORKTREE.the_environment_override()) { worktree_dir = PathBuf::from(path).into(); } let mut git_dir = None; if let Some(path) = std::env::var_os("GIT_DIR") { git_dir = PathBuf::from(path).into(); } Ok(EnvironmentOverrides { worktree_dir, git_dir }) } } impl ThreadSafeRepository { /// Open a git repository at the given `path`, possibly expanding it to `path/.git` if `path` is a work tree dir. pub fn open(path: impl Into) -> Result { Self::open_opts(path, Options::default()) } /// Open a git repository at the given `path`, possibly expanding it to `path/.git` if `path` is a work tree dir, and use /// `options` for fine-grained control. /// /// Note that you should use [`crate::discover()`] if security should be adjusted by ownership. /// /// ### Differences to `git2::Repository::open_ext()` /// /// Whereas `open_ext()` is the jack-of-all-trades that can do anything depending on its options, `gix` will always differentiate /// between discovering git repositories by searching, and opening a well-known repository by work tree or `.git` repository. /// /// Note that opening a repository for implementing custom hooks is also handle specifically in /// [`open_with_environment_overrides()`][Self::open_with_environment_overrides()]. 
pub fn open_opts(path: impl Into, mut options: Options) -> Result { let _span = gix_trace::coarse!("ThreadSafeRepository::open()"); let (path, kind) = { let path = path.into(); let looks_like_git_dir = path.ends_with(gix_discover::DOT_GIT_DIR) || path.extension() == Some(std::ffi::OsStr::new("git")); let candidate = if !options.open_path_as_is && !looks_like_git_dir { Cow::Owned(path.join(gix_discover::DOT_GIT_DIR)) } else { Cow::Borrowed(&path) }; match gix_discover::is_git(candidate.as_ref()) { Ok(kind) => (candidate.into_owned(), kind), Err(err) => { if options.open_path_as_is || matches!(candidate, Cow::Borrowed(_)) { return Err(Error::NotARepository { source: err, path: candidate.into_owned(), }); } match gix_discover::is_git(&path) { Ok(kind) => (path, kind), Err(err) => return Err(Error::NotARepository { source: err, path }), } } } }; // To be altered later based on `core.precomposeUnicode`. let cwd = gix_fs::current_dir(false)?; let (git_dir, worktree_dir) = gix_discover::repository::Path::from_dot_git_dir(path, kind, &cwd) .expect("we have sanitized path with is_git()") .into_repository_and_work_tree_directories(); if options.git_dir_trust.is_none() { options.git_dir_trust = gix_sec::Trust::from_path_ownership(&git_dir)?.into(); } options.current_dir = Some(cwd); ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, options) } /// Try to open a git repository in `fallback_directory` (can be worktree or `.git` directory) only if there is no override /// of the `gitdir` using git environment variables. /// /// Use the `trust_map` to apply options depending in the trust level for `directory` or the directory it's overridden with. /// The `.git` directory whether given or computed is used for trust checks. /// /// Note that this will read various `GIT_*` environment variables to check for overrides, and is probably most useful when implementing /// custom hooks. 
// TODO: tests, with hooks, GIT_QUARANTINE for ref-log and transaction control (needs gix-sec support to remove write access in gix-ref) // TODO: The following vars should end up as overrides of the respective configuration values (see git-config). // GIT_PROXY_SSL_CERT, GIT_PROXY_SSL_KEY, GIT_PROXY_SSL_CERT_PASSWORD_PROTECTED. // GIT_PROXY_SSL_CAINFO, GIT_SSL_CIPHER_LIST, GIT_HTTP_MAX_REQUESTS, GIT_CURL_FTP_NO_EPSV, #[doc(alias = "open_from_env", alias = "git2")] pub fn open_with_environment_overrides( fallback_directory: impl Into, trust_map: gix_sec::trust::Mapping, ) -> Result { let _span = gix_trace::coarse!("ThreadSafeRepository::open_with_environment_overrides()"); let overrides = EnvironmentOverrides::from_env()?; let (path, path_kind): (PathBuf, _) = match overrides.git_dir { Some(git_dir) => gix_discover::is_git(&git_dir) .map_err(|err| Error::NotARepository { source: err, path: git_dir.clone(), }) .map(|kind| (git_dir, kind))?, None => { let fallback_directory = fallback_directory.into(); gix_discover::is_git(&fallback_directory) .map_err(|err| Error::NotARepository { source: err, path: fallback_directory.clone(), }) .map(|kind| (fallback_directory, kind))? } }; // To be altered later based on `core.precomposeUnicode`. 
let cwd = gix_fs::current_dir(false)?; let (git_dir, worktree_dir) = gix_discover::repository::Path::from_dot_git_dir(path, path_kind, &cwd) .expect("we have sanitized path with is_git()") .into_repository_and_work_tree_directories(); let worktree_dir = worktree_dir.or(overrides.worktree_dir); let git_dir_trust = gix_sec::Trust::from_path_ownership(&git_dir)?; let mut options = trust_map.into_value_by_level(git_dir_trust); options.current_dir = Some(cwd); ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, options) } pub(crate) fn open_from_paths( mut git_dir: PathBuf, mut worktree_dir: Option, mut options: Options, ) -> Result { let _span = gix_trace::detail!("open_from_paths()"); let Options { git_dir_trust, object_store_slots, filter_config_section, lossy_config, lenient_config, bail_if_untrusted, open_path_as_is: _, permissions: Permissions { ref env, config, attributes, }, ref api_config_overrides, ref cli_config_overrides, ref mut current_dir, } = options; let git_dir_trust = git_dir_trust.expect("trust must be determined by now"); let mut common_dir = gix_discover::path::from_plain_file(git_dir.join("commondir").as_ref()) .transpose()? 
.map(|cd| git_dir.join(cd)); let repo_config = config::cache::StageOne::new( common_dir.as_deref().unwrap_or(&git_dir), git_dir.as_ref(), git_dir_trust, lossy_config, lenient_config, )?; if repo_config.precompose_unicode { git_dir = gix_utils::str::precompose_path(git_dir.into()).into_owned(); if let Some(common_dir) = common_dir.as_mut() { if let Cow::Owned(precomposed) = gix_utils::str::precompose_path((&*common_dir).into()) { *common_dir = precomposed; } } if let Some(worktree_dir) = worktree_dir.as_mut() { if let Cow::Owned(precomposed) = gix_utils::str::precompose_path((&*worktree_dir).into()) { *worktree_dir = precomposed; } } } let common_dir_ref = common_dir.as_deref().unwrap_or(&git_dir); let current_dir = { let current_dir_ref = current_dir.as_mut().expect("BUG: current_dir must be set by caller"); if repo_config.precompose_unicode { if let Cow::Owned(precomposed) = gix_utils::str::precompose_path((&*current_dir_ref).into()) { *current_dir_ref = precomposed; } } current_dir_ref.as_path() }; let mut refs = { let reflog = repo_config.reflog.unwrap_or(gix_ref::store::WriteReflog::Disable); let object_hash = repo_config.object_hash; let ref_store_init_opts = gix_ref::store::init::Options { write_reflog: reflog, object_hash, precompose_unicode: repo_config.precompose_unicode, prohibit_windows_device_names: repo_config.protect_windows, }; match &common_dir { Some(common_dir) => { crate::RefStore::for_linked_worktree(git_dir.to_owned(), common_dir.into(), ref_store_init_opts) } None => crate::RefStore::at(git_dir.to_owned(), ref_store_init_opts), } }; let head = refs.find("HEAD").ok(); let git_install_dir = crate::path::install_dir().ok(); let home = gix_path::env::home_dir().and_then(|home| env.home.check_opt(home)); let mut filter_config_section = filter_config_section.unwrap_or(config::section::is_trusted); let config = config::Cache::from_stage_one( repo_config, common_dir_ref, head.as_ref().and_then(|head| head.target.try_name()), filter_config_section, 
git_install_dir.as_deref(), home.as_deref(), *env, attributes, config, lenient_config, api_config_overrides, cli_config_overrides, )?; if bail_if_untrusted && git_dir_trust != gix_sec::Trust::Full { check_safe_directories( &git_dir, git_install_dir.as_deref(), current_dir, home.as_deref(), &config, )?; } // core.worktree might be used to overwrite the worktree directory if !config.is_bare { let mut key_source = None; let worktree_path = config .resolved .path_filter(Core::WORKTREE, { |section| { let res = filter_config_section(section); if res { key_source = Some(section.source); } res } }) .zip(key_source); if let Some((wt, key_source)) = worktree_path { let wt_clone = wt.clone(); let wt_path = wt .interpolate(interpolate_context(git_install_dir.as_deref(), home.as_deref())) .map_err(|err| config::Error::PathInterpolation { path: wt_clone.value.into_owned(), source: err, })?; let wt_path = match key_source { gix_config::Source::Env | gix_config::Source::Cli | gix_config::Source::Api | gix_config::Source::EnvOverride => wt_path, _ => git_dir.join(wt_path).into(), }; worktree_dir = gix_path::normalize(wt_path, current_dir).map(Cow::into_owned); #[allow(unused_variables)] if let Some(worktree_path) = worktree_dir.as_deref().filter(|wtd| !wtd.is_dir()) { gix_trace::warn!("The configured worktree path '{}' is not a directory or doesn't exist - `core.worktree` may be misleading", worktree_path.display()); } } else if !config.lenient_config && config .resolved .boolean_filter(Core::WORKTREE, &mut filter_config_section) .is_some() { return Err(Error::from(config::Error::ConfigTypedString( config::key::GenericErrorWithValue::from(&Core::WORKTREE), ))); } } { let looks_like_standard_git_dir = || refs.git_dir().file_name() == Some(OsStr::new(gix_discover::DOT_GIT_DIR)); match worktree_dir { None if !config.is_bare && looks_like_standard_git_dir() => { worktree_dir = Some(git_dir.parent().expect("parent is always available").to_owned()); } Some(_) => { // We may assume that 
the presence of a worktree-dir means it's not bare, but only if there // is no configuration saying otherwise. // Thus, if we are here and the common-dir config claims it's bare and we have inferred a worktree anyway, // forget about it. if looks_like_standard_git_dir() && config .resolved .boolean_filter("core.bare", |md| md.source == gix_config::Source::Local) .transpose() .ok() .flatten() .is_some() && config.is_bare { worktree_dir = None; } } None => {} } } refs.write_reflog = config::cache::util::reflog_or_default(config.reflog, worktree_dir.is_some()); refs.namespace.clone_from(&config.refs_namespace); let replacements = replacement_objects_refs_prefix(&config.resolved, lenient_config, filter_config_section)? .and_then(|prefix| { let _span = gix_trace::detail!("find replacement objects"); let platform = refs.iter().ok()?; let iter = platform.prefixed(&prefix).ok()?; let prefix = prefix.to_str()?; let replacements = iter .filter_map(Result::ok) .filter_map(|r: gix_ref::Reference| { let target = r.target.try_id()?.to_owned(); let source = gix_hash::ObjectId::from_hex(r.name.as_bstr().strip_prefix(prefix.as_bytes())?).ok()?; Some((source, target)) }) .collect::>(); Some(replacements) }) .unwrap_or_default(); Ok(ThreadSafeRepository { objects: OwnShared::new(gix_odb::Store::at_opts( common_dir_ref.join("objects"), &mut replacements.into_iter(), gix_odb::store::init::Options { slots: object_store_slots, object_hash: config.object_hash, use_multi_pack_index: config.use_multi_pack_index, current_dir: current_dir.to_owned().into(), }, )?), common_dir, refs, work_tree: worktree_dir, config, // used when spawning new repositories off this one when following worktrees linked_worktree_options: options, #[cfg(feature = "index")] index: gix_fs::SharedFileSnapshotMut::new().into(), shallow_commits: gix_fs::SharedFileSnapshotMut::new().into(), #[cfg(feature = "attributes")] modules: gix_fs::SharedFileSnapshotMut::new().into(), }) } } // TODO: tests fn 
replacement_objects_refs_prefix( config: &gix_config::File<'static>, lenient: bool, mut filter_config_section: fn(&gix_config::file::Metadata) -> bool, ) -> Result, Error> { let is_disabled = config::shared::is_replace_refs_enabled(config, lenient, filter_config_section) .map_err(config::Error::ConfigBoolean)? .unwrap_or(true); if is_disabled { return Ok(None); } let ref_base = gix_path::from_bstr({ let key = "gitoxide.objects.replaceRefBase"; debug_assert_eq!(gitoxide::Objects::REPLACE_REF_BASE.logical_name(), key); config .string_filter(key, &mut filter_config_section) .unwrap_or_else(|| Cow::Borrowed("refs/replace/".into())) }) .into_owned(); Ok(ref_base.into()) } fn check_safe_directories( git_dir: &std::path::Path, git_install_dir: Option<&std::path::Path>, current_dir: &std::path::Path, home: Option<&std::path::Path>, config: &config::Cache, ) -> Result<(), Error> { let mut is_safe = false; let git_dir = match gix_path::realpath_opts(git_dir, current_dir, gix_path::realpath::MAX_SYMLINKS) { Ok(p) => p, Err(_) => git_dir.to_owned(), }; for safe_dir in config .resolved .strings_filter(Safe::DIRECTORY, &mut Safe::directory_filter) .unwrap_or_default() { if safe_dir.as_ref() == "*" { is_safe = true; continue; } if safe_dir.is_empty() { is_safe = false; continue; } if !is_safe { let safe_dir = match gix_config::Path::from(std::borrow::Cow::Borrowed(safe_dir.as_ref())) .interpolate(interpolate_context(git_install_dir, home)) { Ok(path) => path, Err(_) => gix_path::from_bstr(safe_dir), }; if safe_dir == git_dir { is_safe = true; continue; } } } if is_safe { Ok(()) } else { Err(Error::UnsafeGitDir { path: git_dir }) } } gix-0.69.1/src/path.rs000064400000000000000000000005071046102023000126030ustar 00000000000000use std::path::PathBuf; pub use gix_path::*; pub(crate) fn install_dir() -> std::io::Result { std::env::current_exe().and_then(|exe| { exe.parent() .map(ToOwned::to_owned) .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "no parent for current 
executable")) }) } gix-0.69.1/src/pathspec.rs000064400000000000000000000210371046102023000134570ustar 00000000000000//! Pathspec plumbing and abstractions pub use gix_pathspec::*; use crate::{bstr::BStr, AttributeStack, Pathspec, PathspecDetached, Repository}; /// pub mod init { /// The error returned by [`Pathspec::new()`](super::Pathspec::new()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] MakeAttributes(#[from] Box), #[error(transparent)] Defaults(#[from] crate::repository::pathspec_defaults_ignore_case::Error), #[error(transparent)] ParseSpec(#[from] gix_pathspec::parse::Error), #[error( "Could not obtain the repository prefix as the relative path of the CWD as seen from the working tree" )] NormalizeSpec(#[from] gix_pathspec::normalize::Error), #[error(transparent)] RepoPrefix(#[from] gix_path::realpath::Error), } } /// Lifecycle impl<'repo> Pathspec<'repo> { /// Create a new instance by parsing `patterns` into [`Pathspecs`](Pattern) to make them usable for searches. /// `make_attribute` may be called if one of the patterns has a `(attr:a)` element which requires attribute matching. It should /// be used to control where attributes are coming from. /// If `inherit_ignore_case` is `true`, the pathspecs may have their ignore-case default overridden to be case-insensitive by default. /// This only works towards turning ignore-case for pathspecs on, but won't ever turn that setting off if. /// If `empty_patterns_match_prefix` is `true`, then even empty patterns will match only what's inside of the prefix. Otherwise /// they will match everything. /// /// ### Deviation /// /// Pathspecs can declare to be case-insensitive as part of their elements, which is a setting that is now respected for attribute /// queries as well. 
pub fn new( repo: &'repo Repository, empty_patterns_match_prefix: bool, patterns: impl IntoIterator>, inherit_ignore_case: bool, make_attributes: impl FnOnce() -> Result>, ) -> Result { let defaults = repo.pathspec_defaults_inherit_ignore_case(inherit_ignore_case)?; let patterns = patterns .into_iter() .map(move |p| parse(p.as_ref(), defaults)) .collect::, _>>()?; let needs_cache = patterns.iter().any(|p| !p.attributes.is_empty()); let prefix = if patterns.is_empty() && !empty_patterns_match_prefix { None } else { repo.prefix()? }; let search = Search::from_specs( patterns, prefix, &gix_path::realpath_opts( repo.work_dir().unwrap_or_else(|| repo.git_dir()), repo.options.current_dir_or_empty(), gix_path::realpath::MAX_SYMLINKS, )?, )?; let cache = needs_cache.then(make_attributes).transpose()?; gix_trace::debug!( longest_prefix = ?search.longest_common_directory(), prefix_dir = ?search.prefix_directory(), patterns = ?search.patterns().map(gix_pathspec::Pattern::path).collect::>() ); Ok(Self { repo, search, stack: cache, }) } /// Turn ourselves into the functional parts for direct usage. /// Note that the [`cache`](AttributeStack) is only set if one of the [`search` patterns](Search) /// is specifying attributes to match for. pub fn into_parts(self) -> (Search, Option>) { ( self.search, self.stack.map(|stack| AttributeStack::new(stack, self.repo)), ) } /// Turn ourselves into an implementation that works without a repository instance and that is rather minimal. pub fn detach(self) -> std::io::Result { Ok(PathspecDetached { search: self.search, stack: self.stack, odb: self.repo.objects.clone().into_arc()?, }) } } /// Access impl<'repo> Pathspec<'repo> { /// Return the attributes cache which is used when matching attributes in pathspecs, or `None` if none of the pathspecs require that. 
pub fn attributes(&self) -> Option<&gix_worktree::Stack> { self.stack.as_ref() } /// Return the search itself which can be used for matching paths or accessing the actual patterns that will be used. pub fn search(&self) -> &gix_pathspec::Search { &self.search } /// Return the first [`Match`](search::Match) of `relative_path`, or `None`. /// Note that the match might [be excluded](search::Match::is_excluded()). /// `is_dir` is true if `relative_path` is a directory. #[doc( alias = "match_diff", alias = "match_tree", alias = "match_index", alias = "match_workdir", alias = "matches_path", alias = "git2" )] pub fn pattern_matching_relative_path<'a>( &mut self, relative_path: impl Into<&'a BStr>, is_dir: Option, ) -> Option> { self.search.pattern_matching_relative_path( relative_path.into(), is_dir, &mut |relative_path, case, is_dir, out| { let stack = self.stack.as_mut().expect("initialized in advance"); stack .set_case(case) .at_entry(relative_path, Some(is_dir_to_mode(is_dir)), &self.repo.objects) .map_or(false, |platform| platform.matching_attributes(out)) }, ) } /// The simplified version of [`pattern_matching_relative_path()`](Self::pattern_matching_relative_path()) which returns /// `true` if `relative_path` is included in the set of positive pathspecs, while not being excluded. pub fn is_included<'a>(&mut self, relative_path: impl Into<&'a BStr>, is_dir: Option) -> bool { self.pattern_matching_relative_path(relative_path, is_dir) .map_or(false, |m| !m.is_excluded()) } /// Return an iterator over all entries along with their path if the path matches the pathspec, or `None` if the pathspec is /// known to match no entry. 
// TODO: tests pub fn index_entries_with_paths<'s: 'repo, 'a: 'repo>( &'s mut self, index: &'a gix_index::State, ) -> Option + 'repo + 's> { index.prefixed_entries(self.search.common_prefix()).map(|entries| { entries.iter().filter_map(move |entry| { let path = entry.path(index); self.is_included(path, Some(false)).then_some((path, entry)) }) }) } } /// Access impl PathspecDetached { /// Return the first [`Match`](search::Match) of `relative_path`, or `None`. /// Note that the match might [be excluded](search::Match::is_excluded()). /// `is_dir` is true if `relative_path` is a directory. #[doc( alias = "match_diff", alias = "match_tree", alias = "match_index", alias = "match_workdir", alias = "matches_path", alias = "git2" )] pub fn pattern_matching_relative_path<'a>( &mut self, relative_path: impl Into<&'a BStr>, is_dir: Option, ) -> Option> { self.search.pattern_matching_relative_path( relative_path.into(), is_dir, &mut |relative_path, case, is_dir, out| { let stack = self.stack.as_mut().expect("initialized in advance"); stack .set_case(case) .at_entry(relative_path, Some(is_dir_to_mode(is_dir)), &self.odb) .map_or(false, |platform| platform.matching_attributes(out)) }, ) } /// The simplified version of [`pattern_matching_relative_path()`](Self::pattern_matching_relative_path()) which returns /// `true` if `relative_path` is included in the set of positive pathspecs, while not being excluded. 
pub fn is_included<'a>(&mut self, relative_path: impl Into<&'a BStr>, is_dir: Option) -> bool { self.pattern_matching_relative_path(relative_path, is_dir) .map_or(false, |m| !m.is_excluded()) } } fn is_dir_to_mode(is_dir: bool) -> gix_index::entry::Mode { if is_dir { gix_index::entry::Mode::DIR } else { gix_index::entry::Mode::FILE } } gix-0.69.1/src/prelude.rs000064400000000000000000000002341046102023000133040ustar 00000000000000pub use gix_features::parallel::reduce::Finalize; pub use gix_object::{Find, FindExt, Write}; pub use gix_odb::{Header, HeaderExt}; pub use crate::ext::*; gix-0.69.1/src/progress.rs000064400000000000000000000001641046102023000135120ustar 00000000000000#[cfg(feature = "progress-tree")] pub use gix_features::progress::prodash::tree; pub use gix_features::progress::*; gix-0.69.1/src/push.rs000064400000000000000000000015741046102023000126330ustar 00000000000000/// All possible values of `push.default`. #[derive(Default, Copy, Clone, PartialOrd, PartialEq, Ord, Eq, Hash, Debug)] pub enum Default { /// Do not push anything unless a refspec is provided explicitly. /// /// This is for safety. Nothing, /// Push the current branch to update a remote branch with the same name. Current, /// Push the current branch to the branch it would fetch from and merge with, /// i.e. what is configured in `branch..merge`, retrievable with /// the `@{upstream}` refspec. Upstream, /// Push the current branch with the same name to the remote. /// This is the same as [`Current`](Default::Current), but fails if /// `branch..merge` is set to a branch that is named differently. #[default] Simple, /// Push *all* branches to their similarly named counterpart on the remote. 
Matching, } gix-0.69.1/src/reference/edits.rs000064400000000000000000000056771046102023000147320ustar 00000000000000/// pub mod set_target_id { use gix_ref::{transaction::PreviousValue, Target}; use crate::{bstr::BString, Reference}; mod error { use gix_ref::FullName; /// The error returned by [`Reference::set_target_id()`][super::Reference::set_target_id()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Cannot change symbolic reference {name:?} into a direct one by setting it to an id")] SymbolicReference { name: FullName }, #[error(transparent)] ReferenceEdit(#[from] crate::reference::edit::Error), } } pub use error::Error; impl Reference<'_> { /// Set the id of this direct reference to `id` and use `reflog_message` for the reflog (if enabled in the repository). /// /// Note that the operation will fail on symbolic references, to change their type use the lower level reference database, /// or if the reference was deleted or changed in the mean time. /// Furthermore, refrain from using this method for more than a one-off change as it creates a transaction for each invocation. /// If multiple reference should be changed, use [`Repository::edit_references()`][crate::Repository::edit_references()] /// or the lower level reference database instead. #[allow(clippy::result_large_err)] pub fn set_target_id( &mut self, id: impl Into, reflog_message: impl Into, ) -> Result<(), Error> { match &self.inner.target { Target::Symbolic(name) => return Err(Error::SymbolicReference { name: name.clone() }), Target::Object(current_id) => { let changed = self.repo.reference( self.name(), id, PreviousValue::MustExistAndMatch(Target::Object(current_id.to_owned())), reflog_message, )?; *self = changed; } } Ok(()) } } } /// pub mod delete { use gix_ref::transaction::{Change, PreviousValue, RefEdit, RefLog}; use crate::Reference; impl Reference<'_> { /// Delete this reference or fail if it was changed since last observed. 
/// Note that this instance remains available in memory but probably shouldn't be used anymore. pub fn delete(&self) -> Result<(), crate::reference::edit::Error> { self.repo .edit_reference(RefEdit { change: Change::Delete { expected: PreviousValue::MustExistAndMatch(self.inner.target.clone()), log: RefLog::AndReference, }, name: self.inner.name.clone(), deref: false, }) .map(|_| ()) } } } gix-0.69.1/src/reference/errors.rs000064400000000000000000000116401046102023000151210ustar 00000000000000/// pub mod edit { use crate::config; /// The error returned by [`edit_references(…)`][crate::Repository::edit_references()], and others /// which ultimately create a reference. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FileTransactionPrepare(#[from] gix_ref::file::transaction::prepare::Error), #[error(transparent)] FileTransactionCommit(#[from] gix_ref::file::transaction::commit::Error), #[error(transparent)] NameValidation(#[from] gix_validate::reference::name::Error), #[error("Could not interpret core.filesRefLockTimeout or core.packedRefsTimeout, it must be the number in milliseconds to wait for locks or negative to wait forever")] LockTimeoutConfiguration(#[from] config::lock_timeout::Error), #[error(transparent)] ParseCommitterTime(#[from] crate::config::time::Error), } } /// pub mod peel { /// The error returned by [`Reference::peel_to_id_in_place(…)`](crate::Reference::peel_to_id_in_place()) and /// [`Reference::into_fully_peeled_id(…)`](crate::Reference::into_fully_peeled_id()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ToId(#[from] gix_ref::peel::to_id::Error), #[error(transparent)] PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error), } /// pub mod to_kind { /// The error returned by [`Reference::peel_to_kind(…)`](crate::Reference::peel_to_kind()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FollowToObject(#[from] gix_ref::peel::to_object::Error), #[error(transparent)] PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error), #[error(transparent)] FindObject(#[from] crate::object::find::existing::Error), #[error(transparent)] PeelObject(#[from] crate::object::peel::to_kind::Error), } } } /// pub mod follow { /// pub mod to_object { /// The error returned by [`Reference::follow_to_object(…)`](crate::Reference::follow_to_object()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FollowToObject(#[from] gix_ref::peel::to_object::Error), #[error(transparent)] PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error), } } } /// pub mod head_id { /// The error returned by [`Repository::head_id(…)`](crate::Repository::head_id()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Head(#[from] crate::reference::find::existing::Error), #[error(transparent)] PeelToId(#[from] crate::head::peel::into_id::Error), } } /// pub mod head_commit { /// The error returned by [`Repository::head_commit`(…)](crate::Repository::head_commit()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Head(#[from] crate::reference::find::existing::Error), #[error(transparent)] PeelToCommit(#[from] crate::head::peel::to_commit::Error), } } /// pub mod head_tree_id { /// The error returned by [`Repository::head_tree_id`(…)](crate::Repository::head_tree_id()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] HeadCommit(#[from] crate::reference::head_commit::Error), #[error(transparent)] DecodeCommit(#[from] gix_object::decode::Error), } } /// pub mod head_tree { /// The error returned by [`Repository::head_tree`(…)](crate::Repository::head_tree()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] HeadCommit(#[from] crate::reference::head_commit::Error), #[error(transparent)] CommitTree(#[from] crate::object::commit::Error), } } /// pub mod find { /// pub mod existing { use gix_ref::PartialName; /// The error returned by [`find_reference(…)`][crate::Repository::find_reference()], and others. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Find(#[from] crate::reference::find::Error), #[error("The reference '{}' did not exist", name.as_ref().as_bstr())] NotFound { name: PartialName }, } } /// The error returned by [`try_find_reference(…)`][crate::Repository::try_find_reference()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Find(#[from] gix_ref::file::find::Error), } } gix-0.69.1/src/reference/iter.rs000064400000000000000000000112571046102023000145540ustar 00000000000000//! #![allow(clippy::empty_docs)] use std::path::Path; use gix_ref::file::ReferenceExt; /// A platform to create iterators over references. #[must_use = "Iterators should be obtained from this iterator platform"] pub struct Platform<'r> { pub(crate) platform: gix_ref::file::iter::Platform<'r>, /// The owning repository. pub repo: &'r crate::Repository, } /// An iterator over references, with or without filter. pub struct Iter<'r> { inner: gix_ref::file::iter::LooseThenPacked<'r, 'r>, peel_with_packed: Option, peel: bool, repo: &'r crate::Repository, } impl<'r> Iter<'r> { fn new(repo: &'r crate::Repository, platform: gix_ref::file::iter::LooseThenPacked<'r, 'r>) -> Self { Iter { inner: platform, peel_with_packed: None, peel: false, repo, } } } impl Platform<'_> { /// Return an iterator over all references in the repository. /// /// Even broken or otherwise unparsable or inaccessible references are returned and have to be handled by the caller on a /// case by case basis. 
pub fn all(&self) -> Result, init::Error> { Ok(Iter::new(self.repo, self.platform.all()?)) } /// Return an iterator over all references that match the given `prefix`. /// /// These are of the form `refs/heads` or `refs/remotes/origin`, and must not contain relative paths components like `.` or `..`. // TODO: Create a custom `Path` type that enforces the requirements of git naturally, this type is surprising possibly on windows // and when not using a trailing '/' to signal directories. pub fn prefixed(&self, prefix: impl AsRef) -> Result, init::Error> { Ok(Iter::new(self.repo, self.platform.prefixed(prefix.as_ref())?)) } // TODO: tests /// Return an iterator over all references that are tags. /// /// They are all prefixed with `refs/tags`. pub fn tags(&self) -> Result, init::Error> { Ok(Iter::new(self.repo, self.platform.prefixed("refs/tags/".as_ref())?)) } // TODO: tests /// Return an iterator over all local branches. /// /// They are all prefixed with `refs/heads`. pub fn local_branches(&self) -> Result, init::Error> { Ok(Iter::new(self.repo, self.platform.prefixed("refs/heads/".as_ref())?)) } // TODO: tests /// Return an iterator over all remote branches. /// /// They are all prefixed with `refs/remotes`. pub fn remote_branches(&self) -> Result, init::Error> { Ok(Iter::new(self.repo, self.platform.prefixed("refs/remotes/".as_ref())?)) } } impl Iter<'_> { /// Automatically peel references before yielding them during iteration. /// /// This has the same effect as using `iter.map(|r| {r.peel_to_id_in_place(); r})`. /// /// # Note /// /// Doing this is necessary as the packed-refs buffer is already held by the iterator, disallowing the consumer of the iterator /// to peel the returned references themselves. 
pub fn peeled(mut self) -> Result { self.peel_with_packed = self.repo.refs.cached_packed_buffer()?; self.peel = true; Ok(self) } } impl<'r> Iterator for Iter<'r> { type Item = Result, Box>; fn next(&mut self) -> Option { self.inner.next().map(|res| { res.map_err(|err| Box::new(err) as Box) .and_then(|mut r| { if self.peel { let repo = &self.repo; r.peel_to_id_in_place_packed( &repo.refs, &repo.objects, self.peel_with_packed.as_ref().map(|p| &***p), ) .map_err(|err| Box::new(err) as Box) .map(|_| r) } else { Ok(r) } }) .map(|r| crate::Reference::from_ref(r, self.repo)) }) } } /// pub mod init { /// The error returned by [`Platform::all()`][super::Platform::all()] or [`Platform::prefixed()`][super::Platform::prefixed()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Io(#[from] std::io::Error), } } /// The error returned by [references()][crate::Repository::references()]. pub type Error = gix_ref::packed::buffer::open::Error; gix-0.69.1/src/reference/log.rs000064400000000000000000000024161046102023000143670ustar 00000000000000//! #![allow(clippy::empty_docs)] use gix_object::commit::MessageRef; use gix_ref::file::ReferenceExt; use crate::{ bstr::{BStr, BString, ByteVec}, Reference, }; impl Reference<'_> { /// Return a platform for obtaining iterators over reference logs. pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'_, '_> { self.inner.log_iter(&self.repo.refs) } /// Return true if a reflog is present for this reference. pub fn log_exists(&self) -> bool { self.inner.log_exists(&self.repo.refs) } } /// Generate a message typical for git commit logs based on the given `operation`, commit `message` and `num_parents` of the commit. 
pub fn message(operation: &str, message: &BStr, num_parents: usize) -> BString { let mut out = BString::from(operation); if let Some(commit_type) = commit_type_by_parents(num_parents) { out.push_str(b" ("); out.extend_from_slice(commit_type.as_bytes()); out.push_byte(b')'); } out.push_str(b": "); out.extend_from_slice(&MessageRef::from_bytes(message).summary()); out } pub(crate) fn commit_type_by_parents(count: usize) -> Option<&'static str> { Some(match count { 0 => "initial", 1 => return None, _two_or_more => "merge", }) } gix-0.69.1/src/reference/mod.rs000064400000000000000000000171731046102023000143730ustar 00000000000000//! #![allow(clippy::empty_docs)] use gix_ref::file::ReferenceExt; use crate::{Blob, Commit, Id, Object, Reference, Tag, Tree}; pub mod iter; /// pub mod remote; mod errors; pub use errors::{edit, find, follow, head_commit, head_id, head_tree, head_tree_id, peel}; use crate::ext::ObjectIdExt; pub mod log; pub use gix_ref::{Category, Kind}; /// Access impl<'repo> Reference<'repo> { /// Returns the attached id we point to, or `None` if this is a symbolic ref. pub fn try_id(&self) -> Option> { match self.inner.target { gix_ref::Target::Symbolic(_) => None, gix_ref::Target::Object(oid) => oid.to_owned().attach(self.repo).into(), } } /// Returns the attached id we point to, or panic if this is a symbolic ref. pub fn id(&self) -> Id<'repo> { self.try_id() .expect("BUG: tries to obtain object id from symbolic target") } /// Return the target to which this reference points to. pub fn target(&self) -> gix_ref::TargetRef<'_> { self.inner.target.to_ref() } /// Return the reference's full name. pub fn name(&self) -> &gix_ref::FullNameRef { self.inner.name.as_ref() } /// Turn this instances into a stand-alone reference. 
pub fn detach(self) -> gix_ref::Reference { self.inner } } impl std::fmt::Debug for Reference<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Debug::fmt(&self.inner, f) } } impl<'repo> Reference<'repo> { pub(crate) fn from_ref(reference: gix_ref::Reference, repo: &'repo crate::Repository) -> Self { Reference { inner: reference, repo } } } /// Peeling impl<'repo> Reference<'repo> { /// Follow all symbolic targets this reference might point to and peel all annotated tags /// to their first non-tag target, and return it, /// /// This is useful to learn where this reference is ultimately pointing to after following /// the chain of symbolic refs and annotated tags. pub fn peel_to_id_in_place(&mut self) -> Result, peel::Error> { let oid = self.inner.peel_to_id_in_place(&self.repo.refs, &self.repo.objects)?; Ok(Id::from_id(oid, self.repo)) } /// Follow all symbolic targets this reference might point to and peel all annotated tags /// to their first non-tag target, and return it, reusing the `packed` buffer if available. /// /// This is useful to learn where this reference is ultimately pointing to after following /// the chain of symbolic refs and annotated tags. pub fn peel_to_id_in_place_packed( &mut self, packed: Option<&gix_ref::packed::Buffer>, ) -> Result, peel::Error> { let oid = self .inner .peel_to_id_in_place_packed(&self.repo.refs, &self.repo.objects, packed)?; Ok(Id::from_id(oid, self.repo)) } /// Similar to [`peel_to_id_in_place()`](Reference::peel_to_id_in_place()), but consumes this instance. pub fn into_fully_peeled_id(mut self) -> Result, peel::Error> { self.peel_to_id_in_place() } /// Follow this reference's target until it points at an object directly, and peel that object until /// its type matches the given `kind`. It's an error to try to peel to a kind that this ref doesn't point to. /// /// Note that this ref will point to the first target object afterward, which may be a tag. 
This is different /// from [`peel_to_id_in_place()`](Self::peel_to_id_in_place()) where it will point to the first non-tag object. #[doc(alias = "peel", alias = "git2")] pub fn peel_to_kind(&mut self, kind: gix_object::Kind) -> Result, peel::to_kind::Error> { let packed = self.repo.refs.cached_packed_buffer().map_err(|err| { peel::to_kind::Error::FollowToObject(gix_ref::peel::to_object::Error::Follow( file::find::existing::Error::Find(file::find::Error::PackedOpen(err)), )) })?; self.peel_to_kind_packed(kind, packed.as_ref().map(|p| &***p)) } /// Peel this ref until the first commit. /// /// For details, see [`peel_to_kind`()](Self::peel_to_kind()). pub fn peel_to_commit(&mut self) -> Result, peel::to_kind::Error> { Ok(self.peel_to_kind(gix_object::Kind::Commit)?.into_commit()) } /// Peel this ref until the first annotated tag. /// /// For details, see [`peel_to_kind`()](Self::peel_to_kind()). pub fn peel_to_tag(&mut self) -> Result, peel::to_kind::Error> { Ok(self.peel_to_kind(gix_object::Kind::Tag)?.into_tag()) } /// Peel this ref until the first tree. /// /// For details, see [`peel_to_kind`()](Self::peel_to_kind()). pub fn peel_to_tree(&mut self) -> Result, peel::to_kind::Error> { Ok(self.peel_to_kind(gix_object::Kind::Tree)?.into_tree()) } /// Peel this ref until it points to a blob. Note that this is highly uncommon to happen /// as it would require an annotated tag to point to a blob, instead of a commit. /// /// For details, see [`peel_to_kind`()](Self::peel_to_kind()). pub fn peel_to_blob(&mut self) -> Result, peel::to_kind::Error> { Ok(self.peel_to_kind(gix_object::Kind::Blob)?.into_blob()) } /// Like [`peel_to_kind()`](Self::peel_to_kind), but allows to provide `packed` for best possible performance /// when peeling many refs. pub fn peel_to_kind_packed( &mut self, kind: gix_object::Kind, packed: Option<&gix_ref::packed::Buffer>, ) -> Result, peel::to_kind::Error> { let target = self .inner .follow_to_object_in_place_packed(&self.repo.refs, packed)? 
.attach(self.repo); Ok(target.object()?.peel_to_kind(kind)?) } /// Follow all symbolic references we point to up to the first object, which is typically (but not always) a tag, /// returning its id. /// After this call, this ref will be pointing to an object directly, but may still not consider itself 'peeled' unless /// a symbolic target ref was looked up from packed-refs. #[doc(alias = "resolve", alias = "git2")] pub fn follow_to_object(&mut self) -> Result, follow::to_object::Error> { let packed = self.repo.refs.cached_packed_buffer().map_err(|err| { follow::to_object::Error::FollowToObject(gix_ref::peel::to_object::Error::Follow( file::find::existing::Error::Find(file::find::Error::PackedOpen(err)), )) })?; self.follow_to_object_packed(packed.as_ref().map(|p| &***p)) } /// Like [`follow_to_object`](Self::follow_to_object), but can be used for repeated calls as it won't /// look up `packed` each time, but can reuse it instead. #[doc(alias = "resolve", alias = "git2")] pub fn follow_to_object_packed( &mut self, packed: Option<&gix_ref::packed::Buffer>, ) -> Result, follow::to_object::Error> { Ok(self .inner .follow_to_object_in_place_packed(&self.repo.refs, packed)? .attach(self.repo)) } /// Follow this symbolic reference one level and return the ref it refers to. /// /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain. 
pub fn follow(&self) -> Option, gix_ref::file::find::existing::Error>> { self.inner.follow(&self.repo.refs).map(|res| { res.map(|r| Reference { inner: r, repo: self.repo, }) }) } } mod edits; pub use edits::{delete, set_target_id}; use gix_ref::file; gix-0.69.1/src/reference/remote.rs000064400000000000000000000061561046102023000151060ustar 00000000000000use crate::bstr::ByteSlice; use crate::repository::{branch_remote_ref_name, branch_remote_tracking_ref_name}; use crate::{remote, Reference}; use gix_ref::{Category, FullNameRef}; use std::borrow::Cow; /// Remotes impl<'repo> Reference<'repo> { /// Find the name of our remote for `direction` as configured in `branch..remote|pushRemote` respectively. /// Return `None` if no remote is configured. /// /// See also [`Repository::branch_remote_name()`](crate::Repository::branch_remote_name()) for more details. pub fn remote_name(&self, direction: remote::Direction) -> Option> { let (category, shortname) = self.name().category_and_short_name()?; match category { Category::RemoteBranch => { if shortname.find_iter("/").take(2).count() == 1 { let slash_pos = shortname.find_byte(b'/').expect("it was just found"); shortname[..slash_pos] .as_bstr() .to_str() .ok() .map(|n| remote::Name::Symbol(n.into())) } else { let remotes = self.repo.remote_names(); for slash_pos in shortname.rfind_iter("/") { let candidate = shortname[..slash_pos].as_bstr(); if remotes.contains(candidate) { return candidate.to_str().ok().map(|n| remote::Name::Symbol(n.into())); } } None } } Category::LocalBranch => self.repo.branch_remote_name(shortname, direction), _ => None, } } /// Find the remote along with all configuration associated with it suitable for handling this reference. /// /// See also [`Repository::branch_remote()`](crate::Repository::branch_remote()) for more details. 
pub fn remote( &self, direction: remote::Direction, ) -> Option, remote::find::existing::Error>> { self.repo.branch_remote(self.name().shorten(), direction) } /// Return the name of this reference on the remote side. /// /// See [`Repository::branch_remote_ref_name()`](crate::Repository::branch_remote_ref_name()) for details. #[doc(alias = "upstream", alias = "git2")] pub fn remote_ref_name( &self, direction: remote::Direction, ) -> Option, branch_remote_ref_name::Error>> { self.repo.branch_remote_ref_name(self.name(), direction) } /// Return the name of the reference that tracks this reference on the remote side. /// /// See [`Repository::branch_remote_tracking_ref_name()`](crate::Repository::branch_remote_tracking_ref_name()) for details. #[doc(alias = "upstream", alias = "git2")] pub fn remote_tracking_ref_name( &self, direction: remote::Direction, ) -> Option, branch_remote_tracking_ref_name::Error>> { self.repo.branch_remote_tracking_ref_name(self.name(), direction) } } gix-0.69.1/src/remote/access.rs000064400000000000000000000077131046102023000144110ustar 00000000000000use gix_refspec::RefSpec; use crate::{bstr::BStr, remote, Remote}; /// Access impl<'repo> Remote<'repo> { /// Return the name of this remote or `None` if it wasn't persisted to disk yet. pub fn name(&self) -> Option<&remote::Name<'static>> { self.name.as_ref() } /// Return our repository reference. pub fn repo(&self) -> &'repo crate::Repository { self.repo } /// Return the set of ref-specs used for `direction`, which may be empty, in order of occurrence in the configuration. pub fn refspecs(&self, direction: remote::Direction) -> &[RefSpec] { match direction { remote::Direction::Fetch => &self.fetch_specs, remote::Direction::Push => &self.push_specs, } } /// Return how we handle tags when fetching the remote. 
    pub fn fetch_tags(&self) -> remote::fetch::Tags {
        self.fetch_tags
    }

    /// Return the url used for the given `direction` with rewrites from `url.<base>.insteadOf|pushInsteadOf`, unless the instance
    /// was created with one of the `_without_url_rewrite()` methods.
    /// For pushing, this is the `remote.<name>.pushUrl` or the `remote.<name>.url` used for fetching, and for fetching it's
    /// the `remote.<name>.url`.
    /// Note that it's possible to only have the push url set, in which case there will be no way to fetch from the remote as
    /// the push-url isn't used for that.
    pub fn url(&self, direction: remote::Direction) -> Option<&gix_url::Url> {
        match direction {
            // Prefer the rewritten (alias) url if one was produced, fall back to the configured one.
            remote::Direction::Fetch => self.url_alias.as_ref().or(self.url.as_ref()),
            // Push falls back to the fetch url when no dedicated push url is configured.
            remote::Direction::Push => self
                .push_url_alias
                .as_ref()
                .or(self.push_url.as_ref())
                .or_else(|| self.url(remote::Direction::Fetch)),
        }
    }
}

/// Modification
impl Remote<'_> {
    /// Read `url.<base>.insteadOf|pushInsteadOf` configuration variables and apply them to our urls, changing them in place.
    ///
    /// This happens only once, and one of them may be changed even when reporting an error.
    /// If both urls fail, only the first error (for fetch urls) is reported.
    pub fn rewrite_urls(&mut self) -> Result<&mut Self, remote::init::Error> {
        // `err.into()` turns the error into `Some(err)`; success stores the rewritten url and yields `None`.
        let url_err = match remote::init::rewrite_url(&self.repo.config, self.url.as_ref(), remote::Direction::Fetch) {
            Ok(url) => {
                self.url_alias = url;
                None
            }
            Err(err) => err.into(),
        };
        let push_url_err =
            match remote::init::rewrite_url(&self.repo.config, self.push_url.as_ref(), remote::Direction::Push) {
                Ok(url) => {
                    self.push_url_alias = url;
                    None
                }
                Err(err) => err.into(),
            };
        // Prefer the fetch error over the push error, and return it if either rewrite failed.
        url_err.or(push_url_err).map(Err::<&mut Self, _>).transpose()?;
        Ok(self)
    }

    /// Replace all currently set refspecs, typically from configuration, with the given `specs` for `direction`,
    /// or fail if one of the input specs could not be parsed.
pub fn replace_refspecs( &mut self, specs: impl IntoIterator, direction: remote::Direction, ) -> Result<(), gix_refspec::parse::Error> where Spec: AsRef, { use remote::Direction::*; let specs: Vec<_> = specs .into_iter() .map(|spec| { gix_refspec::parse( spec.as_ref(), match direction { Push => gix_refspec::parse::Operation::Push, Fetch => gix_refspec::parse::Operation::Fetch, }, ) .map(|url| url.to_owned()) }) .collect::>()?; let dst = match direction { Push => &mut self.push_specs, Fetch => &mut self.fetch_specs, }; *dst = specs; Ok(()) } } gix-0.69.1/src/remote/build.rs000064400000000000000000000053321046102023000142420ustar 00000000000000use crate::{bstr::BStr, remote, Remote}; /// Builder methods impl Remote<'_> { /// Set the `url` to be used when pushing data to a remote. pub fn push_url(self, url: Url) -> Result where Url: TryInto, gix_url::parse::Error: From, { self.push_url_inner( url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?, true, ) } /// Set the `url` to be used when pushing data to a remote, without applying rewrite rules in case these could be faulty, /// eliminating one failure mode. pub fn push_url_without_url_rewrite(self, url: Url) -> Result where Url: TryInto, gix_url::parse::Error: From, { self.push_url_inner( url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?, false, ) } /// Configure how tags should be handled when fetching from the remote. pub fn with_fetch_tags(mut self, tags: remote::fetch::Tags) -> Self { self.fetch_tags = tags; self } fn push_url_inner( mut self, push_url: gix_url::Url, should_rewrite_urls: bool, ) -> Result { self.push_url = push_url.into(); let (_, push_url_alias) = should_rewrite_urls .then(|| remote::init::rewrite_urls(&self.repo.config, None, self.push_url.as_ref())) .unwrap_or(Ok((None, None)))?; self.push_url_alias = push_url_alias; Ok(self) } /// Add `specs` as refspecs for `direction` to our list if they are unique, or ignore them otherwise. 
pub fn with_refspecs( mut self, specs: impl IntoIterator, direction: remote::Direction, ) -> Result where Spec: AsRef, { use remote::Direction::*; let new_specs = specs .into_iter() .map(|spec| { gix_refspec::parse( spec.as_ref(), match direction { Push => gix_refspec::parse::Operation::Push, Fetch => gix_refspec::parse::Operation::Fetch, }, ) .map(|s| s.to_owned()) }) .collect::, _>>()?; let specs = match direction { Push => &mut self.push_specs, Fetch => &mut self.fetch_specs, }; for spec in new_specs { if !specs.contains(&spec) { specs.push(spec); } } Ok(self) } } gix-0.69.1/src/remote/connect.rs000064400000000000000000000153731046102023000146020ustar 00000000000000#![allow(clippy::result_large_err)] use std::borrow::Cow; use gix_protocol::transport::client::Transport; use crate::{config::tree::Protocol, remote::Connection, Remote}; mod error { use crate::{bstr::BString, config, remote}; /// The error returned by [connect()][crate::Remote::connect()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not obtain options for connecting via ssh")] SshOptions(#[from] config::ssh_connect_options::Error), #[error("Could not obtain the current directory")] CurrentDir(#[from] std::io::Error), #[error("Could not access remote repository at \"{}\"", directory.display())] InvalidRemoteRepositoryPath { directory: std::path::PathBuf }, #[error(transparent)] SchemePermission(#[from] config::protocol::allow::Error), #[error("Protocol {scheme:?} of url {url:?} is denied per configuration")] ProtocolDenied { url: BString, scheme: gix_url::Scheme }, #[error(transparent)] Connect(#[from] gix_protocol::transport::client::connect::Error), #[error("The {} url was missing - don't know where to establish a connection to", direction.as_str())] MissingUrl { direction: remote::Direction }, #[error("The given protocol version was invalid. 
Choose between 1 and 2")] UnknownProtocol { source: config::key::GenericErrorWithValue }, #[error("Could not verify that \"{}\" url is a valid git directory before attempting to use it", url.to_bstring())] FileUrl { source: Box, url: gix_url::Url, }, } impl gix_protocol::transport::IsSpuriousError for Error { /// Return `true` if retrying might result in a different outcome due to IO working out differently. fn is_spurious(&self) -> bool { match self { Error::Connect(err) => err.is_spurious(), _ => false, } } } } pub use error::Error; /// Establishing connections to remote hosts (without performing a git-handshake). impl<'repo> Remote<'repo> { /// Create a new connection using `transport` to communicate, with `progress` to indicate changes. /// /// Note that this method expects the `transport` to be created by the user, which would involve the [`url()`](Self::url()). /// It's meant to be used when async operation is needed with runtimes of the user's choice. pub fn to_connection_with_transport(&self, transport: T) -> Connection<'_, 'repo, T> where T: Transport, { let trace = self.repo.config.trace_packet(); Connection { remote: self, authenticate: None, transport_options: None, handshake: None, transport: gix_protocol::SendFlushOnDrop::new(transport, trace), trace, } } /// Connect to the url suitable for `direction` and return a handle through which operations can be performed. /// /// Note that the `protocol.version` configuration key affects the transport protocol used to connect, /// with `2` being the default. /// /// The transport used for connection can be configured via `transport_mut().configure()` assuming the actually /// used transport is well known. If that's not the case, the transport can be created by hand and passed to /// [to_connection_with_transport()][Self::to_connection_with_transport()]. 
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client-async-std"))] #[gix_protocol::maybe_async::maybe_async] pub async fn connect( &self, direction: crate::remote::Direction, ) -> Result>, Error> { let (url, version) = self.sanitized_url_and_version(direction)?; #[cfg(feature = "blocking-network-client")] let scheme_is_ssh = url.scheme == gix_url::Scheme::Ssh; let transport = gix_protocol::transport::connect( url, gix_protocol::transport::client::connect::Options { version, #[cfg(feature = "blocking-network-client")] ssh: scheme_is_ssh .then(|| self.repo.ssh_connect_options()) .transpose()? .unwrap_or_default(), trace: self.repo.config.trace_packet(), }, ) .await?; Ok(self.to_connection_with_transport(transport)) } /// Produce the sanitized URL and protocol version to use as obtained by querying the repository configuration. /// /// This can be useful when using custom transports to allow additional configuration. pub fn sanitized_url_and_version( &self, direction: crate::remote::Direction, ) -> Result<(gix_url::Url, gix_protocol::transport::Protocol), Error> { fn sanitize(mut url: gix_url::Url) -> Result { if url.scheme == gix_url::Scheme::File { let mut dir = gix_path::to_native_path_on_windows(Cow::Borrowed(url.path.as_ref())); let kind = gix_discover::is_git(dir.as_ref()) .or_else(|_| { dir.to_mut().push(gix_discover::DOT_GIT_DIR); gix_discover::is_git(dir.as_ref()) }) .map_err(|err| Error::FileUrl { source: err.into(), url: url.clone(), })?; let (git_dir, _work_dir) = gix_discover::repository::Path::from_dot_git_dir( dir.clone().into_owned(), kind, // precomposed unicode doesn't matter here as long as the produced path is accessible, // which is a given either way. &gix_fs::current_dir(false)?, ) .ok_or_else(|| Error::InvalidRemoteRepositoryPath { directory: dir.into_owned(), })? 
.into_repository_and_work_tree_directories(); url.path = gix_path::into_bstr(git_dir).into_owned(); } Ok(url) } let version = crate::config::tree::Protocol::VERSION .try_into_protocol_version(self.repo.config.resolved.integer(Protocol::VERSION)) .map_err(|err| Error::UnknownProtocol { source: err })?; let url = self.url(direction).ok_or(Error::MissingUrl { direction })?.to_owned(); if !self.repo.config.url_scheme()?.allow(&url.scheme) { return Err(Error::ProtocolDenied { url: url.to_bstring(), scheme: url.scheme, }); } Ok((sanitize(url)?, version)) } } gix-0.69.1/src/remote/connection/access.rs000064400000000000000000000101651046102023000165430ustar 00000000000000use crate::{ remote::{connection::AuthenticateFn, Connection}, Remote, }; /// Builder impl<'a, T> Connection<'a, '_, T> where T: gix_transport::client::Transport, { /// Set a custom credentials callback to provide credentials if the remotes require authentication. /// /// Otherwise, we will use the git configuration to perform the same task as the `git credential` helper program, /// which is calling other helper programs in succession while resorting to a prompt to obtain credentials from the /// user. /// /// A custom function may also be used to prevent accessing resources with authentication. /// /// Use the [`configured_credentials()`](Connection::configured_credentials()) method to obtain the implementation /// that would otherwise be used, which can be useful to proxy the default configuration and obtain information about the /// URLs to authenticate with. pub fn with_credentials( mut self, helper: impl FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a, ) -> Self { self.authenticate = Some(Box::new(helper)); self } /// Provide configuration to be used before the first handshake is conducted. 
/// It's typically created by initializing it with [`Repository::transport_options()`](crate::Repository::transport_options()), /// which is also the default if this isn't set explicitly. Note that all the default configuration is created from `git` /// configuration, which can also be manipulated through overrides to affect the default configuration. /// /// Use this method to provide transport configuration with custom backend configuration that is not configurable by other means and /// custom to the application at hand. pub fn with_transport_options(mut self, config: Box) -> Self { self.transport_options = Some(config); self } } /// Mutation impl<'a, T> Connection<'a, '_, T> where T: gix_transport::client::Transport, { /// Like [`with_credentials()`](Self::with_credentials()), but without consuming the connection. pub fn set_credentials( &mut self, helper: impl FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a, ) -> &mut Self { self.authenticate = Some(Box::new(helper)); self } /// Like [`with_transport_options()`](Self::with_transport_options()), but without consuming the connection. pub fn set_transport_options(&mut self, config: Box) -> &mut Self { self.transport_options = Some(config); self } } /// Access impl<'repo, T> Connection<'_, 'repo, T> where T: gix_transport::client::Transport, { /// A utility to return a function that will use this repository's configuration to obtain credentials, similar to /// what `git credential` is doing. /// /// It's meant to be used by users of the [`with_credentials()`](Self::with_credentials()) builder to gain access to the /// default way of handling credentials, which they can call as fallback. 
pub fn configured_credentials( &self, url: gix_url::Url, ) -> Result, crate::config::credential_helpers::Error> { let (mut cascade, _action_with_normalized_url, prompt_opts) = self.remote.repo.config_snapshot().credential_helpers(url)?; Ok(Box::new(move |action| cascade.invoke(action, prompt_opts.clone())) as AuthenticateFn<'_>) } /// Return the underlying remote that instantiate this connection. pub fn remote(&self) -> &Remote<'repo> { self.remote } /// Provide a mutable transport to allow interacting with it according to its actual type. /// Note that the caller _should not_ call [`configure()`](gix_protocol::transport::client::TransportWithoutIO::configure()) /// as we will call it automatically before performing the handshake. Instead, to bring in custom configuration, /// call [`with_transport_options()`](Connection::with_transport_options()). pub fn transport_mut(&mut self) -> &mut T { &mut self.transport.inner } } gix-0.69.1/src/remote/connection/fetch/config.rs000064400000000000000000000014531046102023000176400ustar 00000000000000use super::Error; use crate::{ config::{cache::util::ApplyLeniency, tree::Pack}, Repository, }; pub fn index_threads(repo: &Repository) -> Result, Error> { Ok(repo .config .resolved .integer_filter(Pack::THREADS, &mut repo.filter_config_section()) .map(|threads| Pack::THREADS.try_into_usize(threads)) .transpose() .with_leniency(repo.options.lenient_config)?) } pub fn pack_index_version(repo: &Repository) -> Result { Ok(repo .config .resolved .integer(Pack::INDEX_VERSION) .map(|value| Pack::INDEX_VERSION.try_into_index_version(value)) .transpose() .with_leniency(repo.options.lenient_config)? .unwrap_or(gix_pack::index::Version::V2)) } gix-0.69.1/src/remote/connection/fetch/error.rs000064400000000000000000000037711046102023000175310ustar 00000000000000use crate::config; /// The error returned by [`receive()`](super::Prepare::receive()). 
// TODO: remove unused variants #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Fetch(#[from] gix_protocol::fetch::Error), #[error("The value to configure pack threads should be 0 to auto-configure or the amount of threads to use")] PackThreads(#[from] config::unsigned_integer::Error), #[error("The value to configure the pack index version should be 1 or 2")] PackIndexVersion(#[from] config::key::GenericError), #[error("Cannot fetch from a remote that uses {remote} while local repository uses {local} for object hashes")] IncompatibleObjectHash { local: gix_hash::Kind, remote: gix_hash::Kind, }, #[error(transparent)] LoadAlternates(#[from] gix_odb::store::load_index::Error), #[error(transparent)] Client(#[from] gix_protocol::transport::client::Error), #[error(transparent)] UpdateRefs(#[from] super::refs::update::Error), #[error("Failed to remove .keep file at \"{}\"", path.display())] RemovePackKeepFile { path: std::path::PathBuf, source: std::io::Error, }, #[error("None of the refspec(s) {} matched any of the {num_remote_refs} refs on the remote", refspecs.iter().map(|r| r.to_ref().instruction().to_bstring().to_string()).collect::>().join(", "))] NoMapping { refspecs: Vec, num_remote_refs: usize, }, #[error("Could not obtain configuration to learn if shallow remotes should be rejected")] RejectShallowRemoteConfig(#[from] config::boolean::Error), #[error(transparent)] NegotiationAlgorithmConfig(#[from] config::key::GenericErrorWithValue), } impl gix_protocol::transport::IsSpuriousError for Error { fn is_spurious(&self) -> bool { match self { Error::Fetch(err) => err.is_spurious(), Error::Client(err) => err.is_spurious(), _ => false, } } } gix-0.69.1/src/remote/connection/fetch/mod.rs000064400000000000000000000175251046102023000171610ustar 00000000000000use gix_protocol::transport::client::Transport; use crate::{ bstr::BString, remote, remote::{ fetch::{DryRun, RefMap}, ref_map, Connection, }, Progress, }; mod error; 
pub use error::Error; use crate::remote::fetch::WritePackedRefs; /// The way reflog messages should be composed whenever a ref is written with recent objects from a remote. pub enum RefLogMessage { /// Prefix the log with `action` and generate the typical suffix as `git` would. Prefixed { /// The action to use, like `fetch` or `pull`. action: String, }, /// Control the entire message, using `message` verbatim. Override { /// The complete reflog message. message: BString, }, } impl RefLogMessage { pub(crate) fn compose(&self, context: &str) -> BString { match self { RefLogMessage::Prefixed { action } => format!("{action}: {context}").into(), RefLogMessage::Override { message } => message.to_owned(), } } } /// The status of the repository after the fetch operation #[derive(Debug, Clone)] pub enum Status { /// Nothing changed as the remote didn't have anything new compared to our tracking branches, thus no pack was received /// and no new object was added. /// /// As we could determine that nothing changed without remote interaction, there was no negotiation at all. NoPackReceived { /// If `true`, we didn't receive a pack due to dry-run mode being enabled. dry_run: bool, /// Information about the pack negotiation phase if negotiation happened at all. /// /// It's possible that negotiation didn't have to happen as no reference of interest changed on the server. negotiate: Option, /// However, depending on the refspecs, references might have been updated nonetheless to point to objects as /// reported by the remote. update_refs: refs::update::Outcome, }, /// There was at least one tip with a new object which we received. Change { /// Information about the pack negotiation phase. negotiate: outcome::Negotiate, /// Information collected while writing the pack and its index. write_pack_bundle: gix_pack::bundle::write::Outcome, /// Information collected while updating references. 
update_refs: refs::update::Outcome, }, } /// The outcome of receiving a pack via [`Prepare::receive()`]. #[derive(Debug, Clone)] pub struct Outcome { /// The result of the initial mapping of references, the prerequisite for any fetch. pub ref_map: RefMap, /// The outcome of the handshake with the server. pub handshake: gix_protocol::handshake::Outcome, /// The status of the operation to indicate what happened. pub status: Status, } /// Additional types related to the outcome of a fetch operation. pub mod outcome { /// Information about the negotiation phase of a fetch. /// /// Note that negotiation can happen even if no pack is ultimately produced. #[derive(Default, Debug, Clone)] pub struct Negotiate { /// The negotiation graph indicating what kind of information 'the algorithm' collected in the end. pub graph: gix_negotiate::IdMap, /// Additional information for each round of negotiation. pub rounds: Vec, } } pub use gix_protocol::fetch::ProgressId; /// pub mod prepare { /// The error returned by [`prepare_fetch()`][super::Connection::prepare_fetch()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Cannot perform a meaningful fetch operation without any configured ref-specs")] MissingRefSpecs, #[error(transparent)] RefMap(#[from] crate::remote::ref_map::Error), } impl gix_protocol::transport::IsSpuriousError for Error { fn is_spurious(&self) -> bool { match self { Error::RefMap(err) => err.is_spurious(), _ => false, } } } } impl<'remote, 'repo, T> Connection<'remote, 'repo, T> where T: Transport, { /// Perform a handshake with the remote and obtain a ref-map with `options`, and from there one /// Note that at this point, the `transport` should already be configured using the [`transport_mut()`][Self::transport_mut()] /// method, as it will be consumed here. /// /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via git-config. 
/// /// # Async Experimental /// /// Note that this implementation is currently limited correctly in blocking mode only as it relies on Drop semantics to close the connection /// should the fetch not be performed. Furthermore, there the code doing the fetch is inherently blocking and it's not offloaded to a thread, /// making this call block the executor. /// It's best to unblock it by placing it into its own thread or offload it should usage in an async context be truly required. #[allow(clippy::result_large_err)] #[gix_protocol::maybe_async::maybe_async] pub async fn prepare_fetch( mut self, progress: impl Progress, options: ref_map::Options, ) -> Result, prepare::Error> { if self.remote.refspecs(remote::Direction::Fetch).is_empty() && options.extra_refspecs.is_empty() { return Err(prepare::Error::MissingRefSpecs); } let ref_map = self.ref_map_by_ref(progress, options).await?; Ok(Prepare { con: Some(self), ref_map, dry_run: DryRun::No, reflog_message: None, write_packed_refs: WritePackedRefs::Never, shallow: Default::default(), }) } } impl Prepare<'_, '_, T> where T: Transport, { /// Return the `ref_map` (that includes the server handshake) which was part of listing refs prior to fetching a pack. pub fn ref_map(&self) -> &RefMap { &self.ref_map } } mod config; mod receive_pack; /// #[path = "update_refs/mod.rs"] pub mod refs; /// A structure to hold the result of the handshake with the remote and configure the upcoming fetch operation. pub struct Prepare<'remote, 'repo, T> where T: Transport, { con: Option>, ref_map: RefMap, dry_run: DryRun, reflog_message: Option, write_packed_refs: WritePackedRefs, shallow: remote::fetch::Shallow, } /// Builder impl Prepare<'_, '_, T> where T: Transport, { /// If dry run is enabled, no change to the repository will be made. /// /// This works by not actually fetching the pack after negotiating it, nor will refs be updated. 
pub fn with_dry_run(mut self, enabled: bool) -> Self { self.dry_run = if enabled { DryRun::Yes } else { DryRun::No }; self } /// If enabled, don't write ref updates to loose refs, but put them exclusively to packed-refs. /// /// This improves performance and allows case-sensitive filesystems to deal with ref names that would otherwise /// collide. pub fn with_write_packed_refs_only(mut self, enabled: bool) -> Self { self.write_packed_refs = if enabled { WritePackedRefs::Only } else { WritePackedRefs::Never }; self } /// Set the reflog message to use when updating refs after fetching a pack. pub fn with_reflog_message(mut self, reflog_message: RefLogMessage) -> Self { self.reflog_message = reflog_message.into(); self } /// Define what to do when the current repository is a shallow clone. /// /// *Has no effect if the current repository is not as shallow clone.* pub fn with_shallow(mut self, shallow: remote::fetch::Shallow) -> Self { self.shallow = shallow; self } } gix-0.69.1/src/remote/connection/fetch/receive_pack.rs000064400000000000000000000301041046102023000210060ustar 00000000000000use crate::{ config::{ cache::util::ApplyLeniency, tree::{Clone, Fetch, Key}, }, remote, remote::{ connection::fetch::config, fetch, fetch::{negotiate::Algorithm, outcome, refs, Error, Outcome, Prepare, RefLogMessage, Status}, }, }; use gix_odb::store::RefreshMode; use gix_protocol::fetch::negotiate; use gix_protocol::{fetch::Arguments, transport::client::Transport}; use std::ops::DerefMut; use std::path::PathBuf; use std::sync::atomic::AtomicBool; impl Prepare<'_, '_, T> where T: Transport, { /// Receive the pack and perform the operation as configured by git via `git-config` or overridden by various builder methods. /// Return `Ok(Outcome)` with an [`Outcome::status`] indicating if a change was made or not. /// /// Note that when in dry-run mode, we don't read the pack the server prepared, which leads the server to be hung up on unexpectedly. 
/// /// ### Negotiation /// /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being /// experimented with. /// /// ### Pack `.keep` files /// /// That packs that are freshly written to the object database are vulnerable to garbage collection for the brief time that /// it takes between them being placed and the respective references to be written to disk which binds their objects to the /// commit graph, making them reachable. /// /// To circumvent this issue, a `.keep` file is created before any pack related file (i.e. `.pack` or `.idx`) is written, /// which indicates the garbage collector (like `git maintenance`, `git gc`) to leave the corresponding pack file alone. /// /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically leaving /// in its place at `write_pack_bundle.keep_path` a `None`. /// However, if no ref-update happened the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller. /// A known application for this behaviour is in `remote-helper` implementations which should send this path via `lock ` to stdout /// to inform git about the file that it will remove once it updated the refs accordingly. /// /// ### Deviation /// /// When **updating refs**, the `git-fetch` docs state the following: /// /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), /// whether that’s swapping e.g. a tree object for a blob, or a commit for another commit that’s doesn’t have the previous commit /// as an ancestor etc. /// /// We explicitly don't special case those refs and expect the caller to take control. Note that by its nature, /// force only applies to refs pointing to commits and if they don't, they will be updated either way in our /// implementation as well. 
/// /// ### Async Mode Shortcoming /// /// Currently, the entire process of resolving a pack is blocking the executor. This can be fixed using the `blocking` crate, but it /// didn't seem worth the tradeoff of having more complex code. /// /// ### Configuration /// /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well. /// #[gix_protocol::maybe_async::maybe_async] pub async fn receive

(mut self, progress: P, should_interrupt: &AtomicBool) -> Result where P: gix_features::progress::NestedProgress, P::SubProgress: 'static, { let ref_map = &self.ref_map; if ref_map.mappings.is_empty() && !ref_map.remote_refs.is_empty() { let mut specs = ref_map.refspecs.clone(); specs.extend(ref_map.extra_refspecs.clone()); return Err(Error::NoMapping { refspecs: specs, num_remote_refs: ref_map.remote_refs.len(), }); } let mut con = self.con.take().expect("receive() can only be called once"); let mut handshake = con.handshake.take().expect("receive() can only be called once"); let repo = con.remote.repo; let expected_object_hash = repo.object_hash(); if ref_map.object_hash != expected_object_hash { return Err(Error::IncompatibleObjectHash { local: expected_object_hash, remote: ref_map.object_hash, }); } let fetch_options = gix_protocol::fetch::Options { shallow_file: repo.shallow_file(), shallow: &self.shallow, tags: con.remote.fetch_tags, reject_shallow_remote: repo .config .resolved .boolean_filter("clone.rejectShallow", &mut repo.filter_config_section()) .map(|val| Clone::REJECT_SHALLOW.enrich_error(val)) .transpose()? .unwrap_or(false), }; let context = gix_protocol::fetch::Context { handshake: &mut handshake, transport: &mut con.transport.inner, user_agent: repo.config.user_agent_tuple(), trace_packetlines: con.trace, }; let negotiator = repo .config .resolved .string(Fetch::NEGOTIATION_ALGORITHM.logical_name().as_str()) .map(|n| Fetch::NEGOTIATION_ALGORITHM.try_into_negotiation_algorithm(n)) .transpose() .with_leniency(repo.config.lenient_config)? .unwrap_or(Algorithm::Consecutive) .into_negotiator(); let graph_repo = { let mut r = repo.clone(); // assure that checking for unknown server refs doesn't trigger ODB refreshes. r.objects.refresh = RefreshMode::Never; // we cache everything of importance in the graph and thus don't need an object cache. 
r.objects.unset_object_cache(); r }; let cache = graph_repo.commit_graph_if_enabled().ok().flatten(); let mut graph = graph_repo.revision_graph(cache.as_ref()); let alternates = repo.objects.store_ref().alternate_db_paths()?; let mut negotiate = Negotiate { objects: &graph_repo.objects, refs: &graph_repo.refs, graph: &mut graph, alternates, ref_map, shallow: &self.shallow, tags: con.remote.fetch_tags, negotiator, open_options: repo.options.clone(), }; let write_pack_options = gix_pack::bundle::write::Options { thread_limit: config::index_threads(repo)?, index_version: config::pack_index_version(repo)?, iteration_mode: gix_pack::data::input::Mode::Verify, object_hash: con.remote.repo.object_hash(), }; let mut write_pack_bundle = None; let res = gix_protocol::fetch( &mut negotiate, |reader, progress, should_interrupt| -> Result { let mut may_read_to_end = false; write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) { let res = gix_pack::Bundle::write_to_directory( reader, Some(&repo.objects.store_ref().path().join("pack")), progress, should_interrupt, Some(Box::new({ let repo = repo.clone(); repo.objects })), write_pack_options, )?; may_read_to_end = true; Some(res) } else { None }; Ok(may_read_to_end) }, progress, should_interrupt, context, fetch_options, ) .await?; let negotiate = res.map(|v| outcome::Negotiate { graph: graph.detach(), rounds: v.negotiate.rounds, }); if matches!(handshake.server_protocol_version, gix_protocol::transport::Protocol::V2) { gix_protocol::indicate_end_of_interaction(&mut con.transport.inner, con.trace) .await .ok(); } let update_refs = refs::update( repo, self.reflog_message .take() .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }), &self.ref_map.mappings, con.remote.refspecs(remote::Direction::Fetch), &self.ref_map.extra_refspecs, con.remote.fetch_tags, self.dry_run, self.write_packed_refs, )?; if let Some(bundle) = write_pack_bundle.as_mut() { if !update_refs.edits.is_empty() || bundle.index.num_objects 
== 0 { if let Some(path) = bundle.keep_path.take() { std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?; } } } let out = Outcome { handshake, ref_map: std::mem::take(&mut self.ref_map), status: match write_pack_bundle { Some(write_pack_bundle) => Status::Change { write_pack_bundle, update_refs, negotiate: negotiate.expect("if we have a pack, we always negotiated it"), }, None => Status::NoPackReceived { dry_run: matches!(self.dry_run, fetch::DryRun::Yes), negotiate, update_refs, }, }, }; Ok(out) } } struct Negotiate<'a, 'b, 'c> { objects: &'a crate::OdbHandle, refs: &'a gix_ref::file::Store, graph: &'a mut gix_negotiate::Graph<'b, 'c>, alternates: Vec, ref_map: &'a gix_protocol::fetch::RefMap, shallow: &'a gix_protocol::fetch::Shallow, tags: gix_protocol::fetch::Tags, negotiator: Box, open_options: crate::open::Options, } impl gix_protocol::fetch::Negotiate for Negotiate<'_, '_, '_> { fn mark_complete_and_common_ref(&mut self) -> Result { negotiate::mark_complete_and_common_ref( &self.objects, self.refs, { let alternates = std::mem::take(&mut self.alternates); let open_options = self.open_options.clone(); move || -> Result<_, std::convert::Infallible> { Ok(alternates .into_iter() .filter_map(move |path| { path.ancestors() .nth(1) .and_then(|git_dir| crate::open_opts(git_dir, open_options.clone()).ok()) }) .map(|repo| (repo.refs, repo.objects))) } }, self.negotiator.deref_mut(), &mut *self.graph, self.ref_map, self.shallow, negotiate::make_refmapping_ignore_predicate(self.tags, self.ref_map), ) } fn add_wants(&mut self, arguments: &mut Arguments, remote_ref_target_known: &[bool]) -> bool { negotiate::add_wants( self.objects, arguments, self.ref_map, remote_ref_target_known, self.shallow, negotiate::make_refmapping_ignore_predicate(self.tags, self.ref_map), ) } fn one_round( &mut self, state: &mut negotiate::one_round::State, arguments: &mut Arguments, previous_response: Option<&gix_protocol::fetch::Response>, ) -> 
Result<(negotiate::Round, bool), negotiate::Error> { negotiate::one_round( self.negotiator.deref_mut(), &mut *self.graph, state, arguments, previous_response, ) } } gix-0.69.1/src/remote/connection/fetch/update_refs/mod.rs000064400000000000000000000535151046102023000214610ustar 00000000000000#![allow(clippy::result_large_err)] use std::{collections::BTreeMap, path::PathBuf}; use gix_object::Exists; use gix_ref::{ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog}, Target, TargetRef, }; use crate::{ ext::ObjectIdExt, remote::{ fetch, fetch::{ refmap::Source, refs::update::{Mode, TypeChange}, RefLogMessage, }, }, Repository, }; /// pub mod update; /// Information about the update of a single reference, corresponding the respective entry in [`RefMap::mappings`][crate::remote::fetch::RefMap::mappings]. #[derive(Debug, Clone, PartialEq, Eq)] pub struct Update { /// The way the update was performed. pub mode: Mode, /// If not `None`, the update also affects the type of the reference. This also implies that `edit_index` is not None. pub type_change: Option, /// The index to the edit that was created from the corresponding mapping, or `None` if there was no local ref. pub edit_index: Option, } impl From for Update { fn from(mode: Mode) -> Self { Update { mode, type_change: None, edit_index: None, } } } /// Update all refs as derived from `refmap.mappings` and produce an `Outcome` informing about all applied changes in detail, with each /// [`update`][Update] corresponding to the [`fetch::Mapping`] of at the same index. /// If `dry_run` is true, ref transactions won't actually be applied, but are assumed to work without error so the underlying /// `repo` is not actually changed. Also it won't perform an 'object exists' check as these are likely not to exist as the pack /// wasn't fetched either. /// `action` is the prefix used for reflog entries, and is typically "fetch". 
/// /// It can be used to produce typical information that one is used to from `git fetch`. /// /// We will reject updates only if… /// /// * …fast-forward rules are violated /// * …the local ref is currently checked out /// * …existing refs would not become 'unborn', i.e. point to a reference that doesn't exist and won't be created due to ref-specs /// /// With these safeguards in place, one can handle each naturally and implement mirrors or bare repos easily. #[allow(clippy::too_many_arguments)] pub(crate) fn update( repo: &Repository, message: RefLogMessage, mappings: &[fetch::refmap::Mapping], refspecs: &[gix_refspec::RefSpec], extra_refspecs: &[gix_refspec::RefSpec], fetch_tags: fetch::Tags, dry_run: fetch::DryRun, write_packed_refs: fetch::WritePackedRefs, ) -> Result { let _span = gix_trace::detail!("update_refs()", mappings = mappings.len()); let mut edits = Vec::new(); let mut updates = Vec::new(); let mut edit_indices_to_validate = Vec::new(); let implicit_tag_refspec = fetch_tags .to_refspec() .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included)); for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map( |fetch::refmap::Mapping { remote, local, spec_index, }| { spec_index.get(refspecs, extra_refspecs).map(|spec| { ( remote, local, spec, implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec), ) }) }, ) { // `None` only if unborn. 
let remote_id = remote.as_id(); if matches!(dry_run, fetch::DryRun::No) && !remote_id.map_or(true, |id| repo.objects.exists(id)) { if let Some(remote_id) = remote_id.filter(|id| !repo.objects.exists(id)) { let update = if is_implicit_tag { Mode::ImplicitTagNotSentByRemote.into() } else { Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into() }; updates.push(update); continue; } } let mut checked_out_branches = worktree_branches(repo)?; let (mode, edit_index, type_change) = match local { Some(name) => { let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? { Some(existing) => { if let Some(wt_dirs) = checked_out_branches.get_mut(existing.name()) { wt_dirs.sort(); wt_dirs.dedup(); let mode = Mode::RejectedCurrentlyCheckedOut { worktree_dirs: wt_dirs.to_owned(), }; updates.push(mode.into()); continue; } match existing .try_id() .map_or_else(|| existing.clone().peel_to_id_in_place(), Ok) .map(crate::Id::detach) { Ok(local_id) => { let remote_id = match remote_id { Some(id) => id, None => { // we don't allow to go back to unborn state if there is a local reference already present. // Note that we will be changing it to a symbolic reference just fine. updates.push(Mode::RejectedToReplaceWithUnborn.into()); continue; } }; let (mode, reflog_message) = if local_id == remote_id { (Mode::NoChangeNeeded, "no update will be performed") } else if let Some(gix_ref::Category::Tag) = existing.name().category() { if spec.allow_non_fast_forward() { (Mode::Forced, "updating tag") } else { updates.push(Mode::RejectedTagUpdate.into()); continue; } } else { let mut force = spec.allow_non_fast_forward(); let is_fast_forward = match dry_run { fetch::DryRun::No => { let ancestors = repo .find_object(local_id)? 
.try_into_commit() .map_err(|_| ()) .and_then(|c| c.committer().map(|a| a.time.seconds).map_err(|_| ())) .and_then(|local_commit_time| { remote_id .to_owned() .ancestors(&repo.objects) .sorting( gix_traverse::commit::simple::Sorting::ByCommitTimeCutoff { order: Default::default(), seconds: local_commit_time, }, ) .map_err(|_| ()) }); match ancestors { Ok(mut ancestors) => { ancestors.any(|cid| cid.map_or(false, |c| c.id == local_id)) } Err(_) => { force = true; false } } } fetch::DryRun::Yes => true, }; if is_fast_forward { ( Mode::FastForward, matches!(dry_run, fetch::DryRun::Yes) .then(|| "fast-forward (guessed in dry-run)") .unwrap_or("fast-forward"), ) } else if force { (Mode::Forced, "forced-update") } else { updates.push(Mode::RejectedNonFastForward.into()); continue; } }; ( mode, reflog_message, existing.name().to_owned(), PreviousValue::MustExistAndMatch(existing.target().into_owned()), ) } Err(crate::reference::peel::Error::ToId(gix_ref::peel::to_id::Error::FollowToObject( gix_ref::peel::to_object::Error::Follow(_), ))) => { // An unborn reference, always allow it to be changed to whatever the remote wants. 
( if existing.target().try_name().map(gix_ref::FullNameRef::as_bstr) == remote.as_target() { Mode::NoChangeNeeded } else { Mode::Forced }, "change unborn ref", existing.name().to_owned(), PreviousValue::MustExistAndMatch(existing.target().into_owned()), ) } Err(err) => return Err(err.into()), } } None => { let name: gix_ref::FullName = name.try_into()?; let reflog_msg = match name.category() { Some(gix_ref::Category::Tag) => "storing tag", Some(gix_ref::Category::LocalBranch) => "storing head", _ => "storing ref", }; ( Mode::New, reflog_msg, name, PreviousValue::ExistingMustMatch(new_value_by_remote(repo, remote, mappings)?), ) } }; let new = new_value_by_remote(repo, remote, mappings)?; let type_change = match (&previous_value, &new) { ( PreviousValue::ExistingMustMatch(Target::Object(_)) | PreviousValue::MustExistAndMatch(Target::Object(_)), Target::Symbolic(_), ) => Some(TypeChange::DirectToSymbolic), ( PreviousValue::ExistingMustMatch(Target::Symbolic(_)) | PreviousValue::MustExistAndMatch(Target::Symbolic(_)), Target::Object(_), ) => Some(TypeChange::SymbolicToDirect), _ => None, }; // We are here because this edit should work and fast-forward rules are respected. // But for setting a symref-target, we have to be sure that the target already exists // or will exists. To be sure all rules are respected, we delay the check to when the // edit-list has been built. let edit_index = edits.len(); if matches!(new, Target::Symbolic(_)) { let anticipated_update_index = updates.len(); edit_indices_to_validate.push((anticipated_update_index, edit_index)); } let edit = RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: message.compose(reflog_message), }, expected: previous_value, new, }, name, // We must not deref symrefs or we will overwrite their destination, which might be checked out // and we don't check for that case. 
deref: false, }; edits.push(edit); (mode, Some(edit_index), type_change) } None => (Mode::NoChangeNeeded, None, None), }; updates.push(Update { mode, type_change, edit_index, }); } for (update_index, edit_index) in edit_indices_to_validate { let edit = &edits[edit_index]; if update_needs_adjustment_as_edits_symbolic_target_is_missing(edit, repo, &edits) { let edit = &mut edits[edit_index]; let update = &mut updates[update_index]; update.mode = Mode::RejectedToReplaceWithUnborn; update.type_change = None; match edit.change { Change::Update { ref expected, ref mut new, ref mut log, .. } => match expected { PreviousValue::MustExistAndMatch(existing) => { *new = existing.clone(); log.message = "no-op".into(); } _ => unreachable!("at this point it can only be one variant"), }, Change::Delete { .. } => { unreachable!("we don't do that here") } }; } } let edits = match dry_run { fetch::DryRun::No => { let _span = gix_trace::detail!("apply", edits = edits.len()); let (file_lock_fail, packed_refs_lock_fail) = repo .config .lock_timeout() .map_err(crate::reference::edit::Error::from)?; repo.refs .transaction() .packed_refs( match write_packed_refs { fetch::WritePackedRefs::Only => { gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(&repo.objects))}, fetch::WritePackedRefs::Never => gix_ref::file::transaction::PackedRefs::DeletionsOnly } ) .prepare(edits, file_lock_fail, packed_refs_lock_fail) .map_err(crate::reference::edit::Error::from)? .commit(repo.committer().transpose().map_err(|err| update::Error::EditReferences(crate::reference::edit::Error::ParseCommitterTime(err)))?) .map_err(crate::reference::edit::Error::from)? } fetch::DryRun::Yes => edits, }; Ok(update::Outcome { edits, updates }) } /// Figure out if target of `edit` points to a reference that doesn't exist in `repo` and won't exist as it's not in any of `edits`. /// If so, return true. 
fn update_needs_adjustment_as_edits_symbolic_target_is_missing( edit: &RefEdit, repo: &Repository, edits: &[RefEdit], ) -> bool { match edit.change.new_value().expect("here we need a symlink") { TargetRef::Object(_) => unreachable!("BUG: we already know it's symbolic"), TargetRef::Symbolic(new_target_ref) => { match &edit.change { Change::Update { expected, .. } => match expected { PreviousValue::MustExistAndMatch(current_target) => { if let Target::Symbolic(current_target_name) = current_target { if current_target_name.as_ref() == new_target_ref { return false; // no-op are always fine } let current_is_unborn = repo.refs.try_find(current_target_name).ok().flatten().is_none(); if current_is_unborn { return false; } } } PreviousValue::ExistingMustMatch(_) => return false, // this means the ref doesn't exist locally, so we can create unborn refs anyway _ => { unreachable!("BUG: we don't do that here") } }, Change::Delete { .. } => { unreachable!("we don't ever delete here") } }; let target_ref_exists_locally = repo.refs.try_find(new_target_ref).ok().flatten().is_some(); if target_ref_exists_locally { return false; } let target_ref_will_be_created = edits.iter().any(|edit| edit.name.as_ref() == new_target_ref); !target_ref_will_be_created } } } fn new_value_by_remote( repo: &Repository, remote: &Source, mappings: &[fetch::refmap::Mapping], ) -> Result { let remote_id = remote.as_id(); Ok( if let Source::Ref( gix_protocol::handshake::Ref::Symbolic { target, .. } | gix_protocol::handshake::Ref::Unborn { target, .. }, ) = &remote { match mappings.iter().find_map(|m| { m.remote.as_name().and_then(|name| { (name == target) .then(|| m.local.as_ref().and_then(|local| local.try_into().ok())) .flatten() }) }) { // Map the target on the remote to the local branch name, which should be covered by refspecs. 
Some(local_branch) => { // This is always safe because… // - the reference may exist already // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the // target reference still exists and we can point to it. Target::Symbolic(local_branch) } None => { // If we can't map it, it's usually a an unborn branch causing this, or a the target isn't covered // by any refspec so we don't officially pull it in. match remote_id { Some(desired_id) => { if repo.try_find_reference(target)?.is_some() { // We are allowed to change a direct reference to a symbolic one, which may point to other objects // than the remote. The idea is that we are fine as long as the resulting refs are valid. Target::Symbolic(target.try_into()?) } else { // born branches that we don't have in our refspecs we create peeled. That way they can be used. Target::Object(desired_id.to_owned()) } } // Unborn branches we create as such, with the location they point to on the remote which helps mirroring. None => Target::Symbolic(target.try_into()?), } } } } else { Target::Object(remote_id.expect("unborn case handled earlier").to_owned()) }, ) } fn insert_head( head: Option>, out: &mut BTreeMap>, ) -> Result<(), update::Error> { if let Some((head, wd)) = head.and_then(|head| head.repo.work_dir().map(|wd| (head, wd))) { out.entry("HEAD".try_into().expect("valid")) .or_default() .push(wd.to_owned()); let mut ref_chain = Vec::new(); let mut cursor = head.try_into_referent(); while let Some(ref_) = cursor { ref_chain.push(ref_.name().to_owned()); cursor = ref_.follow().transpose()?; } for name in ref_chain { out.entry(name).or_default().push(wd.to_owned()); } } Ok(()) } fn worktree_branches(repo: &Repository) -> Result>, update::Error> { let mut map = BTreeMap::new(); insert_head(repo.head().ok(), &mut map)?; for proxy in repo.worktrees()? 
{ let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?; insert_head(repo.head().ok(), &mut map)?; } Ok(map) } #[cfg(test)] mod tests; gix-0.69.1/src/remote/connection/fetch/update_refs/tests.rs000064400000000000000000001076071046102023000220460ustar 00000000000000pub fn restricted() -> crate::open::Options { crate::open::Options::isolated().config_overrides(["user.name=gitoxide", "user.email=gitoxide@localhost"]) } /// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_. fn hex_to_id(hex: &str) -> gix_hash::ObjectId { gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex") } mod update { use gix_testtools::Result; use super::hex_to_id; use crate as gix; fn base_repo_path() -> String { gix::path::realpath( gix_testtools::scripted_fixture_read_only("make_remote_repos.sh") .unwrap() .join("base"), ) .unwrap() .to_string_lossy() .into_owned() } fn repo(name: &str) -> gix::Repository { let dir = gix_testtools::scripted_fixture_read_only_with_args_single_archive( "make_fetch_repos.sh", [base_repo_path()], ) .unwrap(); gix::open_opts(dir.join(name), restricted()).unwrap() } fn named_repo(name: &str) -> gix::Repository { let dir = gix_testtools::scripted_fixture_read_only("make_remote_repos.sh").unwrap(); gix::open_opts(dir.join(name), restricted()).unwrap() } fn repo_rw(name: &str) -> (gix::Repository, gix_testtools::tempfile::TempDir) { let dir = gix_testtools::scripted_fixture_writable_with_args_single_archive( "make_fetch_repos.sh", [base_repo_path()], gix_testtools::Creation::ExecuteScript, ) .unwrap(); let repo = gix::open_opts(dir.path().join(name), restricted()).unwrap(); (repo, dir) } use gix_ref::{ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog}, Target, TargetRef, }; use crate::{ bstr::BString, remote::{ fetch, fetch::{ refmap::Mapping, refmap::Source, refmap::SpecIndex, refs::{tests::restricted, update::TypeChange}, RefLogMessage, }, }, }; #[test] fn various_valid_updates() { let repo = 
repo("two-origins"); for (spec, expected_mode, reflog_message, detail) in [ ( "refs/heads/main:refs/remotes/origin/main", fetch::refs::update::Mode::NoChangeNeeded, Some("no update will be performed"), "these refs are en-par since the initial clone", ), ( "refs/heads/main", fetch::refs::update::Mode::NoChangeNeeded, None, "without local destination ref there is nothing to do for us, ever (except for FETCH_HEADs) later", ), ( "refs/heads/main:refs/remotes/origin/new-main", fetch::refs::update::Mode::New, Some("storing ref"), "the destination branch doesn't exist and needs to be created", ), ( "refs/heads/main:refs/heads/feature", fetch::refs::update::Mode::New, Some("storing head"), "reflog messages are specific to the type of branch stored, to some limited extend", ), ( "refs/heads/main:refs/tags/new-tag", fetch::refs::update::Mode::New, Some("storing tag"), "reflog messages are specific to the type of branch stored, to some limited extend", ), ( "+refs/heads/main:refs/remotes/origin/new-main", fetch::refs::update::Mode::New, Some("storing ref"), "just to validate that we really are in dry-run mode, or else this ref would be present now", ), ( "+refs/heads/main:refs/remotes/origin/g", fetch::refs::update::Mode::FastForward, Some("fast-forward (guessed in dry-run)"), "a forced non-fastforward (main goes backwards), but dry-run calls it fast-forward", ), ( "+refs/heads/main:refs/tags/b-tag", fetch::refs::update::Mode::Forced, Some("updating tag"), "tags can only be forced", ), ( "refs/heads/main:refs/tags/b-tag", fetch::refs::update::Mode::RejectedTagUpdate, None, "otherwise a tag is always refusing itself to be overwritten (no-clobber)", ), ( "+refs/remotes/origin/g:refs/heads/main", fetch::refs::update::Mode::RejectedCurrentlyCheckedOut { worktree_dirs: vec![repo.work_dir().expect("present").to_owned()], }, None, "checked out branches cannot be written, as it requires a merge of sorts which isn't done here", ), ( 
"ffffffffffffffffffffffffffffffffffffffff:refs/heads/invalid-source-object", fetch::refs::update::Mode::RejectedSourceObjectNotFound { id: hex_to_id("ffffffffffffffffffffffffffffffffffffffff"), }, None, "checked out branches cannot be written, as it requires a merge of sorts which isn't done here", ), ( "refs/remotes/origin/g:refs/heads/not-currently-checked-out", fetch::refs::update::Mode::FastForward, Some("fast-forward (guessed in dry-run)"), "a fast-forward only fast-forward situation, all good", ), ] { let (mapping, specs) = mapping_from_spec(spec, &repo); let out = fetch::refs::update( &repo, prefixed("action"), &mapping, &specs, &[], fetch::Tags::None, reflog_message.map_or(fetch::DryRun::No, |_| fetch::DryRun::Yes), fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!( out.updates, vec![fetch::refs::Update { type_change: None, mode: expected_mode.clone(), edit_index: reflog_message.map(|_| 0), }], "{spec:?}: {detail}" ); assert_eq!(out.edits.len(), reflog_message.map_or(0, |_| 1)); if let Some(reflog_message) = reflog_message { let edit = &out.edits[0]; match &edit.change { Change::Update { log, new, .. 
} => { assert_eq!( log.message, format!("action: {reflog_message}"), "{spec}: reflog messages are specific and we emulate git word for word" ); let remote_ref = repo .find_reference(specs[0].to_ref().source().expect("always present")) .unwrap(); assert_eq!( new.id(), remote_ref.target().id(), "remote ref provides the id to set in the local reference" ); } _ => unreachable!("only updates"), } } } } #[test] fn checked_out_branches_in_worktrees_are_rejected_with_additional_information() -> Result { let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args_single_archive( "make_fetch_repos.sh", [base_repo_path()], )?)?; let repo = root.join("worktree-root"); let repo = gix::open_opts(repo, restricted())?; for (branch, path_from_root) in [ ("main", "worktree-root"), ("wt-a-nested", "prev/wt-a-nested"), ("wt-a", "wt-a"), ("nested-wt-b", "wt-a/nested-wt-b"), ("wt-c-locked", "wt-c-locked"), ("wt-deleted", "wt-deleted"), ] { let spec = format!("refs/heads/main:refs/heads/{branch}"); let (mappings, specs) = mapping_from_spec(&spec, &repo); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut { worktree_dirs: vec![root.join(path_from_root)], }, type_change: None, edit_index: None, }], "{spec}: checked-out checks are done before checking if a change would actually be required (here it isn't)" ); assert_eq!(out.edits.len(), 0); } Ok(()) } #[test] fn unborn_remote_branches_can_be_created_locally_if_they_are_new() -> Result { let repo = named_repo("unborn"); let (mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/HEAD", &repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( 
out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::New, type_change: None, edit_index: Some(0) }] ); assert_eq!(out.edits.len(), 1, "we are OK with creating unborn refs"); Ok(()) } #[test] fn unborn_remote_branches_can_update_local_unborn_branches() -> Result { let repo = named_repo("unborn"); let (mappings, specs) = mapping_from_spec("HEAD:refs/heads/existing-unborn-symbolic", &repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: None, edit_index: Some(0) }] ); assert_eq!(out.edits.len(), 1, "we are OK with updating unborn refs"); assert_eq!( out.edits[0], RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: change unborn ref".into(), }, expected: PreviousValue::MustExistAndMatch(Target::Symbolic( "refs/heads/main".try_into().expect("valid"), )), new: Target::Symbolic("refs/heads/main".try_into().expect("valid")), }, name: "refs/heads/existing-unborn-symbolic".try_into().expect("valid"), deref: false, } ); let (mappings, specs) = mapping_from_spec("HEAD:refs/heads/existing-unborn-symbolic-other", &repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::Forced, type_change: None, edit_index: Some(0) }] ); assert_eq!( out.edits.len(), 1, "we are OK with creating unborn refs even without actually forcing it" ); assert_eq!( out.edits[0], RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: change unborn ref".into(), }, expected: 
PreviousValue::MustExistAndMatch(Target::Symbolic( "refs/heads/other".try_into().expect("valid"), )), new: Target::Symbolic("refs/heads/main".try_into().expect("valid")), }, name: "refs/heads/existing-unborn-symbolic-other".try_into().expect("valid"), deref: false, } ); Ok(()) } #[test] fn remote_symbolic_refs_with_locally_unavailable_target_result_in_valid_peeled_branches() -> Result { let remote_repo = named_repo("one-commit-with-symref"); let local_repo = named_repo("unborn"); let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/new", &remote_repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &local_repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::New, type_change: None, edit_index: Some(0) }] ); assert_eq!(out.edits.len(), 1); let target = Target::Object(hex_to_id("66f16e4e8baf5c77bb6d0484495bebea80e916ce")); assert_eq!( out.edits[0], RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: storing head".into(), }, expected: PreviousValue::ExistingMustMatch(target.clone()), new: target, }, name: "refs/heads/new".try_into().expect("valid"), deref: false, }, "we create local-refs whose targets aren't present yet, even though the remote knows them.\ This leaves the caller with assuring all refs are mentioned in mappings." 
); Ok(()) } #[test] fn remote_symbolic_refs_with_locally_unavailable_target_dont_overwrite_valid_local_branches() -> Result { let remote_repo = named_repo("one-commit-with-symref"); let local_repo = named_repo("one-commit-with-symref-missing-branch"); let (mappings, specs) = mapping_from_spec("refs/heads/unborn:refs/heads/valid-locally", &remote_repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &local_repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::RejectedToReplaceWithUnborn, type_change: None, edit_index: None }] ); assert_eq!(out.edits.len(), 0); Ok(()) } #[test] fn unborn_remote_refs_dont_overwrite_valid_local_refs() -> Result { let remote_repo = named_repo("unborn"); let local_repo = named_repo("one-commit-with-symref"); let (mappings, specs) = mapping_from_spec("refs/heads/existing-unborn-symbolic:refs/heads/branch", &remote_repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &local_repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, )?; assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::RejectedToReplaceWithUnborn, type_change: None, edit_index: None }], "we don't overwrite locally present refs with unborn ones for safety" ); assert_eq!(out.edits.len(), 0); Ok(()) } #[test] fn local_symbolic_refs_can_be_overwritten() { let repo = repo("two-origins"); for (source, destination, expected_update, expected_edit) in [ ( // attempt to overwrite HEAD isn't possible as the matching engine will normalize the path. That way, `HEAD` // can never be set. This is by design (of git) and we follow it. 
"refs/heads/symbolic", "HEAD", fetch::refs::Update { mode: fetch::refs::update::Mode::New, type_change: None, edit_index: Some(0), }, Some(RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: storing head".into(), }, expected: PreviousValue::ExistingMustMatch(Target::Symbolic( "refs/heads/main".try_into().expect("valid"), )), new: Target::Symbolic("refs/heads/main".try_into().expect("valid")), }, name: "refs/heads/HEAD".try_into().expect("valid"), deref: false, }), ), ( // attempt to overwrite checked out branch fails "refs/remotes/origin/b", // strange, but the remote-refs are simulated and based on local refs "refs/heads/main", fetch::refs::Update { mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut { worktree_dirs: vec![repo.work_dir().expect("present").to_owned()], }, type_change: None, edit_index: None, }, None, ), ( // symbolic becomes direct "refs/heads/main", "refs/heads/symbolic", fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: Some(TypeChange::SymbolicToDirect), edit_index: Some(0), }, Some(RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: no update will be performed".into(), }, expected: PreviousValue::MustExistAndMatch(Target::Symbolic( "refs/heads/main".try_into().expect("valid"), )), new: Target::Object(hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5")), }, name: "refs/heads/symbolic".try_into().expect("valid"), deref: false, }), ), ( // direct becomes symbolic "refs/heads/symbolic", "refs/remotes/origin/a", fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: Some(TypeChange::DirectToSymbolic), edit_index: Some(0), }, Some(RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: no update will be performed".into(), }, expected: 
PreviousValue::MustExistAndMatch(Target::Object(hex_to_id( "f99771fe6a1b535783af3163eba95a927aae21d5", ))), new: Target::Symbolic("refs/heads/main".try_into().expect("valid")), }, name: "refs/remotes/origin/a".try_into().expect("valid"), deref: false, }), ), ( // symbolic to symbolic (same) "refs/heads/symbolic", "refs/heads/symbolic", fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: None, edit_index: Some(0), }, Some(RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: "action: no update will be performed".into(), }, expected: PreviousValue::MustExistAndMatch(Target::Symbolic( "refs/heads/main".try_into().expect("valid"), )), new: Target::Symbolic("refs/heads/main".try_into().expect("valid")), }, name: "refs/heads/symbolic".try_into().expect("valid"), deref: false, }), ), ] { let (mappings, specs) = mapping_from_spec(&format!("{source}:{destination}"), &repo); assert_eq!(mappings.len(), 1); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!(out.edits.len(), usize::from(expected_edit.is_some())); assert_eq!(out.updates, vec![expected_update]); if let Some(expected) = expected_edit { assert_eq!(out.edits, vec![expected]); } } } #[test] fn remote_symbolic_refs_can_always_be_set_as_there_is_no_scenario_where_it_could_be_nonexisting_and_rejected() { let repo = repo("two-origins"); let (mut mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/remotes/origin/new", &repo); mappings.push(Mapping { remote: Source::Ref(gix_protocol::handshake::Ref::Direct { full_ref_name: "refs/heads/main".into(), object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"), }), local: Some("refs/heads/symbolic".into()), spec_index: SpecIndex::ExplicitInRemote(0), }); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], 
fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!(out.edits.len(), 2, "symbolic refs are handled just like any other ref"); assert_eq!( out.updates, vec![ fetch::refs::Update { mode: fetch::refs::update::Mode::New, type_change: None, edit_index: Some(0) }, fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: Some(TypeChange::SymbolicToDirect), edit_index: Some(1) } ], ); let edit = &out.edits[0]; match &edit.change { Change::Update { log, new, .. } => { assert_eq!(log.message, "action: storing ref"); assert!( new.try_name().is_some(), "remote falls back to peeled id as it's the only thing we seem to have locally, it won't refer to a non-existing local ref" ); } _ => unreachable!("only updates"), } } #[test] fn local_direct_refs_are_written_with_symbolic_ones() { let repo = repo("two-origins"); let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!(out.edits.len(), 1); assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: Some(fetch::refs::update::TypeChange::DirectToSymbolic), edit_index: Some(0) }], ); } #[test] fn remote_refs_cannot_map_to_local_head() { let repo = repo("two-origins"); let (mappings, specs) = mapping_from_spec("refs/heads/main:HEAD", &repo); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!(out.edits.len(), 1); assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::New, type_change: None, edit_index: Some(0), }], ); let edit = &out.edits[0]; match &edit.change { Change::Update { log, new, .. 
} => { assert_eq!(log.message, "action: storing head"); assert!( new.try_id().is_some(), "remote is peeled, so local will be peeled as well" ); } _ => unreachable!("only updates"), } assert_eq!( edit.name.as_bstr(), "refs/heads/HEAD", "it's not possible to refer to the local HEAD with refspecs" ); } #[test] fn remote_symbolic_refs_can_be_written_locally_and_point_to_tracking_branch() { let repo = repo("two-origins"); let (mut mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/new-HEAD", &repo); mappings.push(Mapping { remote: Source::Ref(gix_protocol::handshake::Ref::Direct { full_ref_name: "refs/heads/main".into(), object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"), }), local: Some("refs/remotes/origin/main".into()), spec_index: SpecIndex::ExplicitInRemote(0), }); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!( out.updates, vec![ fetch::refs::Update { mode: fetch::refs::update::Mode::New, type_change: None, edit_index: Some(0), }, fetch::refs::Update { mode: fetch::refs::update::Mode::NoChangeNeeded, type_change: None, edit_index: Some(1), } ], ); assert_eq!(out.edits.len(), 2); let edit = &out.edits[0]; match &edit.change { Change::Update { log, new, .. 
} => { assert_eq!(log.message, "action: storing ref"); assert_eq!( new.try_name().expect("symbolic ref").as_bstr(), "refs/remotes/origin/main", "remote is symbolic, so local will be symbolic as well, but is rewritten to tracking branch" ); } _ => unreachable!("only updates"), } assert_eq!(edit.name.as_bstr(), "refs/remotes/origin/new-HEAD",); } #[test] fn non_fast_forward_is_rejected_but_appears_to_be_fast_forward_in_dryrun_mode() { let repo = repo("two-origins"); let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo); let reflog_message: BString = "very special".into(); let out = fetch::refs::update( &repo, RefLogMessage::Override { message: reflog_message.clone(), }, &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::Yes, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::FastForward, type_change: None, edit_index: Some(0), }], "The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects" ); assert_eq!(out.edits.len(), 1); let edit = &out.edits[0]; match &edit.change { Change::Update { log, .. 
} => { assert_eq!(log.message, reflog_message); } _ => unreachable!("only updates"), } } #[test] fn non_fast_forward_is_rejected_if_dry_run_is_disabled() { let (repo, _tmp) = repo_rw("two-origins"); let (mappings, specs) = mapping_from_spec("refs/remotes/origin/g:refs/heads/not-currently-checked-out", &repo); let out = fetch::refs::update( &repo, prefixed("action"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::No, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::RejectedNonFastForward, type_change: None, edit_index: None, }] ); assert_eq!(out.edits.len(), 0); let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo); let out = fetch::refs::update( &repo, prefixed("prefix"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::No, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::FastForward, type_change: None, edit_index: Some(0), }] ); assert_eq!(out.edits.len(), 1); let edit = &out.edits[0]; match &edit.change { Change::Update { log, .. } => { assert_eq!(log.message, format!("prefix: {}", "fast-forward")); } _ => unreachable!("only updates"), } } #[test] fn fast_forwards_are_called_out_even_if_force_is_given() { let (repo, _tmp) = repo_rw("two-origins"); let (mappings, specs) = mapping_from_spec("+refs/heads/main:refs/remotes/origin/g", &repo); let out = fetch::refs::update( &repo, prefixed("prefix"), &mappings, &specs, &[], fetch::Tags::None, fetch::DryRun::No, fetch::WritePackedRefs::Never, ) .unwrap(); assert_eq!( out.updates, vec![fetch::refs::Update { mode: fetch::refs::update::Mode::FastForward, type_change: None, edit_index: Some(0), }] ); assert_eq!(out.edits.len(), 1); let edit = &out.edits[0]; match &edit.change { Change::Update { log, .. 
} => { assert_eq!(log.message, format!("prefix: {}", "fast-forward")); } _ => unreachable!("only updates"), } } fn mapping_from_spec( spec: &str, remote_repo: &gix::Repository, ) -> (Vec, Vec) { let spec = gix_refspec::parse(spec.into(), gix_refspec::parse::Operation::Fetch).unwrap(); let group = gix_refspec::MatchGroup::from_fetch_specs(Some(spec)); let references = remote_repo.references().unwrap(); let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect(); references.push(into_remote_ref(remote_repo.find_reference("HEAD").unwrap())); let mappings = group .match_remotes(references.iter().map(remote_ref_to_item)) .mappings .into_iter() .map(|m| fetch::refmap::Mapping { remote: m.item_index.map_or_else( || match m.lhs { gix_refspec::match_group::SourceRef::ObjectId(id) => fetch::refmap::Source::ObjectId(id), _ => unreachable!("not a ref, must be id: {:?}", m), }, |idx| fetch::refmap::Source::Ref(references[idx].clone()), ), local: m.rhs.map(std::borrow::Cow::into_owned), spec_index: SpecIndex::ExplicitInRemote(m.spec_index), }) .collect(); (mappings, vec![spec.to_owned()]) } fn into_remote_ref(mut r: gix::Reference<'_>) -> gix_protocol::handshake::Ref { let full_ref_name = r.name().as_bstr().into(); match r.target() { TargetRef::Object(id) => gix_protocol::handshake::Ref::Direct { full_ref_name, object: id.into(), }, TargetRef::Symbolic(name) => { let target = name.as_bstr().into(); match r.peel_to_id_in_place() { Ok(id) => gix_protocol::handshake::Ref::Symbolic { full_ref_name, target, tag: None, object: id.detach(), }, Err(_) => gix_protocol::handshake::Ref::Unborn { full_ref_name, target }, } } } } fn remote_ref_to_item(r: &gix_protocol::handshake::Ref) -> gix_refspec::match_group::Item<'_> { let (full_ref_name, target, object) = r.unpack(); static NULL: gix_hash::ObjectId = gix_hash::Kind::Sha1.null(); gix_refspec::match_group::Item { full_ref_name, target: target.unwrap_or(NULL.as_ref()), object, } } fn 
prefixed(action: &str) -> RefLogMessage { RefLogMessage::Prefixed { action: action.into() } } } gix-0.69.1/src/remote/connection/fetch/update_refs/update.rs000064400000000000000000000165501046102023000221620ustar 00000000000000use std::path::PathBuf; use crate::remote::fetch; mod error { /// The error returned when updating references. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindReference(#[from] crate::reference::find::Error), #[error("A remote reference had a name that wasn't considered valid. Corrupt remote repo or insufficient checks on remote?")] InvalidRefName(#[from] gix_validate::reference::name::Error), #[error("Failed to update references to their new position to match their remote locations")] EditReferences(#[from] crate::reference::edit::Error), #[error("Failed to read or iterate worktree dir")] WorktreeListing(#[from] std::io::Error), #[error("Could not open worktree repository")] OpenWorktreeRepo(#[from] crate::open::Error), #[error("Could not find local commit for fast-forward ancestor check")] FindCommit(#[from] crate::object::find::existing::Error), #[error("Could not peel symbolic local reference to its ID")] PeelToId(#[from] crate::reference::peel::Error), #[error("Failed to follow a symbolic reference to assure worktree isn't affected")] FollowSymref(#[from] gix_ref::file::find::existing::Error), } } pub use error::Error; /// The outcome of the refs-update operation at the end of a fetch. #[derive(Debug, Clone)] pub struct Outcome { /// All edits that were performed to update local refs. pub edits: Vec, /// Each update provides more information about what happened to the corresponding mapping. /// Use [`iter_mapping_updates()`][Self::iter_mapping_updates()] to recombine the update information with ref-edits and their /// mapping. pub updates: Vec, } /// Describe the way a ref was updated, with particular focus on how the (peeled) target commit was affected. 
/// /// Note that for all the variants that signal a change or `NoChangeNeeded` it's additionally possible to change the target type /// from symbolic to direct, or the other way around. #[derive(Debug, Clone, PartialEq, Eq)] pub enum Mode { /// No change was attempted as the remote ref didn't change compared to the current ref, or because no remote ref was specified /// in the ref-spec. Note that the expected value is still asserted to uncover potential race conditions with other processes. NoChangeNeeded, /// The old ref's commit was an ancestor of the new one, allowing for a fast-forward without a merge. FastForward, /// The ref was set to point to the new commit from the remote without taking into consideration its ancestry. Forced, /// A new ref has been created as there was none before. New, /// The reference belongs to a tag that was listed by the server but whose target didn't get sent as it doesn't point /// to the commit-graph we were fetching explicitly. /// /// This is kind of update is only happening if `remote..tagOpt` is not set explicitly to either `--tags` or `--no-tags`. ImplicitTagNotSentByRemote, /// The object id to set the target reference to could not be found. RejectedSourceObjectNotFound { /// The id of the object that didn't exist in the object database, even though it should since it should be part of the pack. id: gix_hash::ObjectId, }, /// Tags can never be overwritten (whether the new object would be a fast-forward or not, or unchanged), unless the refspec /// specifies force. RejectedTagUpdate, /// The reference update would not have been a fast-forward, and force is not specified in the ref-spec. RejectedNonFastForward, /// The remote has an unborn symbolic reference where we have one that is set. This means the remote /// has reset itself to a newly initialized state or a state that is highly unusual. 
/// It may also mean that the remote knows the target name, but it's not available locally and not included in the ref-mappings /// to be created, so we would effectively change a valid local ref into one that seems unborn, which is rejected. /// Note that this mode may have an associated ref-edit that is a no-op, or current-state assertion, for logistical reasons only /// and having no edit would be preferred. RejectedToReplaceWithUnborn, /// The update was rejected because the branch is checked out in the given worktree_dir. /// /// Note that the check applies to any known worktree, whether it's present on disk or not. RejectedCurrentlyCheckedOut { /// The path(s) to the worktree directory where the branch is checked out. worktree_dirs: Vec, }, } impl std::fmt::Display for Mode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Mode::NoChangeNeeded => "up-to-date", Mode::FastForward => "fast-forward", Mode::Forced => "forced-update", Mode::New => "new", Mode::ImplicitTagNotSentByRemote => "unrelated tag on remote", Mode::RejectedSourceObjectNotFound { id } => return write!(f, "rejected ({id} not found)"), Mode::RejectedTagUpdate => "rejected (would overwrite existing tag)", Mode::RejectedNonFastForward => "rejected (non-fast-forward)", Mode::RejectedToReplaceWithUnborn => "rejected (refusing to overwrite existing with unborn ref)", Mode::RejectedCurrentlyCheckedOut { worktree_dirs } => { return write!( f, "rejected (cannot write into checked-out branch at \"{}\")", worktree_dirs .iter() .filter_map(|d| d.to_str()) .collect::>() .join(", ") ) } } .fmt(f) } } /// Indicates that a ref changes its type. #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] pub enum TypeChange { /// A local direct reference is changed into a symbolic one. DirectToSymbolic, /// A local symbolic reference is changed into a direct one. 
SymbolicToDirect, } impl Outcome { /// Produce an iterator over all information used to produce the this outcome, ref-update by ref-update, using the `mappings` /// used when producing the ref update. /// /// Note that mappings that don't have a corresponding entry in `refspecs` these will be `None` even though that should never be the case. /// This can happen if the `refspecs` passed in aren't the respecs used to create the `mapping`, and it's up to the caller to sort it out. pub fn iter_mapping_updates<'a, 'b>( &self, mappings: &'a [fetch::refmap::Mapping], refspecs: &'b [gix_refspec::RefSpec], extra_refspecs: &'b [gix_refspec::RefSpec], ) -> impl Iterator< Item = ( &super::Update, &'a fetch::refmap::Mapping, Option<&'b gix_refspec::RefSpec>, Option<&gix_ref::transaction::RefEdit>, ), > { self.updates.iter().zip(mappings.iter()).map(move |(update, mapping)| { ( update, mapping, mapping.spec_index.get(refspecs, extra_refspecs), update.edit_index.and_then(|idx| self.edits.get(idx)), ) }) } } gix-0.69.1/src/remote/connection/mod.rs000064400000000000000000000017371046102023000160660ustar 00000000000000use crate::Remote; /// A function that performs a given credential action, trying to obtain credentials for an operation that needs it. pub type AuthenticateFn<'a> = Box gix_credentials::protocol::Result + 'a>; /// A type to represent an ongoing connection to a remote host, typically with the connection already established. /// /// It can be used to perform a variety of operations with the remote without worrying about protocol details, /// much like a remote procedure call. 
pub struct Connection<'a, 'repo, T> where T: gix_transport::client::Transport, { pub(crate) remote: &'a Remote<'repo>, pub(crate) authenticate: Option>, pub(crate) transport_options: Option>, pub(crate) transport: gix_protocol::SendFlushOnDrop, pub(crate) handshake: Option, pub(crate) trace: bool, } mod access; /// pub mod ref_map; /// pub mod fetch; gix-0.69.1/src/remote/connection/ref_map.rs000064400000000000000000000141741046102023000167170ustar 00000000000000use gix_features::progress::Progress; use gix_protocol::transport::client::Transport; use crate::{ bstr::BString, remote::{fetch, Connection, Direction}, }; /// The error returned by [`Connection::ref_map()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] InitRefMap(#[from] gix_protocol::fetch::refmap::init::Error), #[error("Failed to configure the transport before connecting to {url:?}")] GatherTransportConfig { url: BString, source: crate::config::transport::Error, }, #[error("Failed to configure the transport layer")] ConfigureTransport(#[from] Box), #[error(transparent)] Handshake(#[from] gix_protocol::handshake::Error), #[error(transparent)] Transport(#[from] gix_protocol::transport::client::Error), #[error(transparent)] ConfigureCredentials(#[from] crate::config::credential_helpers::Error), } impl gix_protocol::transport::IsSpuriousError for Error { fn is_spurious(&self) -> bool { match self { Error::Transport(err) => err.is_spurious(), Error::Handshake(err) => err.is_spurious(), _ => false, } } } /// For use in [`Connection::ref_map()`]. #[derive(Debug, Clone)] pub struct Options { /// Use a two-component prefix derived from the ref-spec's source, like `refs/heads/` to let the server pre-filter refs /// with great potential for savings in traffic and local CPU time. Defaults to `true`. pub prefix_from_spec_as_filter_on_remote: bool, /// Parameters in the form of `(name, optional value)` to add to the handshake. 
/// /// This is useful in case of custom servers. pub handshake_parameters: Vec<(String, Option)>, /// A list of refspecs to use as implicit refspecs which won't be saved or otherwise be part of the remote in question. /// /// This is useful for handling `remote..tagOpt` for example. pub extra_refspecs: Vec, } impl Default for Options { fn default() -> Self { Options { prefix_from_spec_as_filter_on_remote: true, handshake_parameters: Vec::new(), extra_refspecs: Vec::new(), } } } impl Connection<'_, '_, T> where T: Transport, { /// List all references on the remote that have been filtered through our remote's [`refspecs`][crate::Remote::refspecs()] /// for _fetching_. /// /// This comes in the form of all matching tips on the remote and the object they point to, along with /// the local tracking branch of these tips (if available). /// /// Note that this doesn't fetch the objects mentioned in the tips nor does it make any change to underlying repository. /// /// # Consumption /// /// Due to management of the transport, it's cleanest to only use it for a single interaction. Thus, it's consumed /// along with the connection. /// /// ### Configuration /// /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well. 
#[allow(clippy::result_large_err)] #[gix_protocol::maybe_async::maybe_async] pub async fn ref_map( mut self, progress: impl Progress, options: Options, ) -> Result<(fetch::RefMap, gix_protocol::handshake::Outcome), Error> { let refmap = self.ref_map_by_ref(progress, options).await; let handshake = self.handshake.expect("refmap always performs handshake"); refmap.map(|map| (map, handshake)) } #[allow(clippy::result_large_err)] #[gix_protocol::maybe_async::maybe_async] pub(crate) async fn ref_map_by_ref( &mut self, mut progress: impl Progress, Options { prefix_from_spec_as_filter_on_remote, handshake_parameters, mut extra_refspecs, }: Options, ) -> Result { let _span = gix_trace::coarse!("remote::Connection::ref_map()"); if let Some(tag_spec) = self.remote.fetch_tags.to_refspec().map(|spec| spec.to_owned()) { if !extra_refspecs.contains(&tag_spec) { extra_refspecs.push(tag_spec); } }; let mut credentials_storage; let url = self.transport.inner.to_url(); let authenticate = match self.authenticate.as_mut() { Some(f) => f, None => { let url = self.remote.url(Direction::Fetch).map_or_else( || gix_url::parse(url.as_ref()).expect("valid URL to be provided by transport"), ToOwned::to_owned, ); credentials_storage = self.configured_credentials(url)?; &mut credentials_storage } }; let repo = self.remote.repo; if self.transport_options.is_none() { self.transport_options = repo .transport_options(url.as_ref(), self.remote.name().map(crate::remote::Name::as_bstr)) .map_err(|err| Error::GatherTransportConfig { source: err, url: url.into_owned(), })?; } if let Some(config) = self.transport_options.as_ref() { self.transport.inner.configure(&**config)?; } let mut handshake = gix_protocol::fetch::handshake( &mut self.transport.inner, authenticate, handshake_parameters, &mut progress, ) .await?; let refmap = gix_protocol::fetch::RefMap::new( progress, &self.remote.fetch_specs, gix_protocol::fetch::Context { handshake: &mut handshake, transport: &mut self.transport.inner, user_agent: 
self.remote.repo.config.user_agent_tuple(), trace_packetlines: self.trace, }, gix_protocol::fetch::refmap::init::Options { prefix_from_spec_as_filter_on_remote, extra_refspecs, }, ) .await?; self.handshake = Some(handshake); Ok(refmap) } } gix-0.69.1/src/remote/errors.rs000064400000000000000000000046241046102023000144620ustar 00000000000000/// pub mod find { use crate::{bstr::BString, config, remote}; /// The error returned by [`Repository::find_remote(…)`](crate::Repository::find_remote()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The value for 'remote..tagOpt` is invalid and must either be '--tags' or '--no-tags'")] TagOpt(#[from] config::key::GenericErrorWithValue), #[error("{kind} ref-spec under `remote.{remote_name}` was invalid")] RefSpec { kind: &'static str, remote_name: BString, source: config::refspec::Error, }, #[error("Neither 'url` nor 'pushUrl' fields were set in the remote's configuration.")] UrlMissing, #[error("The {kind} url under `remote.{remote_name}` was invalid")] Url { kind: &'static str, remote_name: BString, source: config::url::Error, }, #[error(transparent)] Init(#[from] remote::init::Error), } /// pub mod existing { use crate::bstr::BString; /// The error returned by [`Repository::find_remote(…)`](crate::Repository::find_remote()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Find(#[from] super::Error), #[error("remote name could not be parsed as URL")] UrlParse(#[from] gix_url::parse::Error), #[error("The remote named {name:?} did not exist")] NotFound { name: BString }, } } /// pub mod for_fetch { /// The error returned by [`Repository::find_fetch_remote(…)`](crate::Repository::find_fetch_remote()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindExisting(#[from] super::existing::Error), #[error(transparent)] FindExistingReferences(#[from] crate::reference::find::existing::Error), #[error("Could not initialize a URL remote")] Init(#[from] crate::remote::init::Error), #[error("remote name could not be parsed as URL")] UrlParse(#[from] gix_url::parse::Error), #[error("No configured remote could be found, or too many were available")] ExactlyOneRemoteNotAvailable, } } } gix-0.69.1/src/remote/fetch.rs000064400000000000000000000027611046102023000142370ustar 00000000000000/// pub mod negotiate { #[cfg(feature = "credentials")] pub use gix_negotiate::Algorithm; #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub use gix_protocol::fetch::negotiate::Error; } #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub use super::connection::fetch::{ outcome, prepare, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status, }; /// If `Yes`, don't really make changes but do as much as possible to get an idea of what would be done. #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub(crate) enum DryRun { /// Enable dry-run mode and don't actually change the underlying repository in any way. Yes, /// Run the operation like normal, making changes to the underlying repository. No, } /// How to deal with refs when cloning or fetching. #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub(crate) enum WritePackedRefs { /// Normal operation, i.e. don't use packed-refs at all for writing. Never, /// Put ref updates straight into the `packed-refs` file, without creating loose refs first or dealing with them in any way. 
Only, } #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub use gix_protocol::fetch::{refmap, RefMap}; pub use gix_protocol::fetch::{Shallow, Tags}; gix-0.69.1/src/remote/init.rs000064400000000000000000000073001046102023000141030ustar 00000000000000use gix_refspec::RefSpec; use crate::{config, remote, Remote, Repository}; mod error { use crate::bstr::BString; /// The error returned by [`Repository::remote_at(…)`][crate::Repository::remote_at()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Url(#[from] gix_url::parse::Error), #[error("The rewritten {kind} url {rewritten_url:?} failed to parse")] RewrittenUrlInvalid { kind: &'static str, rewritten_url: BString, source: gix_url::parse::Error, }, } } pub use error::Error; use crate::bstr::BString; /// Initialization impl<'repo> Remote<'repo> { #[allow(clippy::too_many_arguments)] pub(crate) fn from_preparsed_config( name_or_url: Option, url: Option, push_url: Option, fetch_specs: Vec, push_specs: Vec, should_rewrite_urls: bool, fetch_tags: remote::fetch::Tags, repo: &'repo Repository, ) -> Result { debug_assert!( url.is_some() || push_url.is_some(), "BUG: fetch or push url must be set at least" ); let (url_alias, push_url_alias) = should_rewrite_urls .then(|| rewrite_urls(&repo.config, url.as_ref(), push_url.as_ref())) .unwrap_or(Ok((None, None)))?; Ok(Remote { name: name_or_url.map(Into::into), url, url_alias, push_url, push_url_alias, fetch_specs, push_specs, fetch_tags, repo, }) } pub(crate) fn from_fetch_url( url: Url, should_rewrite_urls: bool, repo: &'repo Repository, ) -> Result where Url: TryInto, gix_url::parse::Error: From, { Self::from_fetch_url_inner( url.try_into().map_err(|err| Error::Url(err.into()))?, should_rewrite_urls, repo, ) } fn from_fetch_url_inner( url: gix_url::Url, should_rewrite_urls: bool, repo: &'repo Repository, ) -> Result { let (url_alias, _) = should_rewrite_urls .then(|| rewrite_urls(&repo.config, 
Some(&url), None)) .unwrap_or(Ok((None, None)))?; Ok(Remote { name: None, url: Some(url), url_alias, push_url: None, push_url_alias: None, fetch_specs: Vec::new(), push_specs: Vec::new(), fetch_tags: Default::default(), repo, }) } } pub(crate) fn rewrite_url( config: &config::Cache, url: Option<&gix_url::Url>, direction: remote::Direction, ) -> Result, Error> { url.and_then(|url| config.url_rewrite().longest(url, direction)) .map(|url| { gix_url::parse(url.as_ref()).map_err(|err| Error::RewrittenUrlInvalid { kind: match direction { remote::Direction::Fetch => "fetch", remote::Direction::Push => "push", }, source: err, rewritten_url: url, }) }) .transpose() } pub(crate) fn rewrite_urls( config: &config::Cache, url: Option<&gix_url::Url>, push_url: Option<&gix_url::Url>, ) -> Result<(Option, Option), Error> { let url_alias = rewrite_url(config, url, remote::Direction::Fetch)?; let push_url_alias = rewrite_url(config, push_url, remote::Direction::Push)?; Ok((url_alias, push_url_alias)) } gix-0.69.1/src/remote/mod.rs000064400000000000000000000034231046102023000137210ustar 00000000000000use crate::bstr::BStr; use std::borrow::Cow; use std::collections::BTreeSet; /// The direction of an operation carried out (or to be carried out) through a remote. #[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)] pub enum Direction { /// Push local changes to the remote. Push, /// Fetch changes from the remote to the local repository. Fetch, } impl Direction { /// Return ourselves as string suitable for use as verb in an english sentence. pub fn as_str(&self) -> &'static str { match self { Direction::Push => "push", Direction::Fetch => "fetch", } } } /// The name of a remote, either interpreted as symbol like `origin` or as url as returned by [`Remote::name()`][crate::Remote::name()]. #[derive(Debug, PartialEq, Eq, Clone, Ord, PartialOrd, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Name<'repo> { /// A symbolic name, like `origin`. 
/// Note that it has not necessarily been validated yet. Symbol(Cow<'repo, str>), /// A url pointing to the remote host directly. Url(Cow<'repo, BStr>), } /// A type-definition for a sorted list of unvalidated remote names - they have been read straight from the configuration. pub type Names<'a> = BTreeSet>; /// pub mod name; mod build; mod errors; pub use errors::find; /// pub mod init; /// pub mod fetch; /// #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] pub mod connect; #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] mod connection; #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] pub use connection::{ref_map, AuthenticateFn, Connection}; /// pub mod save; mod access; /// pub mod url; gix-0.69.1/src/remote/name.rs000064400000000000000000000051771046102023000140720ustar 00000000000000use std::borrow::Cow; use super::Name; use crate::bstr::{BStr, BString, ByteSlice, ByteVec}; /// The error returned by [validated()]. #[derive(Debug, thiserror::Error)] #[error("remote names must be valid within refspecs for fetching: {name:?}")] #[allow(missing_docs)] pub struct Error { pub source: gix_refspec::parse::Error, pub name: BString, } /// Return `name` if it is valid as symbolic remote name. /// /// This means it has to be valid within a the ref path of a tracking branch. pub fn validated(name: impl Into) -> Result { let name = name.into(); match gix_refspec::parse( format!("refs/heads/test:refs/remotes/{name}/test").as_str().into(), gix_refspec::parse::Operation::Fetch, ) { Ok(_) => Ok(name), Err(err) => Err(Error { source: err, name }), } } impl Name<'_> { /// Obtain the name as string representation. pub fn as_bstr(&self) -> &BStr { match self { Name::Symbol(v) => v.as_ref().into(), Name::Url(v) => v.as_ref(), } } /// Return this instance as a symbolic name, if it is one. 
pub fn as_symbol(&self) -> Option<&str> { match self { Name::Symbol(n) => n.as_ref().into(), Name::Url(_) => None, } } /// Return this instance as url, if it is one. pub fn as_url(&self) -> Option<&BStr> { match self { Name::Url(n) => n.as_ref().into(), Name::Symbol(_) => None, } } /// Return a fully-owned copy of this instance. pub fn to_owned(&self) -> Name<'static> { match self { Name::Symbol(s) => Name::Symbol(s.clone().into_owned().into()), Name::Url(s) => Name::Url(s.clone().into_owned().into()), } } } impl<'a> TryFrom> for Name<'a> { type Error = Cow<'a, BStr>; fn try_from(name: Cow<'a, BStr>) -> Result { if name.contains(&b'/') || name.as_ref() == "." { Ok(Name::Url(name)) } else { match name { Cow::Borrowed(n) => n.to_str().ok().map(Cow::Borrowed).ok_or(name), Cow::Owned(n) => Vec::from(n) .into_string() .map_err(|err| Cow::Owned(err.into_vec().into())) .map(Cow::Owned), } .map(Name::Symbol) } } } impl From for Name<'static> { fn from(name: BString) -> Self { Self::try_from(Cow::Owned(name)).expect("String is never illformed") } } impl AsRef for Name<'_> { fn as_ref(&self) -> &BStr { self.as_bstr() } } gix-0.69.1/src/remote/save.rs000064400000000000000000000117141046102023000141020ustar 00000000000000use crate::{ bstr::{BStr, BString}, config, remote, Remote, }; /// The error returned by [`Remote::save_to()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The remote pointing to {} is anonymous and can't be saved.", url.to_bstring())] NameMissing { url: gix_url::Url }, } /// The error returned by [`Remote::save_as_to()`]. /// /// Note that this type should rather be in the `as` module, but cannot be as it's part of the Rust syntax. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum AsError { #[error(transparent)] Save(#[from] Error), #[error(transparent)] Name(#[from] crate::remote::name::Error), } /// Serialize into git-config. 
impl Remote<'_> { /// Save ourselves to the given `config` if we are a named remote or fail otherwise. /// /// Note that all sections named `remote ""` will be cleared of all values we are about to write, /// and the last `remote ""` section will be containing all relevant values so that reloading the remote /// from `config` would yield the same in-memory state. #[allow(clippy::result_large_err)] pub fn save_to(&self, config: &mut gix_config::File<'static>) -> Result<(), Error> { fn as_key(name: &str) -> gix_config::parse::section::ValueName<'_> { name.try_into().expect("valid") } let name = self.name().ok_or_else(|| Error::NameMissing { url: self .url .as_ref() .or(self.push_url.as_ref()) .expect("one url is always set") .to_owned(), })?; if let Some(section_ids) = config.sections_and_ids_by_name("remote").map(|it| { it.filter_map(|(s, id)| (s.header().subsection_name() == Some(name.as_bstr())).then_some(id)) .collect::>() }) { let mut sections_to_remove = Vec::new(); const KEYS_TO_REMOVE: &[&str] = &[ config::tree::Remote::URL.name, config::tree::Remote::PUSH_URL.name, config::tree::Remote::FETCH.name, config::tree::Remote::PUSH.name, config::tree::Remote::TAG_OPT.name, ]; for id in section_ids { let mut section = config.section_mut_by_id(id).expect("just queried"); let was_empty = section.num_values() == 0; for key in KEYS_TO_REMOVE { while section.remove(key).is_some() {} } let is_empty_after_deletions_of_values_to_be_written = section.num_values() == 0; if !was_empty && is_empty_after_deletions_of_values_to_be_written { sections_to_remove.push(id); } } for id in sections_to_remove { config.remove_section_by_id(id); } } let mut section = config .section_mut_or_create_new("remote", Some(name.as_ref())) .expect("section name is validated and 'remote' is acceptable"); if let Some(url) = self.url.as_ref() { section.push(as_key("url"), Some(url.to_bstring().as_ref())); } if let Some(url) = self.push_url.as_ref() { section.push(as_key("pushurl"), 
Some(url.to_bstring().as_ref())); } if self.fetch_tags != Default::default() { section.push( as_key(config::tree::Remote::TAG_OPT.name), BStr::new(match self.fetch_tags { remote::fetch::Tags::All => "--tags", remote::fetch::Tags::None => "--no-tags", remote::fetch::Tags::Included => unreachable!("BUG: the default shouldn't be written and we try"), }) .into(), ); } for (key, spec) in self .fetch_specs .iter() .map(|spec| ("fetch", spec)) .chain(self.push_specs.iter().map(|spec| ("push", spec))) { section.push(as_key(key), Some(spec.to_ref().to_bstring().as_ref())); } Ok(()) } /// Forcefully set our name to `name` and write our state to `config` similar to [`save_to()`][Self::save_to()]. /// /// Note that this sets a name for anonymous remotes, but overwrites the name for those who were named before. /// If this name is different from the current one, the git configuration will still contain the previous name, /// and the caller should account for that. #[allow(clippy::result_large_err)] pub fn save_as_to( &mut self, name: impl Into, config: &mut gix_config::File<'static>, ) -> Result<(), AsError> { let name = crate::remote::name::validated(name)?; let prev_name = self.name.take(); self.name = Some(name.into()); self.save_to(config).map_err(|err| { self.name = prev_name; err.into() }) } } gix-0.69.1/src/remote/url/mod.rs000064400000000000000000000004471046102023000145260ustar 00000000000000mod rewrite; /// #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub mod scheme_permission; pub(crate) use rewrite::Rewrite; #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] pub(crate) use scheme_permission::SchemePermission; gix-0.69.1/src/remote/url/rewrite.rs000064400000000000000000000066271046102023000154360ustar 00000000000000use gix_features::threading::OwnShared; use crate::{ bstr::{BStr, BString, ByteVec}, config, remote::Direction, }; #[derive(Debug, Clone)] struct Replace { find: BString, with: 
OwnShared, } #[derive(Default, Debug, Clone)] pub(crate) struct Rewrite { url_rewrite: Vec, push_url_rewrite: Vec, } /// Init impl Rewrite { pub fn from_config( config: &gix_config::File<'static>, mut filter: fn(&gix_config::file::Metadata) -> bool, ) -> Rewrite { config .sections_by_name_and_filter("url", &mut filter) .map(|sections| { let mut url_rewrite = Vec::new(); let mut push_url_rewrite = Vec::new(); for section in sections { let replace = match section.header().subsection_name() { Some(base) => OwnShared::new(base.to_owned()), None => continue, }; for instead_of in section.values(config::tree::Url::INSTEAD_OF.name) { url_rewrite.push(Replace { with: OwnShared::clone(&replace), find: instead_of.into_owned(), }); } for instead_of in section.values(config::tree::Url::PUSH_INSTEAD_OF.name) { push_url_rewrite.push(Replace { with: OwnShared::clone(&replace), find: instead_of.into_owned(), }); } } Rewrite { url_rewrite, push_url_rewrite, } }) .unwrap_or_default() } } /// Access impl Rewrite { fn replacements_for(&self, direction: Direction) -> &[Replace] { match direction { Direction::Fetch => &self.url_rewrite, Direction::Push => &self.push_url_rewrite, } } pub fn longest(&self, url: &gix_url::Url, direction: Direction) -> Option { if self.replacements_for(direction).is_empty() { None } else { let mut url = url.to_bstring(); self.rewrite_url_in_place(&mut url, direction).then_some(url) } } /// Rewrite the given `url` of `direction` and return `true` if a replacement happened. /// /// Note that the result must still be checked for validity, it might not be a valid URL as we do a syntax-unaware replacement. 
pub fn rewrite_url_in_place(&self, url: &mut BString, direction: Direction) -> bool { self.replacements_for(direction) .iter() .fold(None::<(usize, &BStr)>, |mut acc, replace| { if url.starts_with(replace.find.as_ref()) { let (bytes_matched, prev_rewrite_with) = acc.get_or_insert((replace.find.len(), replace.with.as_slice().into())); if *bytes_matched < replace.find.len() { *bytes_matched = replace.find.len(); *prev_rewrite_with = replace.with.as_slice().into(); } }; acc }) .map(|(bytes_matched, replace_with)| { url.replace_range(..bytes_matched, replace_with); }) .is_some() } } gix-0.69.1/src/remote/url/scheme_permission.rs000064400000000000000000000077141046102023000174670ustar 00000000000000use std::{borrow::Cow, collections::BTreeMap}; use crate::{ bstr::{BStr, BString, ByteSlice}, config, config::tree::{gitoxide, Key, Protocol}, }; /// All allowed values of the `protocol.allow` key. #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub enum Allow { /// Allow use this protocol. Always, /// Forbid using this protocol Never, /// Only supported if the `GIT_PROTOCOL_FROM_USER` is unset or is set to `1`. User, } impl Allow { /// Return true if we represent something like 'allow == true'. pub fn to_bool(self, user_allowed: Option) -> bool { match self { Allow::Always => true, Allow::Never => false, Allow::User => user_allowed.unwrap_or(true), } } } impl<'a> TryFrom> for Allow { type Error = BString; fn try_from(v: Cow<'a, BStr>) -> Result { Ok(match v.as_ref().as_bytes() { b"never" => Allow::Never, b"always" => Allow::Always, b"user" => Allow::User, unknown => return Err(unknown.into()), }) } } #[derive(Debug, Clone)] pub(crate) struct SchemePermission { /// `None`, env-var is unset or wasn't queried, otherwise true if `GIT_PROTOCOL_FROM_USER` is `1`. user_allowed: Option, /// The general allow value from `protocol.allow`. 
allow: Option, /// Per scheme allow information allow_per_scheme: BTreeMap, } /// Init impl SchemePermission { /// NOTE: _intentionally without leniency_ pub fn from_config( config: &gix_config::File<'static>, mut filter: fn(&gix_config::file::Metadata) -> bool, ) -> Result { let allow: Option = config .string_filter("protocol.allow", &mut filter) .map(|value| Protocol::ALLOW.try_into_allow(value, None)) .transpose()?; let mut saw_user = allow.map_or(false, |allow| allow == Allow::User); let allow_per_scheme = match config.sections_by_name_and_filter("protocol", &mut filter) { Some(it) => { let mut map = BTreeMap::default(); for (section, scheme) in it.filter_map(|section| { section.header().subsection_name().and_then(|scheme| { scheme .to_str() .ok() .map(|scheme| (section, gix_url::Scheme::from(scheme))) }) }) { if let Some(value) = section .value("allow") .map(|value| Protocol::ALLOW.try_into_allow(value, Some(scheme.as_str()))) .transpose()? { saw_user |= value == Allow::User; map.insert(scheme, value); } } map } None => Default::default(), }; let user_allowed = saw_user.then(|| { config .string_filter(gitoxide::Allow::PROTOCOL_FROM_USER.logical_name().as_str(), &mut filter) .map_or(true, |val| val.as_ref() == "1") }); Ok(SchemePermission { allow, allow_per_scheme, user_allowed, }) } } /// Access impl SchemePermission { pub fn allow(&self, scheme: &gix_url::Scheme) -> bool { self.allow_per_scheme.get(scheme).or(self.allow.as_ref()).map_or_else( || { use gix_url::Scheme::*; match scheme { File | Git | Ssh | Http | Https => true, Ext(_) => false, // TODO: figure out what 'ext' really entails, and what 'other' protocols are which aren't representable for us yet } }, |allow| allow.to_bool(self.user_allowed), ) } } gix-0.69.1/src/repository/attributes.rs000064400000000000000000000134601046102023000162560ustar 00000000000000//! exclude information use crate::{config, AttributeStack, Repository}; /// The error returned by [`Repository::attributes()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigureAttributes(#[from] config::attribute_stack::Error), #[error(transparent)] ConfigureExcludes(#[from] config::exclude_stack::Error), } impl Repository { /// Configure a file-system cache for accessing git attributes *and* excludes on a per-path basis. /// /// Use `attribute_source` to specify where to read attributes from. Also note that exclude information will /// always try to read `.gitignore` files from disk before trying to read it from the `index`. /// /// Note that no worktree is required for this to work, even though access to in-tree `.gitattributes` and `.gitignore` files /// would require a non-empty `index` that represents a git tree. /// /// This takes into consideration all the usual repository configuration, namely: /// /// * `$XDG_CONFIG_HOME/…/ignore|attributes` if `core.excludesFile|attributesFile` is *not* set, otherwise use the configured file. /// * `$GIT_DIR/info/exclude|attributes` if present. #[cfg(feature = "attributes")] pub fn attributes( &self, index: &gix_index::State, attributes_source: gix_worktree::stack::state::attributes::Source, ignore_source: gix_worktree::stack::state::ignore::Source, exclude_overrides: Option, ) -> Result, Error> { let case = if self.config.ignore_case { gix_glob::pattern::Case::Fold } else { gix_glob::pattern::Case::Sensitive }; let (attributes, mut buf) = self.config.assemble_attribute_globals( self.git_dir(), attributes_source, self.options.permissions.attributes, )?; let ignore = self.config .assemble_exclude_globals(self.git_dir(), exclude_overrides, ignore_source, &mut buf)?; let state = gix_worktree::stack::State::AttributesAndIgnoreStack { attributes, ignore }; let attribute_list = state.id_mappings_from_index(index, index.path_backing(), case); Ok(AttributeStack::new( gix_worktree::Stack::new( // this is alright as we don't cause mutation of that directory, it's virtual. 
self.work_dir().unwrap_or(self.git_dir()), state, case, buf, attribute_list, ), self, )) } /// Like [attributes()][Self::attributes()], but without access to exclude/ignore information. #[cfg(feature = "attributes")] pub fn attributes_only( &self, index: &gix_index::State, attributes_source: gix_worktree::stack::state::attributes::Source, ) -> Result, config::attribute_stack::Error> { let case = if self.config.ignore_case { gix_glob::pattern::Case::Fold } else { gix_glob::pattern::Case::Sensitive }; let (attributes, buf) = self.config.assemble_attribute_globals( self.git_dir(), attributes_source, self.options.permissions.attributes, )?; let state = gix_worktree::stack::State::AttributesStack(attributes); let attribute_list = state.id_mappings_from_index(index, index.path_backing(), case); Ok(AttributeStack::new( gix_worktree::Stack::new( // this is alright as we don't cause mutation of that directory, it's virtual. self.work_dir().unwrap_or(self.git_dir()), state, case, buf, attribute_list, ), self, )) } /// Configure a file-system cache checking if files below the repository are excluded, reading `.gitignore` files from /// the specified `source`. /// /// Note that no worktree is required for this to work, even though access to in-tree `.gitignore` files would require /// a non-empty `index` that represents a tree with `.gitignore` files. /// /// This takes into consideration all the usual repository configuration, namely: /// /// * `$XDG_CONFIG_HOME/…/ignore` if `core.excludesFile` is *not* set, otherwise use the configured file. /// * `$GIT_DIR/info/exclude` if present. /// /// When only excludes are desired, this is the most efficient way to obtain them. Otherwise use /// [`Repository::attributes()`] for accessing both attributes and excludes. 
#[doc(alias = "is_path_ignored", alias = "git2")] #[cfg(feature = "excludes")] pub fn excludes( &self, index: &gix_index::State, overrides: Option, source: gix_worktree::stack::state::ignore::Source, ) -> Result, config::exclude_stack::Error> { let case = if self.config.ignore_case { gix_glob::pattern::Case::Fold } else { gix_glob::pattern::Case::Sensitive }; let mut buf = Vec::with_capacity(512); let ignore = self .config .assemble_exclude_globals(self.git_dir(), overrides, source, &mut buf)?; let state = gix_worktree::stack::State::IgnoreStack(ignore); let attribute_list = state.id_mappings_from_index(index, index.path_backing(), case); Ok(AttributeStack::new( gix_worktree::Stack::new( // this is alright as we don't cause mutation of that directory, it's virtual. self.work_dir().unwrap_or(self.git_dir()), state, case, buf, attribute_list, ), self, )) } } gix-0.69.1/src/repository/cache.rs000064400000000000000000000047701046102023000151370ustar 00000000000000/// Configure how caches are used to speed up various git repository operations impl crate::Repository { /// Sets the amount of space used at most for caching most recently accessed fully decoded objects, to `Some(bytes)`, /// or `None` to deactivate it entirely. /// /// Note that it is unset by default but can be enabled once there is time for performance optimization. /// Well-chosen cache sizes can improve performance particularly if objects are accessed multiple times in a row. /// The cache is configured to grow gradually. /// /// Note that a cache on application level should be considered as well as the best object access is not doing one. pub fn object_cache_size(&mut self, bytes: impl Into>) { let bytes = bytes.into(); match bytes { Some(0) => self.objects.unset_object_cache(), Some(bytes) => self .objects .set_object_cache(move || Box::new(crate::object::cache::MemoryCappedHashmap::new(bytes))), None => self.objects.unset_object_cache(), } } /// Set an object cache of size `bytes` if none is set. 
/// /// Use this method to avoid overwriting any existing value while assuring better performance in case no value is set. pub fn object_cache_size_if_unset(&mut self, bytes: usize) { if !self.objects.has_object_cache() { self.object_cache_size(bytes); } } /// Return the amount of bytes the object cache [should be set to](Self::object_cache_size_if_unset) to perform /// diffs between trees who are similar to `index` in a typical source code repository. /// /// Currently, this allocates about 10MB for every 10k files in `index`, and a minimum of 4KB. #[cfg(feature = "index")] pub fn compute_object_cache_size_for_tree_diffs(&self, index: &gix_index::State) -> usize { let num_tracked = index.entries().len(); let ten_mb_for_every_10k_files = (num_tracked as f32 / 10_000.0) * (10 * 1024 * 1024) as f32; (ten_mb_for_every_10k_files as usize).max(4 * 1024) } } /// Handling of InMemory object writing impl crate::Repository { /// When writing objects, keep them in memory instead of writing them to disk. /// This makes any change to the object database non-persisting, while keeping the view /// to the object database consistent for this instance. pub fn with_object_memory(mut self) -> Self { self.objects.enable_object_memory(); self } } gix-0.69.1/src/repository/config/branch.rs000064400000000000000000000241311046102023000165670ustar 00000000000000use std::{borrow::Cow, collections::BTreeSet}; use gix_ref::{FullName, FullNameRef}; use crate::bstr::BStr; use crate::config::cache::util::ApplyLeniencyDefault; use crate::config::tree::{Branch, Push}; use crate::repository::{branch_remote_ref_name, branch_remote_tracking_ref_name}; use crate::{push, remote}; /// Query configuration related to branches. impl crate::Repository { /// Return a set of unique short branch names for which custom configuration exists in the configuration, /// if we deem them [trustworthy][crate::open::Options::filter_config_section()]. 
/// /// ### Note /// /// Branch names that have illformed UTF-8 will silently be skipped. pub fn branch_names(&self) -> BTreeSet<&str> { self.subsection_str_names_of("branch") } /// Returns the validated reference on the remote associated with the given `name`, /// which will be used when *merging*. /// The returned value corresponds to the `branch..merge` configuration key. /// /// Returns `None` if there is no value at the given key, or if no remote or remote ref is configured. /// May return an error if the reference name to be returned is invalid. /// /// ### Note /// /// This name refers to what Git calls upstream branch (as opposed to upstream *tracking* branch). /// The value is also fast to retrieve compared to its tracking branch. /// Also note that a [remote::Direction] isn't used here as Git only supports (and requires) configuring /// the remote to fetch from, not the one to push to. /// /// See also [`Reference::remote_ref_name()`](crate::Reference::remote_ref_name()). #[doc(alias = "branch_upstream_name", alias = "git2")] pub fn branch_remote_ref_name( &self, name: &FullNameRef, direction: remote::Direction, ) -> Option, branch_remote_ref_name::Error>> { match direction { remote::Direction::Fetch => { let short_name = name.shorten(); self.config .resolved .string_by("branch", Some(short_name), Branch::MERGE.name) .map(|name| crate::config::tree::branch::Merge::try_into_fullrefname(name).map_err(Into::into)) } remote::Direction::Push => { let remote = match self.branch_remote(name.shorten(), direction)? 
{ Ok(r) => r, Err(err) => return Some(Err(err.into())), }; if remote.push_specs.is_empty() { let push_default = match self .config .resolved .string(Push::DEFAULT) .map_or(Ok(Default::default()), |v| { Push::DEFAULT .try_into_default(v) .with_lenient_default(self.config.lenient_config) }) { Ok(v) => v, Err(err) => return Some(Err(err.into())), }; match push_default { push::Default::Nothing => None, push::Default::Current | push::Default::Matching => Some(Ok(Cow::Owned(name.to_owned()))), push::Default::Upstream => self.branch_remote_ref_name(name, remote::Direction::Fetch), push::Default::Simple => match self.branch_remote_ref_name(name, remote::Direction::Fetch)? { Ok(fetch_ref) if fetch_ref.as_ref() == name => Some(Ok(fetch_ref)), Err(err) => Some(Err(err)), Ok(_different_fetch_ref) => None, }, } } else { matching_remote(name, remote.push_specs.iter(), self.object_hash()) .map(|res| res.map_err(Into::into)) } } } } /// Return the validated name of the reference that tracks the corresponding reference of `name` on the remote for /// `direction`. Note that a branch with that name might not actually exist. /// /// * with `remote` being [remote::Direction::Fetch], we return the tracking branch that is on the destination /// side of a `src:dest` refspec. For instance, with `name` being `main` and the default refspec /// `refs/heads/*:refs/remotes/origin/*`, `refs/heads/main` would match and produce `refs/remotes/origin/main`. /// * with `remote` being [remote::Direction::Push], we return the tracking branch that corresponds to the remote /// branch that we would push to. For instance, with `name` being `main` and no setup at all, we /// would push to `refs/heads/main` on the remote. And that one would be fetched matching the /// `refs/heads/*:refs/remotes/origin/*` fetch refspec, hence `refs/remotes/origin/main` is returned. 
/// Note that `push` refspecs can be used to map `main` to `other` (using a push refspec `refs/heads/main:refs/heads/other`), /// which would then lead to `refs/remotes/origin/other` to be returned instead. /// /// Note that if there is an ambiguity, that is if `name` maps to multiple tracking branches, the first matching mapping /// is returned, according to the order in which the fetch or push refspecs occur in the configuration file. /// /// See also [`Reference::remote_tracking_ref_name()`](crate::Reference::remote_tracking_ref_name()). #[doc(alias = "branch_upstream_name", alias = "git2")] pub fn branch_remote_tracking_ref_name( &self, name: &FullNameRef, direction: remote::Direction, ) -> Option, branch_remote_tracking_ref_name::Error>> { let remote_ref = match self.branch_remote_ref_name(name, direction)? { Ok(r) => r, Err(err) => return Some(Err(err.into())), }; let remote = match self.branch_remote(name.shorten(), direction)? { Ok(r) => r, Err(err) => return Some(Err(err.into())), }; if remote.fetch_specs.is_empty() { return None; } matching_remote(remote_ref.as_ref(), remote.fetch_specs.iter(), self.object_hash()) .map(|res| res.map_err(Into::into)) } /// Returns the unvalidated name of the remote associated with the given `short_branch_name`, /// typically `main` instead of `refs/heads/main`. /// In some cases, the returned name will be an URL. /// Returns `None` if the remote was not found or if the name contained illformed UTF-8. /// /// * if `direction` is [remote::Direction::Fetch], we will query the `branch..remote` configuration. /// * if `direction` is [remote::Direction::Push], the push remote will be queried by means of `branch..pushRemote` /// or `remote.pushDefault` as fallback. /// /// See also [`Reference::remote_name()`](crate::Reference::remote_name()) for a more typesafe version /// to be used when a `Reference` is available. /// /// `short_branch_name` can typically be obtained by [shortening a full branch name](FullNameRef::shorten()). 
#[doc(alias = "branch_upstream_remote", alias = "git2")] pub fn branch_remote_name<'a>( &self, short_branch_name: impl Into<&'a BStr>, direction: remote::Direction, ) -> Option> { let name = short_branch_name.into(); let config = &self.config.resolved; (direction == remote::Direction::Push) .then(|| { config .string_by("branch", Some(name), Branch::PUSH_REMOTE.name) .or_else(|| config.string(crate::config::tree::Remote::PUSH_DEFAULT)) }) .flatten() .or_else(|| config.string_by("branch", Some(name), Branch::REMOTE.name)) .and_then(|name| name.try_into().ok()) } /// Like [`branch_remote_name(…)`](Self::branch_remote_name()), but returns a [Remote](crate::Remote). /// `short_branch_name` is the name to use for looking up `branch..*` values in the /// configuration. /// /// See also [`Reference::remote()`](crate::Reference::remote()). pub fn branch_remote<'a>( &self, short_branch_name: impl Into<&'a BStr>, direction: remote::Direction, ) -> Option, remote::find::existing::Error>> { let name = self.branch_remote_name(short_branch_name, direction)?; self.try_find_remote(name.as_bstr()) .map(|res| res.map_err(Into::into)) .or_else(|| match name { remote::Name::Url(url) => gix_url::parse(url.as_ref()) .map_err(Into::into) .and_then(|url| { self.remote_at(url) .map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err))) }) .into(), remote::Name::Symbol(_) => None, }) } } fn matching_remote<'a>( lhs: &FullNameRef, specs: impl IntoIterator, object_hash: gix_hash::Kind, ) -> Option, gix_validate::reference::name::Error>> { let search = gix_refspec::MatchGroup { specs: specs .into_iter() .map(gix_refspec::RefSpec::to_ref) .filter(|spec| spec.source().is_some() && spec.destination().is_some()) .collect(), }; let null_id = object_hash.null(); let out = search.match_remotes( Some(gix_refspec::match_group::Item { full_ref_name: lhs.as_bstr(), target: &null_id, object: None, }) .into_iter(), ); out.mappings.into_iter().next().and_then(|m| { m.rhs.map(|name| { 
FullName::try_from(name.into_owned()) .map(Cow::Owned) .map_err(Into::into) }) }) } gix-0.69.1/src/repository/config/mod.rs000064400000000000000000000143341046102023000161150ustar 00000000000000use std::collections::BTreeSet; use crate::{bstr::ByteSlice, config}; /// General Configuration impl crate::Repository { /// Return a snapshot of the configuration as seen upon opening the repository. pub fn config_snapshot(&self) -> config::Snapshot<'_> { config::Snapshot { repo: self } } /// Return a mutable snapshot of the configuration as seen upon opening the repository, starting a transaction. /// When the returned instance is dropped, it is applied in full, even if the reason for the drop is an error. /// /// Note that changes to the configuration are in-memory only and are observed only the this instance /// of the [`Repository`][crate::Repository]. pub fn config_snapshot_mut(&mut self) -> config::SnapshotMut<'_> { let config = self.config.resolved.as_ref().clone(); config::SnapshotMut { repo: Some(self), config, } } /// Return filesystem options as retrieved from the repository configuration. /// /// Note that these values have not been [probed](gix_fs::Capabilities::probe()). pub fn filesystem_options(&self) -> Result { self.config.fs_capabilities() } /// Return filesystem options on how to perform stat-checks, typically in relation to the index. /// /// Note that these values have not been [probed](gix_fs::Capabilities::probe()). #[cfg(feature = "index")] pub fn stat_options(&self) -> Result { self.config.stat_options() } /// The options used to open the repository. pub fn open_options(&self) -> &crate::open::Options { &self.options } /// Obtain options for use when connecting via `ssh`. 
#[cfg(feature = "blocking-network-client")] pub fn ssh_connect_options( &self, ) -> Result { use crate::config::{ cache::util::ApplyLeniency, tree::{gitoxide, Core, Ssh}, }; let config = &self.config.resolved; let mut trusted = self.filter_config_section(); let mut fallback_active = false; let ssh_command = config .string_filter(Core::SSH_COMMAND, &mut trusted) .or_else(|| { fallback_active = true; config.string_filter(gitoxide::Ssh::COMMAND_WITHOUT_SHELL_FALLBACK, &mut trusted) }) .map(|cmd| gix_path::from_bstr(cmd).into_owned().into()); let opts = gix_protocol::transport::client::ssh::connect::Options { disallow_shell: fallback_active, command: ssh_command, kind: config .string_filter("ssh.variant", &mut trusted) .and_then(|variant| Ssh::VARIANT.try_into_variant(variant).transpose()) .transpose() .with_leniency(self.options.lenient_config)?, }; Ok(opts) } /// Return the context to be passed to any spawned program that is supposed to interact with the repository, like /// hooks or filters. #[cfg(feature = "attributes")] pub fn command_context(&self) -> Result { use crate::config::{cache::util::ApplyLeniency, tree::gitoxide}; let pathspec_boolean = |key: &'static config::tree::keys::Boolean| { self.config .resolved .boolean(key) .map(|value| key.enrich_error(value)) .transpose() .with_leniency(self.config.lenient_config) }; Ok(gix_command::Context { stderr: { self.config .resolved .boolean(gitoxide::Core::EXTERNAL_COMMAND_STDERR) .map(|value| gitoxide::Core::EXTERNAL_COMMAND_STDERR.enrich_error(value)) .transpose() .with_leniency(self.config.lenient_config)? .unwrap_or(true) .into() }, git_dir: self.git_dir().to_owned().into(), worktree_dir: self.work_dir().map(ToOwned::to_owned), no_replace_objects: config::shared::is_replace_refs_enabled( &self.config.resolved, self.config.lenient_config, self.filter_config_section(), )? 
.map(|enabled| !enabled), ref_namespace: self.refs.namespace.as_ref().map(|ns| ns.as_bstr().to_owned()), literal_pathspecs: pathspec_boolean(&gitoxide::Pathspec::LITERAL)?, glob_pathspecs: pathspec_boolean(&gitoxide::Pathspec::GLOB)? .or(pathspec_boolean(&gitoxide::Pathspec::NOGLOB)?), icase_pathspecs: pathspec_boolean(&gitoxide::Pathspec::ICASE)?, }) } /// The kind of object hash the repository is configured to use. pub fn object_hash(&self) -> gix_hash::Kind { self.config.object_hash } /// Return the algorithm to perform diffs or merges with. /// /// In case of merges, a diff is performed under the hood in order to learn which hunks need merging. #[cfg(feature = "blob-diff")] pub fn diff_algorithm(&self) -> Result { self.config.diff_algorithm() } } mod branch; mod remote; #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] mod transport; impl crate::Repository { pub(crate) fn filter_config_section(&self) -> fn(&gix_config::file::Metadata) -> bool { self.options .filter_config_section .unwrap_or(config::section::is_trusted) } fn subsection_str_names_of<'a>(&'a self, header_name: &'a str) -> BTreeSet<&'a str> { self.config .resolved .sections_by_name(header_name) .map(|it| { let filter = self.filter_config_section(); it.filter(move |s| filter(s.meta())) .filter_map(|section| section.header().subsection_name().and_then(|b| b.to_str().ok())) .collect() }) .unwrap_or_default() } } gix-0.69.1/src/repository/config/remote.rs000064400000000000000000000037541046102023000166350ustar 00000000000000use crate::bstr::BStr; use std::borrow::Cow; use crate::config::tree::{Remote, Section}; use crate::remote; /// Query configuration related to remotes. impl crate::Repository { /// Returns a sorted list unique of symbolic names of remotes that /// we deem [trustworthy][crate::open::Options::filter_config_section()]. 
pub fn remote_names(&self) -> remote::Names<'_> { self.config .resolved .sections_by_name(Remote.name()) .map(|it| { let filter = self.filter_config_section(); it.filter(move |s| filter(s.meta())) .filter_map(|section| section.header().subsection_name().map(Cow::Borrowed)) .collect() }) .unwrap_or_default() } /// Obtain the branch-independent name for a remote for use in the given `direction`, or `None` if it could not be determined. /// /// For _fetching_, use the only configured remote, or default to `origin` if it exists. /// For _pushing_, use the `remote.pushDefault` trusted configuration key, or fall back to the rules for _fetching_. /// /// # Notes /// /// It's up to the caller to determine what to do if the current `head` is unborn or detached. pub fn remote_default_name(&self, direction: remote::Direction) -> Option> { let name = (direction == remote::Direction::Push) .then(|| { self.config .resolved .string_filter(Remote::PUSH_DEFAULT, &mut self.filter_config_section()) }) .flatten(); name.or_else(|| { let names = self.remote_names(); match names.len() { 0 => None, 1 => names.into_iter().next(), _more_than_one => { let origin = Cow::Borrowed("origin".into()); names.contains(&origin).then_some(origin) } } }) } } gix-0.69.1/src/repository/config/transport.rs000064400000000000000000000537341046102023000174010ustar 00000000000000#![allow(clippy::result_large_err)] use std::any::Any; use crate::bstr::BStr; impl crate::Repository { /// Produce configuration suitable for `url`, as differentiated by its protocol/scheme, to be passed to a transport instance via /// [configure()][gix_transport::client::TransportWithoutIO::configure()] (via `&**config` to pass the contained `Any` and not the `Box`). /// `None` is returned if there is no known configuration. If `remote_name` is not `None`, the remote's name may contribute to /// configuration overrides, typically for the HTTP transport. 
/// /// Note that the caller may cast the instance themselves to modify it before passing it on. /// /// For transports that support proxy authentication, the /// [default authentication method](crate::config::Snapshot::credential_helpers()) will be used with the url of the proxy /// if it contains a user name. #[cfg_attr( not(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" )), allow(unused_variables) )] pub fn transport_options<'a>( &self, url: impl Into<&'a BStr>, remote_name: Option<&BStr>, ) -> Result>, crate::config::transport::Error> { let url = gix_url::parse(url.into())?; use gix_url::Scheme::*; match &url.scheme { Http | Https => { #[cfg(not(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" )))] { Ok(None) } #[cfg(any( feature = "blocking-http-transport-reqwest", feature = "blocking-http-transport-curl" ))] { use std::{ borrow::Cow, sync::{Arc, Mutex}, }; use gix_transport::client::{ http, http::options::{ProxyAuthMethod, SslVersion, SslVersionRangeInclusive}, }; use crate::{ config, config::{ cache::util::ApplyLeniency, tree::{gitoxide, Key, Remote}, }, }; fn try_cow_to_string( v: Cow<'_, BStr>, lenient: bool, key_str: impl Into>, key: &'static config::tree::keys::String, ) -> Result, config::transport::Error> { key.try_into_string(v) .map_err(|err| config::transport::Error::IllformedUtf8 { source: err, key: key_str.into(), }) .map(Some) .with_leniency(lenient) } fn cow_bstr(v: &str) -> Cow<'_, BStr> { Cow::Borrowed(v.into()) } fn proxy_auth_method( value_and_key: Option<( Cow<'_, BStr>, Cow<'static, BStr>, &'static config::tree::http::ProxyAuthMethod, )>, ) -> Result { let value = value_and_key .map(|(method, key, key_type)| { key_type.try_into_proxy_auth_method(method).map_err(|err| { config::transport::http::Error::InvalidProxyAuthMethod { source: err, key } }) }) .transpose()? 
.unwrap_or_default(); Ok(value) } fn ssl_version( config: &gix_config::File<'static>, key_str: &'static str, key: &'static config::tree::http::SslVersion, mut filter: fn(&gix_config::file::Metadata) -> bool, lenient: bool, ) -> Result, config::transport::Error> { debug_assert_eq!( key_str, key.logical_name(), "BUG: hardcoded and generated key names must match" ); config .string_filter(key_str, &mut filter) .filter(|v| !v.is_empty()) .map(|v| { key.try_into_ssl_version(v) .map_err(crate::config::transport::http::Error::from) }) .transpose() .with_leniency(lenient) .map_err(Into::into) } fn proxy( value: Option<(Cow<'_, BStr>, Cow<'static, BStr>, &'static config::tree::keys::String)>, lenient: bool, ) -> Result, config::transport::Error> { Ok(value .and_then(|(v, k, key)| try_cow_to_string(v, lenient, k.clone(), key).transpose()) .transpose()? .map(|mut proxy| { if !proxy.trim().is_empty() && !proxy.contains("://") { proxy.insert_str(0, "http://"); proxy } else { proxy } })) } let mut opts = http::Options::default(); let config = &self.config.resolved; let mut trusted_only = self.filter_config_section(); let lenient = self.config.lenient_config; opts.extra_headers = { let key = "http.extraHeader"; debug_assert_eq!(key, &config::tree::Http::EXTRA_HEADER.logical_name()); config .strings_filter(key, &mut trusted_only) .map(|values| config::tree::Http::EXTRA_HEADER.try_into_extra_header(values)) .transpose() .map_err(|err| config::transport::Error::IllformedUtf8 { source: err, key: Cow::Borrowed(key.into()), })? .unwrap_or_default() }; opts.follow_redirects = { let key = "http.followRedirects"; config::tree::Http::FOLLOW_REDIRECTS .try_into_follow_redirects( config.string_filter(key, &mut trusted_only).unwrap_or_default(), || { config .boolean_filter(key, &mut trusted_only) .transpose() .with_leniency(lenient) }, ) .map_err(config::transport::http::Error::InvalidFollowRedirects)? 
}; opts.low_speed_time_seconds = config .integer_filter("http.lowSpeedTime", &mut trusted_only) .map(|value| config::tree::Http::LOW_SPEED_TIME.try_into_u64(value)) .transpose() .with_leniency(lenient) .map_err(config::transport::http::Error::from)? .unwrap_or_default(); opts.low_speed_limit_bytes_per_second = config .integer_filter("http.lowSpeedLimit", &mut trusted_only) .map(|value| config::tree::Http::LOW_SPEED_LIMIT.try_into_u32(value)) .transpose() .with_leniency(lenient) .map_err(config::transport::http::Error::from)? .unwrap_or_default(); opts.proxy = proxy( remote_name .and_then(|name| { config .string_filter(format!("remote.{}.{}", name, Remote::PROXY.name), &mut trusted_only) .map(|v| (v, Cow::Owned(format!("remote.{name}.proxy").into()), &Remote::PROXY)) }) .or_else(|| { let key = "http.proxy"; debug_assert_eq!(key, config::tree::Http::PROXY.logical_name()); let http_proxy = config .string_filter(key, &mut trusted_only) .map(|v| (v, cow_bstr(key), &config::tree::Http::PROXY)) .or_else(|| { let key = "gitoxide.http.proxy"; debug_assert_eq!(key, gitoxide::Http::PROXY.logical_name()); config .string_filter(key, &mut trusted_only) .map(|v| (v, cow_bstr(key), &gitoxide::Http::PROXY)) }); if url.scheme == Https { http_proxy.or_else(|| { let key = "gitoxide.https.proxy"; debug_assert_eq!(key, gitoxide::Https::PROXY.logical_name()); config .string_filter(key, &mut trusted_only) .map(|v| (v, cow_bstr(key), &gitoxide::Https::PROXY)) }) } else { http_proxy } }) .or_else(|| { let key = "gitoxide.http.allProxy"; debug_assert_eq!(key, gitoxide::Http::ALL_PROXY.logical_name()); config .string_filter(key, &mut trusted_only) .map(|v| (v, cow_bstr(key), &gitoxide::Http::ALL_PROXY)) }), lenient, )?; { let key = "gitoxide.http.noProxy"; debug_assert_eq!(key, gitoxide::Http::NO_PROXY.logical_name()); opts.no_proxy = config .string_filter(key, &mut trusted_only) .and_then(|v| { try_cow_to_string(v, lenient, Cow::Borrowed(key.into()), &gitoxide::Http::NO_PROXY) .transpose() 
}) .transpose()?; } opts.proxy_auth_method = proxy_auth_method({ let key = "gitoxide.http.proxyAuthMethod"; debug_assert_eq!(key, gitoxide::Http::PROXY_AUTH_METHOD.logical_name()); config .string_filter(key, &mut trusted_only) .map(|v| (v, Cow::Borrowed(key.into()), &gitoxide::Http::PROXY_AUTH_METHOD)) .or_else(|| { remote_name .and_then(|name| { config .string_filter(format!("remote.{name}.proxyAuthMethod"), &mut trusted_only) .map(|v| { ( v, Cow::Owned(format!("remote.{name}.proxyAuthMethod").into()), &Remote::PROXY_AUTH_METHOD, ) }) }) .or_else(|| { let key = "http.proxyAuthMethod"; debug_assert_eq!(key, config::tree::Http::PROXY_AUTH_METHOD.logical_name()); config.string_filter(key, &mut trusted_only).map(|v| { (v, Cow::Borrowed(key.into()), &config::tree::Http::PROXY_AUTH_METHOD) }) }) }) })?; opts.proxy_authenticate = opts .proxy .as_deref() .filter(|url| !url.is_empty()) .map(|url| gix_url::parse(url.into())) .transpose()? .filter(|url| url.user().is_some()) .map(|url| -> Result<_, config::transport::http::Error> { let (mut cascade, action_with_normalized_url, prompt_opts) = self.config_snapshot().credential_helpers(url)?; Ok(( action_with_normalized_url, Arc::new(Mutex::new(move |action| cascade.invoke(action, prompt_opts.clone()))) as Arc>, )) }) .transpose()?; opts.connect_timeout = { let key = "gitoxide.http.connectTimeout"; config .integer_filter(key, &mut trusted_only) .map(|v| { debug_assert_eq!(key, gitoxide::Http::CONNECT_TIMEOUT.logical_name()); gitoxide::Http::CONNECT_TIMEOUT .try_into_duration(v) .map_err(crate::config::transport::http::Error::from) }) .transpose() .with_leniency(lenient)? }; { let key = "http.userAgent"; opts.user_agent = config .string_filter(key, &mut trusted_only) .and_then(|v| { try_cow_to_string( v, lenient, Cow::Borrowed(key.into()), &config::tree::Http::USER_AGENT, ) .transpose() }) .transpose()? 
.or_else(|| Some(crate::env::agent().into())); } { let key = "http.version"; opts.http_version = config .string_filter(key, &mut trusted_only) .map(|v| { config::tree::Http::VERSION .try_into_http_version(v) .map_err(config::transport::http::Error::InvalidHttpVersion) }) .transpose()?; } { opts.verbose = config .boolean_filter(gitoxide::Http::VERBOSE, &mut trusted_only) .and_then(Result::ok) .unwrap_or_default(); } let may_use_cainfo = { let key = "http.schannelUseSSLCAInfo"; config .boolean_filter(key, &mut trusted_only) .map(|value| config::tree::Http::SCHANNEL_USE_SSL_CA_INFO.enrich_error(value)) .transpose() .with_leniency(lenient) .map_err(config::transport::http::Error::from)? .unwrap_or(true) }; if may_use_cainfo { let key = "http.sslCAInfo"; debug_assert_eq!(key, config::tree::Http::SSL_CA_INFO.logical_name()); opts.ssl_ca_info = config .path_filter(key, &mut trusted_only) .map(|p| { use crate::config::cache::interpolate_context; p.interpolate(interpolate_context( self.install_dir().ok().as_deref(), self.config.home_dir().as_deref(), )) .map(std::borrow::Cow::into_owned) }) .transpose() .with_leniency(lenient) .map_err(|err| config::transport::Error::InterpolatePath { source: err, key })?; } { opts.ssl_version = ssl_version( config, "http.sslVersion", &config::tree::Http::SSL_VERSION, trusted_only, lenient, )? 
.map(|v| SslVersionRangeInclusive { min: v, max: v }); let min_max = ssl_version( config, "gitoxide.http.sslVersionMin", &gitoxide::Http::SSL_VERSION_MIN, trusted_only, lenient, ) .and_then(|min| { ssl_version( config, "gitoxide.http.sslVersionMax", &gitoxide::Http::SSL_VERSION_MAX, trusted_only, lenient, ) .map(|max| min.and_then(|min| max.map(|max| (min, max)))) })?; if let Some((min, max)) = min_max { let v = opts.ssl_version.get_or_insert(SslVersionRangeInclusive { min: SslVersion::TlsV1_3, max: SslVersion::TlsV1_3, }); v.min = min; v.max = max; } } { let key = "gitoxide.http.sslNoVerify"; let ssl_no_verify = config .boolean_filter(key, &mut trusted_only) .map(|value| config::tree::gitoxide::Http::SSL_NO_VERIFY.enrich_error(value)) .transpose() .with_leniency(lenient) .map_err(config::transport::http::Error::from)? .unwrap_or_default(); if ssl_no_verify { opts.ssl_verify = false; } else { let key = "http.sslVerify"; opts.ssl_verify = config .boolean_filter(key, &mut trusted_only) .map(|value| config::tree::Http::SSL_VERIFY.enrich_error(value)) .transpose() .with_leniency(lenient) .map_err(config::transport::http::Error::from)? 
.unwrap_or(true); } } #[cfg(feature = "blocking-http-transport-curl")] { let key = "http.schannelCheckRevoke"; let schannel_check_revoke = config .boolean_filter(key, &mut trusted_only) .map(|value| config::tree::Http::SCHANNEL_CHECK_REVOKE.enrich_error(value)) .transpose() .with_leniency(lenient) .map_err(config::transport::http::Error::from)?; let backend = gix_protocol::transport::client::http::curl::Options { schannel_check_revoke }; opts.backend = Some(Arc::new(Mutex::new(backend)) as Arc>); } Ok(Some(Box::new(opts))) } } File | Git | Ssh | Ext(_) => Ok(None), } } } gix-0.69.1/src/repository/diff.rs000064400000000000000000000102201046102023000147670ustar 00000000000000use crate::repository::{diff_resource_cache, diff_tree_to_tree}; use crate::{Repository, Tree}; use gix_object::TreeRefIter; /// Diff-utilities impl Repository { /// Create a resource cache for diffable objects, and configured with everything it needs to know to perform diffs /// faithfully just like `git` would. /// `mode` controls what version of a resource should be diffed. /// `worktree_roots` determine if files can be read from the worktree, where each side of the diff operation can /// be represented by its own worktree root. `.gitattributes` are automatically read from the worktree if at least /// one worktree is present. /// /// Note that attributes will always be obtained from the current `HEAD` index even if the resources being diffed /// might live in another tree. Further, if one of the `worktree_roots` are set, attributes will also be read from /// the worktree. Otherwise, it will be skipped and attributes are read from the index tree instead. 
pub fn diff_resource_cache( &self, mode: gix_diff::blob::pipeline::Mode, worktree_roots: gix_diff::blob::pipeline::WorktreeRoots, ) -> Result { let index = self.index_or_load_from_head_or_empty()?; Ok(crate::diff::resource_cache( self, mode, self.attributes_only( &index, if worktree_roots.is_unset() { gix_worktree::stack::state::attributes::Source::IdMapping } else { gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping }, )? .inner, worktree_roots, )?) } /// Produce the changes that would need to be applied to `old_tree` to create `new_tree`. /// If `options` are unset, they will be filled in according to the git configuration of this repository, and with /// [full paths being tracked](crate::diff::Options::track_path()) as well, which typically means that /// rewrite tracking might be disabled if done so explicitly by the user. /// If `options` are set, the user can take full control over the settings. /// /// Note that this method exists to evoke similarity to `git2`, and makes it easier to fully control diff settings. /// A more fluent version [may be used as well](Tree::changes()). pub fn diff_tree_to_tree<'a, 'old_repo: 'a, 'new_repo: 'a>( &self, old_tree: impl Into>>, new_tree: impl Into>>, options: impl Into>, ) -> Result, diff_tree_to_tree::Error> { let mut cache = self.diff_resource_cache(gix_diff::blob::pipeline::Mode::ToGit, Default::default())?; let opts = options .into() .map_or_else(|| crate::diff::Options::from_configuration(&self.config), Ok)? 
.into(); let empty_tree = self.empty_tree(); let old_tree = old_tree.into().unwrap_or(&empty_tree); let new_tree = new_tree.into().unwrap_or(&empty_tree); let mut out = Vec::new(); gix_diff::tree_with_rewrites( TreeRefIter::from_bytes(&old_tree.data), TreeRefIter::from_bytes(&new_tree.data), &mut cache, &mut Default::default(), &self.objects, |change| -> Result<_, std::convert::Infallible> { out.push(change.into_owned()); Ok(gix_diff::tree_with_rewrites::Action::Continue) }, opts, )?; Ok(out) } /// Return a resource cache suitable for diffing blobs from trees directly, where no worktree checkout exists. /// /// For more control, see [`diff_resource_cache()`](Self::diff_resource_cache). pub fn diff_resource_cache_for_tree_diff(&self) -> Result { self.diff_resource_cache( gix_diff::blob::pipeline::Mode::ToGit, gix_diff::blob::pipeline::WorktreeRoots::default(), ) } } gix-0.69.1/src/repository/dirwalk.rs000064400000000000000000000140531046102023000155240ustar 00000000000000use crate::bstr::{BStr, BString}; use crate::util::OwnedOrStaticAtomicBool; use crate::worktree::IndexPersistedOrInMemory; use crate::{config, dirwalk, is_dir_to_mode, Repository}; use std::sync::atomic::AtomicBool; impl Repository { /// Return default options suitable for performing a directory walk on this repository. /// /// Used in conjunction with [`dirwalk()`](Self::dirwalk()) pub fn dirwalk_options(&self) -> Result { Ok(dirwalk::Options::from_fs_caps(self.filesystem_options()?)) } /// Perform a directory walk configured with `options` under control of the `delegate`. Use `patterns` to /// further filter entries. `should_interrupt` is polled to see if an interrupt is requested, causing an /// error to be returned instead. /// /// The `index` is used to determine if entries are tracked, and for excludes and attributes /// lookup. Note that items will only count as tracked if they have the [`gix_index::entry::Flags::UPTODATE`] /// flag set. 
/// /// Note that dirwalks for the purpose of deletion will be initialized with the worktrees of this repository /// if they fall into the working directory of this repository as well to mark them as `tracked`. That way /// it's hard to accidentally flag them for deletion. /// This is intentionally not the case when deletion is not intended so they look like /// untracked repositories instead. /// /// See [`gix_dir::walk::delegate::Collect`] for a delegate that collects all seen entries. pub fn dirwalk( &self, index: &gix_index::State, patterns: impl IntoIterator>, should_interrupt: &AtomicBool, options: dirwalk::Options, delegate: &mut dyn gix_dir::walk::Delegate, ) -> Result, dirwalk::Error> { let _span = gix_trace::coarse!("gix::dirwalk"); let workdir = self.work_dir().ok_or(dirwalk::Error::MissingWorkDir)?; let mut excludes = self.excludes( index, None, crate::worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped, )?; let mut pathspec = self.pathspec( options.empty_patterns_match_prefix, /* empty patterns match prefix */ patterns, true, /* inherit ignore case */ index, crate::worktree::stack::state::attributes::Source::WorktreeThenIdMapping, )?; let git_dir_realpath = crate::path::realpath_opts(self.git_dir(), self.current_dir(), crate::path::realpath::MAX_SYMLINKS)?; let fs_caps = self.filesystem_options()?; let accelerate_lookup = fs_caps.ignore_case.then(|| index.prepare_icase_backing()); let mut opts = gix_dir::walk::Options::from(options); let worktree_relative_worktree_dirs_storage; if let Some(workdir) = self.work_dir().filter(|_| opts.for_deletion.is_some()) { let linked_worktrees = self.worktrees()?; if !linked_worktrees.is_empty() { let real_workdir = gix_path::realpath_opts( workdir, self.options.current_dir_or_empty(), gix_path::realpath::MAX_SYMLINKS, )?; worktree_relative_worktree_dirs_storage = linked_worktrees .into_iter() .filter_map(|proxy| proxy.base().ok()) .filter_map(|base| 
base.strip_prefix(&real_workdir).map(ToOwned::to_owned).ok()) .map(|rela_path| { gix_path::to_unix_separators_on_windows(gix_path::into_bstr(rela_path)).into_owned() }) .collect(); opts.worktree_relative_worktree_dirs = Some(&worktree_relative_worktree_dirs_storage); } } let (outcome, traversal_root) = gix_dir::walk( workdir, gix_dir::walk::Context { should_interrupt: Some(should_interrupt), git_dir_realpath: git_dir_realpath.as_ref(), current_dir: self.current_dir(), index, ignore_case_index_lookup: accelerate_lookup.as_ref(), pathspec: &mut pathspec.search, pathspec_attributes: &mut |relative_path, case, is_dir, out| { let stack = pathspec .stack .as_mut() .expect("can only be called if attributes are used in patterns"); stack .set_case(case) .at_entry(relative_path, Some(is_dir_to_mode(is_dir)), &self.objects) .map_or(false, |platform| platform.matching_attributes(out)) }, excludes: Some(&mut excludes.inner), objects: &self.objects, explicit_traversal_root: (!options.empty_patterns_match_prefix).then_some(workdir), }, opts, delegate, )?; Ok(dirwalk::Outcome { dirwalk: outcome, traversal_root, excludes, pathspec, }) } /// Create an iterator over a running traversal, which stops if the iterator is dropped. All arguments /// are the same as in [`dirwalk()`](Self::dirwalk). /// /// `should_interrupt` should be set to `Default::default()` if it is supposed to be unused. /// Otherwise, it can be created by passing a `&'static AtomicBool`, `&Arc` or `Arc`. 
// NOTE(review): several generic argument lists in this span were stripped during text
// extraction (e.g. `impl Into,`, `impl IntoIterator>`, bare `Result`) — compare with
// the upstream `gix` sources before relying on these signatures.
pub fn dirwalk_iter(
    &self,
    index: impl Into,
    patterns: impl IntoIterator>,
    should_interrupt: OwnedOrStaticAtomicBool,
    options: dirwalk::Options,
) -> Result {
    // All inputs are converted into owned values so the returned iterator is independent
    // of the lifetimes of its inputs (apart from `self`).
    dirwalk::Iter::new(
        self,
        index.into(),
        // Collect patterns eagerly - the iterator needs an owned list of them.
        patterns.into_iter().map(Into::into).collect(),
        should_interrupt,
        options,
    )
}
}
gix-0.69.1/src/repository/filter.rs000064400000000000000000000062131046102023000153530ustar 00000000000000use crate::{filter, worktree::IndexPersistedOrInMemory, Id, Repository};

///
pub mod pipeline {
    /// The error returned by [Repository::filter_pipeline()](super::Repository::filter_pipeline()).
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        // Each variant wraps the error of one step performed by `filter_pipeline()`.
        #[error("Could not obtain head commit of bare repository")]
        HeadCommit(#[from] crate::reference::head_commit::Error),
        #[error(transparent)]
        DecodeCommit(#[from] gix_object::decode::Error),
        #[error("Could not create index from tree at HEAD^{{tree}}")]
        TreeTraverse(#[from] crate::repository::index_from_tree::Error),
        #[error(transparent)]
        BareAttributes(#[from] crate::config::attribute_stack::Error),
        #[error(transparent)]
        WorktreeIndex(#[from] crate::worktree::open_index::Error),
        #[error(transparent)]
        Init(#[from] crate::filter::pipeline::options::Error),
    }
}

impl Repository {
    /// Configure a pipeline for converting byte buffers to the worktree representation, and byte streams to the git-internal
    /// representation. Also return the index that was used when initializing the pipeline as it may be useful when calling
    /// [convert_to_git()](filter::Pipeline::convert_to_git()).
    /// Bare repositories will either use `HEAD^{tree}` for accessing all relevant worktree files or the given `tree_if_bare`.
    ///
    /// Note that this is considered a primitive as it operates on data directly and will not have permanent effects.
    /// We also return the index that was used to configure the attributes cache (for accessing `.gitattributes`), which can be reused
    /// after it was possibly created from a tree, an expensive operation.
/// /// ### Performance /// /// Note that when in a repository with worktree, files in the worktree will be read with priority, which causes at least a stat /// each time the directory is changed. This can be expensive if access isn't in sorted order, which would cause more then necessary /// stats: one per directory. pub fn filter_pipeline( &self, tree_if_bare: Option, ) -> Result<(filter::Pipeline<'_>, IndexPersistedOrInMemory), pipeline::Error> { let (cache, index) = if self.is_bare() { let index = self.index_from_tree(&tree_if_bare.map_or_else( || { self.head_commit() .map_err(pipeline::Error::from) .and_then(|c| c.tree_id().map(Id::detach).map_err(Into::into)) }, Ok, )?)?; let cache = self.attributes_only(&index, gix_worktree::stack::state::attributes::Source::IdMapping)?; (cache, IndexPersistedOrInMemory::InMemory(index)) } else { let index = self.index_or_empty()?; let cache = self.attributes_only( &index, gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping, )?; (cache, IndexPersistedOrInMemory::Persisted(index)) }; Ok((filter::Pipeline::new(self, cache.detach())?, index)) } } gix-0.69.1/src/repository/freelist.rs000064400000000000000000000063411046102023000157050ustar 00000000000000use std::cell::RefCell; use std::ops::{Deref, DerefMut}; /// A buffer that is returned to the free-list after usage. #[derive(Clone)] pub struct Buffer<'repo> { /// The buffer that would be returned to the freelist of `repo`. /// Note that buffers without capacity (i.e. without allocation) aren't returned. pub inner: Vec, /// The repository from whose free-list the `inner` buffer was taken, and to which it will be returned. 
pub repo: &'repo crate::Repository,
}

// Converting a `Buffer` into a plain `Vec<u8>` detaches the allocation from the free-list:
// taking the buffer leaves an empty, capacity-less vector behind, which `Drop` will not return.
impl From<Buffer<'_>> for Vec<u8> {
    fn from(mut value: Buffer<'_>) -> Self {
        std::mem::take(&mut value.inner)
    }
}

impl Deref for Buffer<'_> {
    type Target = Vec<u8>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl DerefMut for Buffer<'_> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}

impl Drop for Buffer<'_> {
    fn drop(&mut self) {
        // Hand the allocation back to the repository's free-list for later reuse.
        self.repo.reuse_buffer(&mut self.inner);
    }
}

/// Internal
impl crate::Repository {
    /// Note that the returned buffer might still have data in it.
    #[inline]
    pub(crate) fn free_buf(&self) -> Vec<u8> {
        self.bufs
            .as_ref()
            .and_then(|bufs| bufs.borrow_mut().pop())
            .unwrap_or_default()
    }

    /// This method is commonly called from the destructor of objects that previously claimed an entry
    /// in the free-list with [crate::Repository::free_buf].
    /// They are welcome to take out the data themselves, for instance when the object is detached, to avoid
    /// it to be reclaimed.
    #[inline]
    pub(crate) fn reuse_buffer(&self, data: &mut Vec<u8>) {
        // Buffers without an actual allocation aren't worth keeping.
        if data.capacity() > 0 {
            if let Some(bufs) = self.bufs.as_ref() {
                bufs.borrow_mut().push(std::mem::take(data));
            }
        }
    }
}

/// Freelist configuration
///
/// The free-list is an internal and 'transparent' mechanism for obtaining and re-using memory buffers when
/// reading objects. That way, trashing is avoided as buffers are re-used and re-written.
///
/// However, there are circumstances when releasing memory early is preferred, for instance on the server side.
///
/// Also note that the free-list isn't cloned, so each clone of this instance starts with an empty one.
impl crate::Repository {
    /// Return an empty buffer which is tied to this repository instance, and reuse its memory allocation by
    /// keeping it around even after it drops.
    pub fn empty_reusable_buffer(&self) -> Buffer<'_> {
        let mut inner = self.free_buf();
        // A recycled buffer may still contain previous content - clear it while keeping the capacity.
        inner.clear();
        Buffer { inner, repo: self }
    }

    /// Set the currently used freelist to `list`. If `None`, it will be disabled entirely.
/// /// Return the currently previously allocated free-list, a list of reusable buffers typically used when reading objects. /// May be `None` if there was no free-list. pub fn set_freelist(&mut self, list: Option>>) -> Option>> { let previous = self.bufs.take(); self.bufs = list.map(RefCell::new); previous.map(RefCell::into_inner) } /// A builder method to disable the free-list on a newly created instance. pub fn without_freelist(mut self) -> Self { self.bufs.take(); self } } gix-0.69.1/src/repository/graph.rs000064400000000000000000000041511046102023000151660ustar 00000000000000impl crate::Repository { /// Create a graph data-structure capable of accelerating graph traversals and storing state of type `T` with each commit /// it encountered. /// /// Note that the `cache` will be used if present, and it's best obtained with /// [`commit_graph_if_enabled()`](crate::Repository::commit_graph_if_enabled()). /// /// Note that a commitgraph is only allowed to be used if `core.commitGraph` is true (the default), and that configuration errors are /// ignored as well. /// /// ### Performance /// /// Note that the [Graph][gix_revwalk::Graph] can be sensitive to various object database settings that may affect the performance /// of the commit walk. pub fn revision_graph<'cache, T>( &self, cache: Option<&'cache gix_commitgraph::Graph>, ) -> gix_revwalk::Graph<'_, 'cache, T> { gix_revwalk::Graph::new(&self.objects, cache) } /// Return a cache for commits and their graph structure, as managed by `git commit-graph`, for accelerating commit walks on /// a low level. /// /// Note that [`revision_graph()`][crate::Repository::revision_graph()] should be preferred for general purpose walks that don't /// rely on the actual commit cache to be present, while leveraging the commit-graph if possible. 
// NOTE(review): the return types below lost their generic arguments during text extraction
// (bare `Result` and `Result, ...>`) — presumably `gix_commitgraph::Graph` resp. an `Option`
// thereof; confirm against the upstream `gix` sources.
pub fn commit_graph(&self) -> Result {
    // Commit-graph files live in the `info` directory of the object store.
    gix_commitgraph::at(self.objects.store_ref().path().join("info"))
}

/// Return a newly opened commit-graph if it is available *and* enabled in the Git configuration.
pub fn commit_graph_if_enabled(
    &self,
) -> Result, super::commit_graph_if_enabled::Error> {
    Ok(self
        .config
        .may_use_commit_graph()?
        .then(|| gix_commitgraph::at(self.objects.store_ref().path().join("info")))
        .transpose()
        // A missing commit-graph file is not an error - it is reported as `None` instead.
        .or_else(|err| match err {
            gix_commitgraph::init::Error::Io { err, .. } if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
            _ => Err(err),
        })?)
}
}
gix-0.69.1/src/repository/identity.rs000064400000000000000000000151451046102023000157230ustar 00000000000000use std::time::SystemTime;

use crate::{
    bstr::BString,
    config,
    config::tree::{gitoxide, keys, Author, Committer, Key, User},
};

/// Identity handling.
///
/// # Deviation
///
/// There is no notion of a default user like in git, and instead failing to provide a user
/// is fatal. That way, we enforce correctness and force application developers to take care
/// of this issue which can be done in various ways, for instance by setting
/// `gitoxide.committer.nameFallback` and similar.
impl crate::Repository {
    /// Return the committer as configured by this repository, which is determined by…
    ///
    /// * …the git configuration `committer.name|email`…
    /// * …the `GIT_COMMITTER_(NAME|EMAIL|DATE)` environment variables…
    /// * …the configuration for `user.name|email` as fallback…
    ///
    /// …and in that order, or `None` if no committer name or email was configured, or `Some(Err(…))`
    /// if the committer date could not be parsed.
    ///
    /// # Note
    ///
    /// The values are cached when the repository is instantiated.
pub fn committer(&self) -> Option, config::time::Error>> { let p = self.config.personas(); Ok(gix_actor::SignatureRef { name: p.committer.name.as_ref().or(p.user.name.as_ref()).map(AsRef::as_ref)?, email: p .committer .email .as_ref() .or(p.user.email.as_ref()) .map(AsRef::as_ref)?, time: match extract_time_or_default(p.committer.time.as_ref(), &gitoxide::Commit::COMMITTER_DATE) { Ok(t) => t, Err(err) => return Some(Err(err)), }, }) .into() } /// Return the author as configured by this repository, which is determined by… /// /// * …the git configuration `author.name|email`… /// * …the `GIT_AUTHOR_(NAME|EMAIL|DATE)` environment variables… /// * …the configuration for `user.name|email` as fallback… /// /// …and in that order, or `None` if there was nothing configured. /// /// # Note /// /// The values are cached when the repository is instantiated. pub fn author(&self) -> Option, config::time::Error>> { let p = self.config.personas(); Ok(gix_actor::SignatureRef { name: p.author.name.as_ref().or(p.user.name.as_ref()).map(AsRef::as_ref)?, email: p.author.email.as_ref().or(p.user.email.as_ref()).map(AsRef::as_ref)?, time: match extract_time_or_default(p.author.time.as_ref(), &gitoxide::Commit::AUTHOR_DATE) { Ok(t) => t, Err(err) => return Some(Err(err)), }, }) .into() } } fn extract_time_or_default( time: Option<&Result>, config_key: &'static keys::Time, ) -> Result { match time { Some(Ok(t)) => Ok(*t), None => Ok(gix_date::Time::now_local_or_utc()), Some(Err(err)) => Err(config::time::Error::from(config_key).with_source(err.clone())), } } #[derive(Debug, Clone)] pub(crate) struct Entity { pub name: Option, pub email: Option, /// A time parsed from an environment variable, handling potential errors is delayed. 
pub time: Option>, } #[derive(Debug, Clone)] pub(crate) struct Personas { user: Entity, committer: Entity, author: Entity, } impl Personas { pub fn from_config_and_env(config: &gix_config::File<'_>) -> Self { fn entity_in_section( config: &gix_config::File<'_>, name_key: &keys::Any, email_key: &keys::Any, fallback: Option<(&keys::Any, &keys::Any)>, ) -> (Option, Option) { let fallback = fallback.and_then(|(name_key, email_key)| { debug_assert_eq!(name_key.section.name(), email_key.section.name()); config .section("gitoxide", Some(name_key.section.name().into())) .ok() .map(|section| (section, name_key, email_key)) }); ( config .string(name_key) .or_else(|| fallback.as_ref().and_then(|(s, name_key, _)| s.value(name_key.name))) .map(std::borrow::Cow::into_owned), config .string(email_key) .or_else(|| fallback.as_ref().and_then(|(s, _, email_key)| s.value(email_key.name))) .map(std::borrow::Cow::into_owned), ) } let now = SystemTime::now(); let parse_date = |key: &str, date: &keys::Time| -> Option> { debug_assert_eq!( key, date.logical_name(), "BUG: drift of expected name and actual name of the key (we hardcode it to save an allocation)" ); config.string(key).map(|time| date.try_into_time(time, now.into())) }; let fallback = ( &gitoxide::Committer::NAME_FALLBACK, &gitoxide::Committer::EMAIL_FALLBACK, ); let (committer_name, committer_email) = entity_in_section(config, &Committer::NAME, &Committer::EMAIL, Some(fallback)); let fallback = (&gitoxide::Author::NAME_FALLBACK, &gitoxide::Author::EMAIL_FALLBACK); let (author_name, author_email) = entity_in_section(config, &Author::NAME, &Author::EMAIL, Some(fallback)); let (user_name, mut user_email) = entity_in_section(config, &User::NAME, &User::EMAIL, None); let committer_date = parse_date("gitoxide.commit.committerDate", &gitoxide::Commit::COMMITTER_DATE); let author_date = parse_date("gitoxide.commit.authorDate", &gitoxide::Commit::AUTHOR_DATE); user_email = user_email.or_else(|| { config 
.string(gitoxide::User::EMAIL_FALLBACK.logical_name().as_str()) .map(std::borrow::Cow::into_owned) }); Personas { user: Entity { name: user_name, email: user_email, time: None, }, committer: Entity { name: committer_name, email: committer_email, time: committer_date, }, author: Entity { name: author_name, email: author_email, time: author_date, }, } } } gix-0.69.1/src/repository/impls.rs000064400000000000000000000115741046102023000152200ustar 00000000000000use gix_object::Exists; use std::ops::DerefMut; impl Clone for crate::Repository { fn clone(&self) -> Self { let mut new = crate::Repository::from_refs_and_objects( self.refs.clone(), self.objects.clone(), self.work_tree.clone(), self.common_dir.clone(), self.config.clone(), self.options.clone(), #[cfg(feature = "index")] self.index.clone(), self.shallow_commits.clone(), #[cfg(feature = "attributes")] self.modules.clone(), ); if self.bufs.is_none() { new.bufs.take(); } new } } impl std::fmt::Debug for crate::Repository { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Repository") .field("kind", &self.kind()) .field("git_dir", &self.git_dir()) .field("work_dir", &self.work_dir()) .finish() } } impl PartialEq for crate::Repository { fn eq(&self, other: &crate::Repository) -> bool { self.git_dir().canonicalize().ok() == other.git_dir().canonicalize().ok() && self.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok()) == other.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok()) } } impl From<&crate::ThreadSafeRepository> for crate::Repository { fn from(repo: &crate::ThreadSafeRepository) -> Self { crate::Repository::from_refs_and_objects( repo.refs.clone(), gix_odb::memory::Proxy::from(gix_odb::Cache::from(repo.objects.to_handle())).with_write_passthrough(), repo.work_tree.clone(), repo.common_dir.clone(), repo.config.clone(), repo.linked_worktree_options.clone(), #[cfg(feature = "index")] repo.index.clone(), repo.shallow_commits.clone(), #[cfg(feature = "attributes")] 
repo.modules.clone(), ) } } impl From for crate::Repository { fn from(repo: crate::ThreadSafeRepository) -> Self { crate::Repository::from_refs_and_objects( repo.refs, gix_odb::memory::Proxy::from(gix_odb::Cache::from(repo.objects.to_handle())).with_write_passthrough(), repo.work_tree, repo.common_dir, repo.config, repo.linked_worktree_options, #[cfg(feature = "index")] repo.index, repo.shallow_commits, #[cfg(feature = "attributes")] repo.modules.clone(), ) } } impl From for crate::ThreadSafeRepository { fn from(r: crate::Repository) -> Self { crate::ThreadSafeRepository { refs: r.refs, objects: r.objects.into_inner().store(), work_tree: r.work_tree, common_dir: r.common_dir, config: r.config, linked_worktree_options: r.options, #[cfg(feature = "index")] index: r.index, #[cfg(feature = "attributes")] modules: r.modules, shallow_commits: r.shallow_commits, } } } impl gix_object::Write for crate::Repository { fn write(&self, object: &dyn gix_object::WriteTo) -> Result { let mut buf = self.empty_reusable_buffer(); object.write_to(buf.deref_mut())?; self.write_buf(object.kind(), &buf) } fn write_buf(&self, object: gix_object::Kind, from: &[u8]) -> Result { let oid = gix_object::compute_hash(self.object_hash(), object, from); if self.objects.exists(&oid) { return Ok(oid); } self.objects.write_buf(object, from) } fn write_stream( &self, kind: gix_object::Kind, size: u64, from: &mut dyn std::io::Read, ) -> Result { let mut buf = self.empty_reusable_buffer(); let bytes = std::io::copy(from, buf.deref_mut())?; if size != bytes { return Err(format!("Found {bytes} bytes in stream, but had {size} bytes declared").into()); } self.write_buf(kind, &buf) } } impl gix_object::FindHeader for crate::Repository { fn try_header(&self, id: &gix_hash::oid) -> Result, gix_object::find::Error> { self.objects.try_header(id) } } impl gix_object::Find for crate::Repository { fn try_find<'a>( &self, id: &gix_hash::oid, buffer: &'a mut Vec, ) -> Result>, gix_object::find::Error> { 
self.objects.try_find(id, buffer) } } impl gix_object::Exists for crate::Repository { fn exists(&self, id: &gix_hash::oid) -> bool { self.objects.exists(id) } } gix-0.69.1/src/repository/index.rs000064400000000000000000000167341046102023000152060ustar 00000000000000use crate::{ config::cache::util::ApplyLeniencyDefault, config::tree::Index, worktree, worktree::IndexPersistedOrInMemory, }; /// Index access impl crate::Repository { /// Open a new copy of the index file and decode it entirely. /// /// It will use the `index.threads` configuration key to learn how many threads to use. /// Note that it may fail if there is no index. pub fn open_index(&self) -> Result { let thread_limit = self .config .resolved .string(Index::THREADS) .map(|value| crate::config::tree::Index::THREADS.try_into_index_threads(value)) .transpose() .with_lenient_default(self.config.lenient_config)?; let skip_hash = self .config .resolved .boolean(Index::SKIP_HASH) .map(|res| crate::config::tree::Index::SKIP_HASH.enrich_error(res)) .transpose() .with_lenient_default(self.config.lenient_config)? .unwrap_or_default(); let index = gix_index::File::at( self.index_path(), self.object_hash(), skip_hash, gix_index::decode::Options { thread_limit, min_extension_block_in_bytes_for_threading: 0, expected_checksum: None, }, )?; Ok(index) } /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file /// on disk has changed. /// /// ### Notes /// /// * This will fail if the file doesn't exist, like in a newly initialized repository. If that is the case, use /// [index_or_empty()](Self::index_or_empty) or [try_index()](Self::try_index) instead. /// /// The index file is shared across all clones of this repository. 
pub fn index(&self) -> Result { self.try_index().and_then(|opt| match opt { Some(index) => Ok(index), None => Err(worktree::open_index::Error::IndexFile( gix_index::file::init::Error::Io(std::io::Error::new( std::io::ErrorKind::NotFound, format!("Could not find index file at {:?} for opening.", self.index_path()), )), )), }) } /// Return the shared worktree index if present, or return a new empty one which has an association to the place where the index would be. pub fn index_or_empty(&self) -> Result { Ok(self.try_index()?.unwrap_or_else(|| { worktree::Index::new(gix_fs::FileSnapshot::new(gix_index::File::from_state( gix_index::State::new(self.object_hash()), self.index_path(), ))) })) } /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file /// on disk has changed, or `None` if no such file exists. /// /// The index file is shared across all clones of this repository. pub fn try_index(&self) -> Result, worktree::open_index::Error> { self.index.recent_snapshot( || self.index_path().metadata().and_then(|m| m.modified()).ok(), || { self.open_index().map(Some).or_else(|err| match err { worktree::open_index::Error::IndexFile(gix_index::file::init::Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => { Ok(None) } err => Err(err), }) }, ) } /// Open the persisted worktree index or generate it from the current `HEAD^{tree}` to live in-memory only. /// /// Use this method to get an index in any repository, even bare ones that don't have one naturally. /// /// ### Note /// /// * The locally stored index is not guaranteed to represent `HEAD^{tree}` if this repository is bare - bare repos /// don't naturally have an index and if an index is present it must have been generated by hand. /// * This method will fail on unborn repositories as `HEAD` doesn't point to a reference yet, which is needed to resolve /// the revspec. 
If that is a concern, use [`Self::index_or_load_from_head_or_empty()`] instead. pub fn index_or_load_from_head( &self, ) -> Result { Ok(match self.try_index()? { Some(index) => IndexPersistedOrInMemory::Persisted(index), None => { let tree = self.head_commit()?.tree_id()?; IndexPersistedOrInMemory::InMemory(self.index_from_tree(&tree)?) } }) } /// Open the persisted worktree index or generate it from the current `HEAD^{tree}` to live in-memory only, /// or resort to an empty index if `HEAD` is unborn. /// /// Use this method to get an index in any repository, even bare ones that don't have one naturally, or those /// that are in a state where `HEAD` is invalid or points to an unborn reference. pub fn index_or_load_from_head_or_empty( &self, ) -> Result { Ok(match self.try_index()? { Some(index) => IndexPersistedOrInMemory::Persisted(index), None => match self.head()?.id() { Some(id) => { let head_tree_id = id.object()?.peel_to_commit()?.tree_id()?; IndexPersistedOrInMemory::InMemory(self.index_from_tree(&head_tree_id)?) } None => IndexPersistedOrInMemory::InMemory(gix_index::File::from_state( gix_index::State::new(self.object_hash()), self.index_path(), )), }, }) } /// Create new index-file, which would live at the correct location, in memory from the given `tree`. /// /// Note that this is an expensive operation as it requires recursively traversing the entire tree to unpack it into the index. 
// NOTE(review): the return type lost its generic arguments in extraction (bare `Result`) —
// presumably `gix_index::File` with `super::index_from_tree::Error`; confirm upstream.
pub fn index_from_tree(&self, tree: &gix_hash::oid) -> Result {
    Ok(gix_index::File::from_state(
        // Expensive: recursively unpacks the entire tree into index entries.
        gix_index::State::from_tree(tree, &self.objects, self.config.protect_options()?).map_err(|err| {
            super::index_from_tree::Error::IndexFromTree {
                id: tree.into(),
                source: err,
            }
        })?,
        // Associate the in-memory file with the standard index location (nothing is written).
        self.git_dir().join("index"),
    ))
}
}

impl std::ops::Deref for IndexPersistedOrInMemory {
    type Target = gix_index::File;

    fn deref(&self) -> &Self::Target {
        // Both variants hold an index file; deref transparently to it.
        match self {
            IndexPersistedOrInMemory::Persisted(i) => i,
            IndexPersistedOrInMemory::InMemory(i) => i,
        }
    }
}

impl IndexPersistedOrInMemory {
    /// Consume this instance and turn it into an owned index file.
    ///
    /// Note that this will cause the persisted index to be cloned, which would happen whenever the repository has a worktree.
    pub fn into_owned(self) -> gix_index::File {
        match self {
            IndexPersistedOrInMemory::Persisted(i) => gix_index::File::clone(&i),
            IndexPersistedOrInMemory::InMemory(i) => i,
        }
    }
}
gix-0.69.1/src/repository/init.rs000064400000000000000000000046401046102023000150330ustar 00000000000000use std::cell::RefCell;

impl crate::Repository {
    #[allow(clippy::too_many_arguments)]
    // NOTE(review): the `Option` parameters below lost their type arguments in extraction
    // (likely `Option<PathBuf>`-style worktree/common-dir paths) — confirm upstream.
    pub(crate) fn from_refs_and_objects(
        refs: crate::RefStore,
        mut objects: crate::OdbHandle,
        work_tree: Option,
        common_dir: Option,
        config: crate::config::Cache,
        linked_worktree_options: crate::open::Options,
        #[cfg(feature = "index")] index: crate::worktree::IndexStorage,
        shallow_commits: crate::shallow::CommitsStorage,
        #[cfg(feature = "attributes")] modules: crate::submodule::ModulesFileStorage,
    ) -> Self {
        // Install pack/object caches on the handle according to the cached configuration.
        setup_objects(&mut objects, &config);
        crate::Repository {
            // The buffer free-list starts enabled with room for a few buffers.
            bufs: Some(RefCell::new(Vec::with_capacity(4))),
            work_tree,
            common_dir,
            objects,
            refs,
            config,
            options: linked_worktree_options,
            #[cfg(feature = "index")]
            index,
            shallow_commits,
            #[cfg(feature = "attributes")]
            modules,
        }
    }

    /// Convert this instance into a [`ThreadSafeRepository`][crate::ThreadSafeRepository] by dropping all thread-local data.
pub fn into_sync(self) -> crate::ThreadSafeRepository {
    // Delegates to the `From<Repository> for ThreadSafeRepository` conversion.
    self.into()
}
}

// Configure the pack- and object-caches of the object database handle from the cached configuration.
// Without the `max-performance-safe` feature this is a no-op, hence the lint allowance.
// NOTE(review): some `Box` type arguments here were stripped by text extraction
// (`Box::>::default()`, `-> Box`) — confirm the exact cache types against upstream.
#[cfg_attr(not(feature = "max-performance-safe"), allow(unused_variables, unused_mut))]
pub(crate) fn setup_objects(objects: &mut crate::OdbHandle, config: &crate::config::Cache) {
    #[cfg(feature = "max-performance-safe")]
    {
        match config.pack_cache_bytes {
            None => match config.static_pack_cache_limit_bytes {
                // No limits configured - use the default pack cache.
                None => objects.set_pack_cache(|| Box::>::default()),
                Some(limit) => {
                    objects.set_pack_cache(move || Box::new(gix_pack::cache::lru::StaticLinkedList::<64>::new(limit)));
                }
            },
            // A configured value of 0 disables the pack cache entirely.
            Some(0) => objects.unset_pack_cache(),
            Some(bytes) => objects.set_pack_cache(move || -> Box {
                Box::new(gix_pack::cache::lru::MemoryCappedHashmap::new(bytes))
            }),
        };
        if config.object_cache_bytes == 0 {
            objects.unset_object_cache();
        } else {
            let bytes = config.object_cache_bytes;
            objects.set_object_cache(move || Box::new(gix_pack::cache::object::MemoryCappedHashmap::new(bytes)));
        }
    }
}
gix-0.69.1/src/repository/kind.rs000064400000000000000000000015371046102023000150170ustar 00000000000000use crate::repository::Kind;

impl Kind {
    /// Returns true if this is a bare repository, one without a work tree.
    pub fn is_bare(&self) -> bool {
        matches!(self, Kind::Bare)
    }
}

// NOTE(review): the `From` source type parameter was stripped by extraction; judging by the
// body this is `From<gix_discover::repository::Kind>` — confirm against upstream.
impl From for Kind {
    fn from(v: gix_discover::repository::Kind) -> Self {
        match v {
            // Submodule git dirs count as (non-linked) worktree repositories.
            gix_discover::repository::Kind::Submodule { .. } | gix_discover::repository::Kind::SubmoduleGitDir => {
                Kind::WorkTree { is_linked: false }
            }
            gix_discover::repository::Kind::PossiblyBare => Kind::Bare,
            gix_discover::repository::Kind::WorkTreeGitDir { ..
} => Kind::WorkTree { is_linked: true },
gix_discover::repository::Kind::WorkTree { linked_git_dir } => Kind::WorkTree {
    // A present linked git dir means this worktree is linked to a main repository.
    is_linked: linked_git_dir.is_some(),
},
}
}
}
gix-0.69.1/src/repository/location.rs000064400000000000000000000071771046102023000157040ustar 00000000000000use std::path::{Path, PathBuf};

use gix_path::realpath::MAX_SYMLINKS;

impl crate::Repository {
    /// Return the path to the repository itself, containing objects, references, configuration, and more.
    ///
    /// Synonymous to [`path()`][crate::Repository::path()].
    pub fn git_dir(&self) -> &std::path::Path {
        self.refs.git_dir()
    }

    /// The trust we place in the git-dir, with lower amounts of trust causing access to configuration to be limited.
    pub fn git_dir_trust(&self) -> gix_sec::Trust {
        self.options.git_dir_trust.expect("definitely set by now")
    }

    /// Return the current working directory as present during the instantiation of this repository.
    ///
    /// Note that this should be preferred over manually obtaining it as this may have been adjusted to
    /// deal with `core.precomposeUnicode`.
    pub fn current_dir(&self) -> &Path {
        self.options
            .current_dir
            .as_deref()
            .expect("BUG: cwd is always set after instantiation")
    }

    /// Returns the main git repository if this is a repository on a linked work-tree, or the `git_dir` itself.
    pub fn common_dir(&self) -> &std::path::Path {
        self.common_dir.as_deref().unwrap_or_else(|| self.git_dir())
    }

    /// Return the path to the worktree index file, which may or may not exist.
    pub fn index_path(&self) -> PathBuf {
        self.git_dir().join("index")
    }

    /// The path to the `.gitmodules` file in the worktree, if a worktree is available.
    #[cfg(feature = "attributes")]
    pub fn modules_path(&self) -> Option<PathBuf> {
        self.work_dir().map(|wtd| wtd.join(crate::submodule::MODULES_FILE))
    }

    /// The path to the `.git` directory itself, or equivalent if this is a bare repository.
pub fn path(&self) -> &std::path::Path { self.git_dir() } /// Return the work tree containing all checked out files, if there is one. #[doc(alias = "workdir", alias = "git2")] pub fn work_dir(&self) -> Option<&std::path::Path> { self.work_tree.as_deref() } // TODO: tests, respect precomposeUnicode /// The directory of the binary path of the current process. pub fn install_dir(&self) -> std::io::Result { crate::path::install_dir() } /// Returns the relative path which is the components between the working tree and the current working dir (CWD). /// Note that it may be `None` if there is no work tree, or if CWD isn't inside of the working tree directory. /// /// Note that the CWD is obtained once upon instantiation of the repository. // TODO: tests, details - there is a lot about environment variables to change things around. pub fn prefix(&self) -> Result, gix_path::realpath::Error> { let (root, current_dir) = match self.work_dir().zip(self.options.current_dir.as_deref()) { Some((work_dir, cwd)) => (work_dir, cwd), None => return Ok(None), }; let root = gix_path::realpath_opts(root, current_dir, MAX_SYMLINKS)?; Ok(current_dir.strip_prefix(&root).ok()) } /// Return the kind of repository, either bare or one with a work tree. pub fn kind(&self) -> crate::repository::Kind { match self.worktree() { Some(wt) => { if gix_discover::is_submodule_git_dir(self.git_dir()) { crate::repository::Kind::Submodule } else { crate::repository::Kind::WorkTree { is_linked: !wt.is_main(), } } } None => crate::repository::Kind::Bare, } } } gix-0.69.1/src/repository/mailmap.rs000064400000000000000000000073241046102023000155120ustar 00000000000000use crate::config::tree::{Key, Mailmap}; use crate::Id; impl crate::Repository { // TODO: tests /// Similar to [`open_mailmap_into()`][crate::Repository::open_mailmap_into()], but ignores all errors and returns at worst /// an empty mailmap, e.g. if there is no mailmap or if there were errors loading them. 
/// /// This represents typical usage within git, which also works with what's there without considering a populated mailmap /// a reason to abort an operation, considering it optional. pub fn open_mailmap(&self) -> gix_mailmap::Snapshot { let mut out = gix_mailmap::Snapshot::default(); self.open_mailmap_into(&mut out).ok(); out } // TODO: tests /// Try to merge mailmaps from the following locations into `target`: /// /// - read the `.mailmap` file without following symlinks from the working tree, if present /// - OR read `HEAD:.mailmap` if this repository is bare (i.e. has no working tree), if the `mailmap.blob` is not set. /// - read the mailmap as configured in `mailmap.blob`, if set. /// - read the file as configured by `mailmap.file`, following symlinks, if set. /// /// Only the first error will be reported, and as many source mailmaps will be merged into `target` as possible. /// Parsing errors will be ignored. pub fn open_mailmap_into(&self, target: &mut gix_mailmap::Snapshot) -> Result<(), crate::mailmap::load::Error> { let mut err = None::; let mut buf = Vec::new(); let mut blob_id = self.config.resolved.string(Mailmap::BLOB).and_then(|spec| { self.rev_parse_single(spec.as_ref()) .map_err(|e| err.get_or_insert(e.into())) .map(Id::detach) .ok() }); match self.work_dir() { None => { blob_id = blob_id.or_else(|| { self.head().ok().and_then(|mut head| { let commit = head.peel_to_commit_in_place().ok()?; let tree = commit.tree().ok()?; tree.find_entry(".mailmap").map(|e| e.object_id()) }) }); } Some(root) => { if let Ok(mut file) = gix_features::fs::open_options_no_follow() .read(true) .open(root.join(".mailmap")) .map_err(|e| { if e.kind() != std::io::ErrorKind::NotFound { err.get_or_insert(e.into()); } }) { buf.clear(); std::io::copy(&mut file, &mut buf) .map_err(|e| err.get_or_insert(e.into())) .ok(); target.merge(gix_mailmap::parse_ignore_errors(&buf)); } } } if let Some(blob) = blob_id.and_then(|id| self.find_object(id).map_err(|e| 
err.get_or_insert(e.into())).ok()) { target.merge(gix_mailmap::parse_ignore_errors(&blob.data)); } let configured_path = self .config_snapshot() .trusted_path(Mailmap::FILE.logical_name().as_str()) .and_then(|res| res.map_err(|e| err.get_or_insert(e.into())).ok()); if let Some(mut file) = configured_path.and_then(|path| std::fs::File::open(path).map_err(|e| err.get_or_insert(e.into())).ok()) { buf.clear(); std::io::copy(&mut file, &mut buf) .map_err(|e| err.get_or_insert(e.into())) .ok(); target.merge(gix_mailmap::parse_ignore_errors(&buf)); } err.map_or(Ok(()), Err) } } gix-0.69.1/src/repository/merge.rs000064400000000000000000000322501046102023000151650ustar 00000000000000use crate::config::cache::util::ApplyLeniencyDefault; use crate::config::tree; use crate::prelude::ObjectIdExt; use crate::repository::{ blob_merge_options, merge_commits, merge_resource_cache, merge_trees, tree_merge_options, virtual_merge_base, virtual_merge_base_with_graph, }; use crate::Repository; use gix_merge::blob::builtin_driver::text; use gix_object::Write; use std::borrow::Cow; /// Merge-utilities impl Repository { /// Create a resource cache that can hold the three resources needed for a three-way merge. `worktree_roots` /// determines which side of the merge is read from the worktree, or from which worktree. /// /// The platform can be used to set up resources and finally perform a merge among blobs. /// /// Note that the current index is used for attribute queries. pub fn merge_resource_cache( &self, worktree_roots: gix_merge::blob::pipeline::WorktreeRoots, ) -> Result { let index = self.index_or_load_from_head_or_empty()?; let mode = { let renormalize = self .config .resolved .boolean(&tree::Merge::RENORMALIZE) .map(|res| { tree::Merge::RENORMALIZE .enrich_error(res) .with_lenient_default(self.config.lenient_config) }) .transpose()? 
.unwrap_or_default(); if renormalize { gix_merge::blob::pipeline::Mode::Renormalize } else { gix_merge::blob::pipeline::Mode::ToGit } }; let attrs = self .attributes_only( &index, if worktree_roots.is_unset() { gix_worktree::stack::state::attributes::Source::IdMapping } else { gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping }, )? .inner; let filter = gix_filter::Pipeline::new(self.command_context()?, crate::filter::Pipeline::options(self)?); let filter = gix_merge::blob::Pipeline::new(worktree_roots, filter, self.config.merge_pipeline_options()?); let options = gix_merge::blob::platform::Options { default_driver: self.config.resolved.string(&tree::Merge::DEFAULT).map(Cow::into_owned), }; let drivers = self.config.merge_drivers()?; Ok(gix_merge::blob::Platform::new(filter, mode, attrs, drivers, options)) } /// Return options for use with [`gix_merge::blob::PlatformRef::merge()`], accessible through /// [merge_resource_cache()](Self::merge_resource_cache). pub fn blob_merge_options(&self) -> Result { Ok(gix_merge::blob::platform::merge::Options { is_virtual_ancestor: false, resolve_binary_with: None, text: gix_merge::blob::builtin_driver::text::Options { diff_algorithm: self.diff_algorithm()?, conflict: text::Conflict::Keep { style: self .config .resolved .string(&tree::Merge::CONFLICT_STYLE) .map(|value| { tree::Merge::CONFLICT_STYLE .try_into_conflict_style(value) .with_lenient_default(self.config.lenient_config) }) .transpose()? .unwrap_or_default(), marker_size: text::Conflict::DEFAULT_MARKER_SIZE.try_into().unwrap(), }, }, }) } /// Read all relevant configuration options to instantiate options for use in [`merge_trees()`](Self::merge_trees). 
pub fn tree_merge_options(&self) -> Result { let (mut rewrites, mut is_configured) = crate::diff::utils::new_rewrites_inner( &self.config.resolved, self.config.lenient_config, &tree::Merge::RENAMES, &tree::Merge::RENAME_LIMIT, )?; if !is_configured { (rewrites, is_configured) = crate::diff::utils::new_rewrites(&self.config.resolved, self.config.lenient_config)?; } if !is_configured { rewrites = Some(Default::default()); } Ok(gix_merge::tree::Options { rewrites, blob_merge: self.blob_merge_options()?, blob_merge_command_ctx: self.command_context()?, fail_on_conflict: None, marker_size_multiplier: 0, symlink_conflicts: None, tree_conflicts: None, } .into()) } /// Merge `our_tree` and `their_tree` together, assuming they have the same `ancestor_tree`, to yield a new tree /// which is provided as [tree editor](crate::object::tree::Editor) to inspect and finalize results at will. /// No change to the worktree or index is made, but objects may be written to the object database as merge results /// are stored. /// If these changes should not be observable outside of this instance, consider [enabling object memory](Self::with_object_memory). /// /// Note that `ancestor_tree` can be the [empty tree hash](gix_hash::ObjectId::empty_tree) to indicate no common ancestry. /// /// `labels` are typically chosen to identify the refs or names for `our_tree` and `their_tree` and `ancestor_tree` respectively. /// /// `options` should be initialized with [`tree_merge_options()`](Self::tree_merge_options()). /// /// ### Performance /// /// It's highly recommended to [set an object cache](Repository::compute_object_cache_size_for_tree_diffs) /// to avoid extracting the same object multiple times. 
pub fn merge_trees( &self, ancestor_tree: impl AsRef, our_tree: impl AsRef, their_tree: impl AsRef, labels: gix_merge::blob::builtin_driver::text::Labels<'_>, options: crate::merge::tree::Options, ) -> Result, merge_trees::Error> { let mut diff_cache = self.diff_resource_cache_for_tree_diff()?; let mut blob_merge = self.merge_resource_cache(Default::default())?; let gix_merge::tree::Outcome { tree, conflicts, failed_on_first_unresolved_conflict, } = gix_merge::tree( ancestor_tree.as_ref(), our_tree.as_ref(), their_tree.as_ref(), labels, self, |buf| self.write_buf(gix_object::Kind::Blob, buf), &mut Default::default(), &mut diff_cache, &mut blob_merge, options.into(), )?; let validate = self.config.protect_options()?; Ok(crate::merge::tree::Outcome { tree: crate::object::tree::Editor { inner: tree, validate, repo: self, }, conflicts, failed_on_first_unresolved_conflict, }) } /// Merge `our_commit` and `their_commit` together to yield a new tree which is provided as [tree editor](crate::object::tree::Editor) /// to inspect and finalize results at will. The merge-base will be determined automatically between both commits, along with special /// handling in case there are multiple merge-bases. /// No change to the worktree or index is made, but objects may be written to the object database as merge results /// are stored. /// If these changes should not be observable outside of this instance, consider [enabling object memory](Self::with_object_memory). /// /// `labels` are typically chosen to identify the refs or names for `our_commit` and `their_commit`, with the ancestor being set /// automatically as part of the merge-base handling. /// /// `options` should be initialized with [`Repository::tree_merge_options().into()`](Self::tree_merge_options()). /// /// ### Performance /// /// It's highly recommended to [set an object cache](Repository::compute_object_cache_size_for_tree_diffs) /// to avoid extracting the same object multiple times. 
pub fn merge_commits( &self, our_commit: impl Into, their_commit: impl Into, labels: gix_merge::blob::builtin_driver::text::Labels<'_>, options: crate::merge::commit::Options, ) -> Result, merge_commits::Error> { let mut diff_cache = self.diff_resource_cache_for_tree_diff()?; let mut blob_merge = self.merge_resource_cache(Default::default())?; let commit_graph = self.commit_graph_if_enabled()?; let mut graph = self.revision_graph(commit_graph.as_ref()); let gix_merge::commit::Outcome { tree_merge: gix_merge::tree::Outcome { tree, conflicts, failed_on_first_unresolved_conflict, }, merge_base_tree_id, merge_bases, virtual_merge_bases, } = gix_merge::commit( our_commit.into(), their_commit.into(), labels, &mut graph, &mut diff_cache, &mut blob_merge, self, &mut |id| id.to_owned().attach(self).shorten_or_id().to_string(), options.into(), )?; let validate = self.config.protect_options()?; let tree_merge = crate::merge::tree::Outcome { tree: crate::object::tree::Editor { inner: tree, validate, repo: self, }, conflicts, failed_on_first_unresolved_conflict, }; Ok(crate::merge::commit::Outcome { tree_merge, merge_base_tree_id, merge_bases, virtual_merge_bases, }) } /// Create a single virtual merge-base by merging all `merge_bases` into one. /// If the list is empty, an error will be returned as the histories are then unrelated. /// If there is only one commit in the list, it is returned directly with this case clearly marked in the outcome. /// /// Note that most of `options` are overwritten to match the requirements of a merge-base merge, but they can be useful /// to control the diff algorithm or rewrite tracking, for example. /// /// This method is useful in conjunction with [`Self::merge_trees()`], as the ancestor tree can be produced here. 
// TODO: test pub fn virtual_merge_base( &self, merge_bases: impl IntoIterator>, options: crate::merge::tree::Options, ) -> Result, virtual_merge_base::Error> { let commit_graph = self.commit_graph_if_enabled()?; let mut graph = self.revision_graph(commit_graph.as_ref()); Ok(self.virtual_merge_base_with_graph(merge_bases, &mut graph, options)?) } /// Like [`Self::virtual_merge_base()`], but also allows to reuse a `graph` for faster merge-base calculation, /// particularly if `graph` was used to find the `merge_bases`. pub fn virtual_merge_base_with_graph( &self, merge_bases: impl IntoIterator>, graph: &mut gix_revwalk::Graph<'_, '_, gix_revwalk::graph::Commit>, options: crate::merge::tree::Options, ) -> Result, virtual_merge_base_with_graph::Error> { let mut merge_bases: Vec<_> = merge_bases.into_iter().map(Into::into).collect(); let first = merge_bases .pop() .ok_or(virtual_merge_base_with_graph::Error::MissingCommit)?; let Some(second) = merge_bases.pop() else { let tree_id = self.find_commit(first)?.tree_id()?; let commit_id = first.attach(self); return Ok(crate::merge::virtual_merge_base::Outcome { virtual_merge_bases: Vec::new(), commit_id, tree_id, }); }; let mut diff_cache = self.diff_resource_cache_for_tree_diff()?; let mut blob_merge = self.merge_resource_cache(Default::default())?; let gix_merge::commit::virtual_merge_base::Outcome { virtual_merge_bases, commit_id, tree_id, } = gix_merge::commit::virtual_merge_base( first, second, merge_bases, graph, &mut diff_cache, &mut blob_merge, self, &mut |id| id.to_owned().attach(self).shorten_or_id().to_string(), options.into(), )?; Ok(crate::merge::virtual_merge_base::Outcome { virtual_merge_bases: virtual_merge_bases.into_iter().map(|id| id.attach(self)).collect(), commit_id: commit_id.attach(self), tree_id: tree_id.attach(self), }) } } gix-0.69.1/src/repository/mod.rs000064400000000000000000000363651046102023000146600ustar 00000000000000//! #![allow(clippy::empty_docs)] /// The kind of repository. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum Kind { /// A submodule worktree, whose `git` repository lives in `.git/modules/**/` of the parent repository. /// /// Note that 'old-form' submodule will register as `Worktree {is_linked: false}`. Submodule, /// A bare repository does not have a work tree, that is files on disk beyond the `git` repository itself. Bare, /// A `git` repository along with a checked out files in a work tree. WorkTree { /// If true, this is the git dir associated with this _linked_ worktree, otherwise it is a repository with _main_ worktree. is_linked: bool, }, } #[cfg(any(feature = "attributes", feature = "excludes"))] pub mod attributes; mod cache; mod config; /// #[cfg(feature = "blob-diff")] mod diff; /// #[cfg(feature = "dirwalk")] mod dirwalk; /// #[cfg(feature = "attributes")] pub mod filter; /// pub mod freelist; mod graph; pub(crate) mod identity; mod impls; #[cfg(feature = "index")] mod index; pub(crate) mod init; mod kind; mod location; #[cfg(feature = "mailmap")] mod mailmap; /// #[cfg(feature = "merge")] mod merge; mod object; #[cfg(feature = "attributes")] mod pathspec; mod reference; mod remote; mod revision; mod shallow; mod state; #[cfg(feature = "attributes")] mod submodule; mod thread_safe; mod worktree; /// #[cfg(feature = "blob-diff")] pub mod diff_tree_to_tree { /// The error returned by [Repository::diff_tree_to_tree()](crate::Repository::diff_tree_to_tree()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] DiffOptions(#[from] crate::diff::options::init::Error), #[error(transparent)] CreateResourceCache(#[from] super::diff_resource_cache::Error), #[error(transparent)] TreeDiff(#[from] gix_diff::tree_with_rewrites::Error), } } /// #[cfg(feature = "merge")] pub mod blob_merge_options { /// The error returned by [Repository::blob_merge_options()](crate::Repository::blob_merge_options()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] DiffAlgorithm(#[from] crate::config::diff::algorithm::Error), #[error(transparent)] ConflictStyle(#[from] crate::config::key::GenericErrorWithValue), } } /// #[cfg(feature = "merge")] pub mod merge_resource_cache { /// The error returned by [Repository::merge_resource_cache()](crate::Repository::merge_resource_cache()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] RenormalizeConfig(#[from] crate::config::boolean::Error), #[error(transparent)] PipelineOptions(#[from] crate::config::merge::pipeline_options::Error), #[error(transparent)] Index(#[from] crate::repository::index_or_load_from_head_or_empty::Error), #[error(transparent)] AttributeStack(#[from] crate::config::attribute_stack::Error), #[error(transparent)] CommandContext(#[from] crate::config::command_context::Error), #[error(transparent)] FilterPipeline(#[from] crate::filter::pipeline::options::Error), #[error(transparent)] DriversConfig(#[from] crate::config::merge::drivers::Error), } } /// #[cfg(feature = "merge")] pub mod merge_trees { /// The error returned by [Repository::merge_trees()](crate::Repository::merge_trees()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] MergeResourceCache(#[from] super::merge_resource_cache::Error), #[error(transparent)] DiffResourceCache(#[from] super::diff_resource_cache::Error), #[error(transparent)] TreeMerge(#[from] gix_merge::tree::Error), #[error(transparent)] ValidationOptions(#[from] crate::config::boolean::Error), } } /// #[cfg(feature = "merge")] pub mod merge_commits { /// The error returned by [Repository::merge_commits()](crate::Repository::merge_commits()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenCommitGraph(#[from] super::commit_graph_if_enabled::Error), #[error(transparent)] MergeResourceCache(#[from] super::merge_resource_cache::Error), #[error(transparent)] DiffResourceCache(#[from] super::diff_resource_cache::Error), #[error(transparent)] CommitMerge(#[from] gix_merge::commit::Error), #[error(transparent)] ValidationOptions(#[from] crate::config::boolean::Error), } } /// #[cfg(feature = "merge")] pub mod virtual_merge_base { /// The error returned by [Repository::virtual_merge_base()](crate::Repository::virtual_merge_base()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenCommitGraph(#[from] super::commit_graph_if_enabled::Error), #[error(transparent)] VirtualMergeBase(#[from] super::virtual_merge_base_with_graph::Error), } } /// #[cfg(feature = "merge")] pub mod virtual_merge_base_with_graph { /// The error returned by [Repository::virtual_merge_base_with_graph()](crate::Repository::virtual_merge_base_with_graph()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("No commit was provided as merge-base")] MissingCommit, #[error(transparent)] MergeResourceCache(#[from] super::merge_resource_cache::Error), #[error(transparent)] DiffResourceCache(#[from] super::diff_resource_cache::Error), #[error(transparent)] CommitMerge(#[from] gix_merge::commit::Error), #[error(transparent)] FindCommit(#[from] crate::object::find::existing::with_conversion::Error), #[error(transparent)] DecodeCommit(#[from] gix_object::decode::Error), } } /// #[cfg(feature = "revision")] pub mod merge_base_octopus_with_graph { /// The error returned by [Repository::merge_base_octopus_with_graph()](crate::Repository::merge_base_octopus_with_graph()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("No commit was provided")] MissingCommit, #[error("No merge base was found between the given commits")] NoMergeBase, #[error(transparent)] MergeBase(#[from] gix_revision::merge_base::Error), } } /// #[cfg(feature = "revision")] pub mod merge_base_octopus { /// The error returned by [Repository::merge_base_octopus()](crate::Repository::merge_base_octopus()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenCache(#[from] crate::repository::commit_graph_if_enabled::Error), #[error(transparent)] MergeBaseOctopus(#[from] super::merge_base_octopus_with_graph::Error), } } /// #[cfg(feature = "merge")] pub mod tree_merge_options { /// The error returned by [Repository::tree_merge_options()](crate::Repository::tree_merge_options()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] BlobMergeOptions(#[from] super::blob_merge_options::Error), #[error(transparent)] RewritesConfig(#[from] crate::diff::new_rewrites::Error), #[error(transparent)] CommandContext(#[from] crate::config::command_context::Error), } } /// #[cfg(feature = "blob-diff")] pub mod diff_resource_cache { /// The error returned by [Repository::diff_resource_cache()](crate::Repository::diff_resource_cache()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not obtain resource cache for diffing")] ResourceCache(#[from] crate::diff::resource_cache::Error), #[error(transparent)] Index(#[from] crate::repository::index_or_load_from_head_or_empty::Error), #[error(transparent)] AttributeStack(#[from] crate::config::attribute_stack::Error), } } /// #[cfg(feature = "tree-editor")] pub mod edit_tree { /// The error returned by [Repository::edit_tree()](crate::Repository::edit_tree). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindTree(#[from] crate::object::find::existing::with_conversion::Error), #[error(transparent)] InitEditor(#[from] crate::object::tree::editor::init::Error), } } /// #[cfg(feature = "revision")] pub mod merge_base { /// The error returned by [Repository::merge_base()](crate::Repository::merge_base()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenCache(#[from] crate::repository::commit_graph_if_enabled::Error), #[error(transparent)] FindMergeBase(#[from] gix_revision::merge_base::Error), #[error("Could not find a merge-base between commits {first} and {second}")] NotFound { first: gix_hash::ObjectId, second: gix_hash::ObjectId, }, } } /// #[cfg(feature = "revision")] pub mod merge_base_with_graph { /// The error returned by [Repository::merge_base_with_cache()](crate::Repository::merge_base_with_graph()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindMergeBase(#[from] gix_revision::merge_base::Error), #[error("Could not find a merge-base between commits {first} and {second}")] NotFound { first: gix_hash::ObjectId, second: gix_hash::ObjectId, }, } } /// pub mod commit_graph_if_enabled { /// The error returned by [Repository::commit_graph_if_enabled()](crate::Repository::commit_graph_if_enabled()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigBoolean(#[from] crate::config::boolean::Error), #[error(transparent)] OpenCommitGraph(#[from] gix_commitgraph::init::Error), } } /// #[cfg(feature = "index")] pub mod index_from_tree { /// The error returned by [Repository::index_from_tree()](crate::Repository::index_from_tree). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not create index from tree at {id}")] IndexFromTree { id: gix_hash::ObjectId, source: gix_index::init::from_tree::Error, }, #[error("Couldn't obtain configuration for core.protect*")] BooleanConfig(#[from] crate::config::boolean::Error), } } /// pub mod branch_remote_ref_name { /// The error returned by [Repository::branch_remote_ref_name()](crate::Repository::branch_remote_ref_name()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The configured name of the remote ref to merge wasn't valid")] ValidateFetchRemoteRefName(#[from] gix_validate::reference::name::Error), #[error(transparent)] PushDefault(#[from] crate::config::key::GenericErrorWithValue), #[error(transparent)] FindPushRemote(#[from] crate::remote::find::existing::Error), } } /// pub mod branch_remote_tracking_ref_name { /// The error returned by [Repository::branch_remote_tracking_ref_name()](crate::Repository::branch_remote_tracking_ref_name()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("The name of the tracking reference was invalid")] ValidateTrackingRef(#[from] gix_validate::reference::name::Error), #[error("Could not get the remote reference to translate into the local tracking branch")] RemoteRef(#[from] super::branch_remote_ref_name::Error), #[error("Couldn't find remote to obtain fetch-specs for mapping to the tracking reference")] FindRemote(#[from] crate::remote::find::existing::Error), } } /// #[cfg(feature = "attributes")] pub mod pathspec_defaults_ignore_case { /// The error returned by [Repository::pathspec_defaults_ignore_case()](crate::Repository::pathspec_defaults_inherit_ignore_case()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Filesystem configuration could not be obtained to learn about case sensitivity")] FilesystemConfig(#[from] crate::config::boolean::Error), #[error(transparent)] Defaults(#[from] gix_pathspec::defaults::from_environment::Error), } } /// #[cfg(feature = "index")] pub mod index_or_load_from_head { /// The error returned by [`Repository::index_or_load_from_head()`](crate::Repository::index_or_load_from_head()). #[derive(thiserror::Error, Debug)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] HeadCommit(#[from] crate::reference::head_commit::Error), #[error(transparent)] TreeId(#[from] gix_object::decode::Error), #[error(transparent)] TraverseTree(#[from] crate::repository::index_from_tree::Error), #[error(transparent)] OpenIndex(#[from] crate::worktree::open_index::Error), } } /// #[cfg(feature = "index")] pub mod index_or_load_from_head_or_empty { /// The error returned by [`Repository::index_or_load_from_head_or_empty()`](crate::Repository::index_or_load_from_head_or_empty()). #[derive(thiserror::Error, Debug)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ReadHead(#[from] crate::reference::find::existing::Error), #[error(transparent)] FindCommit(#[from] crate::object::find::existing::Error), #[error(transparent)] PeelToTree(#[from] crate::object::peel::to_kind::Error), #[error(transparent)] TreeId(#[from] gix_object::decode::Error), #[error(transparent)] TraverseTree(#[from] crate::repository::index_from_tree::Error), #[error(transparent)] OpenIndex(#[from] crate::worktree::open_index::Error), } } /// #[cfg(feature = "worktree-stream")] pub mod worktree_stream { /// The error returned by [`Repository::worktree_stream()`](crate::Repository::worktree_stream()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] FindTree(#[from] crate::object::find::existing::Error), #[error(transparent)] OpenTree(#[from] crate::repository::index_from_tree::Error), #[error(transparent)] AttributesCache(#[from] crate::config::attribute_stack::Error), #[error(transparent)] FilterPipeline(#[from] crate::filter::pipeline::options::Error), #[error(transparent)] CommandContext(#[from] crate::config::command_context::Error), #[error("Needed {id} to be a tree to turn into a workspace stream, got {actual}")] NotATree { id: gix_hash::ObjectId, actual: gix_object::Kind, }, } } /// #[cfg(feature = "worktree-archive")] pub mod worktree_archive { /// The error returned by [`Repository::worktree_archive()`](crate::Repository::worktree_archive()). pub type Error = gix_archive::Error; } gix-0.69.1/src/repository/object.rs000064400000000000000000000367131046102023000153440ustar 00000000000000#![allow(clippy::result_large_err)] use std::ops::DerefMut; use gix_hash::ObjectId; use gix_object::{Exists, Find, FindExt, Write}; use gix_odb::{Header, HeaderExt}; use gix_ref::{ transaction::{LogChange, PreviousValue, RefLog}, FullName, }; use smallvec::SmallVec; use crate::{commit, ext::ObjectIdExt, object, tag, Blob, Commit, Id, Object, Reference, Tag, Tree}; /// Tree editing #[cfg(feature = "tree-editor")] impl crate::Repository { /// Return an editor for adjusting the tree at `id`. /// /// This can be the [empty tree id](ObjectId::empty_tree) to build a tree from scratch. #[doc(alias = "treebuilder", alias = "git2")] pub fn edit_tree( &self, id: impl Into, ) -> Result, crate::repository::edit_tree::Error> { let tree = self.find_tree(id)?; Ok(tree.edit()?) } } /// Find objects of various kins impl crate::Repository { /// Find the object with `id` in the object database or return an error if it could not be found. 
/// /// There are various legitimate reasons for an object to not be present, which is why /// [`try_find_object(…)`][crate::Repository::try_find_object()] might be preferable instead. /// /// # Performance Note /// /// In order to get the kind of the object, is must be fully decoded from storage if it is packed with deltas. /// Loose object could be partially decoded, even though that's not implemented. pub fn find_object(&self, id: impl Into) -> Result, object::find::existing::Error> { let id = id.into(); if id == ObjectId::empty_tree(self.object_hash()) { return Ok(Object { id, kind: gix_object::Kind::Tree, data: Vec::new(), repo: self, }); } let mut buf = self.free_buf(); let kind = self.objects.find(&id, &mut buf)?.kind; Ok(Object::from_data(id, kind, buf, self)) } /// Find a commit with `id` or fail if there was no object or the object wasn't a commit. pub fn find_commit( &self, id: impl Into, ) -> Result, object::find::existing::with_conversion::Error> { Ok(self.find_object(id)?.try_into_commit()?) } /// Find a tree with `id` or fail if there was no object or the object wasn't a tree. pub fn find_tree( &self, id: impl Into, ) -> Result, object::find::existing::with_conversion::Error> { Ok(self.find_object(id)?.try_into_tree()?) } /// Find an annotated tag with `id` or fail if there was no object or the object wasn't a tag. pub fn find_tag(&self, id: impl Into) -> Result, object::find::existing::with_conversion::Error> { Ok(self.find_object(id)?.try_into_tag()?) } /// Find a blob with `id` or fail if there was no object or the object wasn't a blob. pub fn find_blob( &self, id: impl Into, ) -> Result, object::find::existing::with_conversion::Error> { Ok(self.find_object(id)?.try_into_blob()?) } /// Obtain information about an object without fully decoding it, or fail if the object doesn't exist. /// /// Note that despite being cheaper than [`Self::find_object()`], there is still some effort traversing delta-chains. 
#[doc(alias = "read_header", alias = "git2")] pub fn find_header(&self, id: impl Into) -> Result { let id = id.into(); if id == ObjectId::empty_tree(self.object_hash()) { return Ok(gix_odb::find::Header::Loose { kind: gix_object::Kind::Tree, size: 0, }); } self.objects.header(id) } /// Return `true` if `id` exists in the object database. /// /// # Performance /// /// This method can be slow if the underlying [object database](crate::Repository::objects) has /// an unsuitable [RefreshMode](gix_odb::store::RefreshMode) and `id` is not likely to exist. /// Use [`repo.objects.refresh_never()`](gix_odb::store::Handle::refresh_never) to avoid expensive /// IO-bound refreshes if an object wasn't found. #[doc(alias = "exists", alias = "git2")] pub fn has_object(&self, id: impl AsRef) -> bool { let id = id.as_ref(); if id.to_owned().is_empty_tree() { true } else { self.objects.exists(id) } } /// Obtain information about an object without fully decoding it, or `None` if the object doesn't exist. /// /// Note that despite being cheaper than [`Self::try_find_object()`], there is still some effort traversing delta-chains. pub fn try_find_header( &self, id: impl Into, ) -> Result, object::find::Error> { let id = id.into(); if id == ObjectId::empty_tree(self.object_hash()) { return Ok(Some(gix_odb::find::Header::Loose { kind: gix_object::Kind::Tree, size: 0, })); } self.objects.try_header(&id).map_err(Into::into) } /// Try to find the object with `id` or return `None` if it wasn't found. pub fn try_find_object(&self, id: impl Into) -> Result>, object::find::Error> { let id = id.into(); if id == ObjectId::empty_tree(self.object_hash()) { return Ok(Some(Object { id, kind: gix_object::Kind::Tree, data: Vec::new(), repo: self, })); } let mut buf = self.free_buf(); match self.objects.try_find(&id, &mut buf)? { Some(obj) => { let kind = obj.kind; Ok(Some(Object::from_data(id, kind, buf, self))) } None => Ok(None), } } } /// Write objects of any type. 
impl crate::Repository { /// Write the given object into the object database and return its object id. /// /// Note that we hash the object in memory to avoid storing objects that are already present. That way, /// we avoid writing duplicate objects using slow disks that will eventually have to be garbage collected. pub fn write_object(&self, object: impl gix_object::WriteTo) -> Result, object::write::Error> { let mut buf = self.empty_reusable_buffer(); object .write_to(buf.deref_mut()) .map_err(|err| Box::new(err) as Box)?; self.write_object_inner(&buf, object.kind()) } fn write_object_inner(&self, buf: &[u8], kind: gix_object::Kind) -> Result, object::write::Error> { let oid = gix_object::compute_hash(self.object_hash(), kind, buf); if self.objects.exists(&oid) { return Ok(oid.attach(self)); } self.objects .write_buf(kind, buf) .map(|oid| oid.attach(self)) .map_err(Into::into) } /// Write a blob from the given `bytes`. /// /// We avoid writing duplicate objects to slow disks that will eventually have to be garbage collected by /// pre-hashing the data, and checking if the object is already present. pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result, object::write::Error> { let bytes = bytes.as_ref(); let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, bytes); if self.objects.exists(&oid) { return Ok(oid.attach(self)); } self.objects .write_buf(gix_object::Kind::Blob, bytes) .map_err(Into::into) .map(|oid| oid.attach(self)) } /// Write a blob from the given `Read` implementation. /// /// Note that we hash the object in memory to avoid storing objects that are already present. That way, /// we avoid writing duplicate objects using slow disks that will eventually have to be garbage collected. /// /// If that is prohibitive, use the object database directly. 
pub fn write_blob_stream( &self, mut bytes: impl std::io::Read + std::io::Seek, ) -> Result, object::write::Error> { let mut buf = self.empty_reusable_buffer(); std::io::copy(&mut bytes, buf.deref_mut()).expect("write to memory works"); self.write_blob_stream_inner(&buf) } fn write_blob_stream_inner(&self, buf: &[u8]) -> Result, object::write::Error> { let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, buf); if self.objects.exists(&oid) { return Ok(oid.attach(self)); } self.objects .write_buf(gix_object::Kind::Blob, buf) .map_err(Into::into) .map(|oid| oid.attach(self)) } } /// Create commits and tags impl crate::Repository { /// Create a tag reference named `name` (without `refs/tags/` prefix) pointing to a newly created tag object /// which in turn points to `target` and return the newly created reference. /// /// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist] /// or to [force overwriting a possibly existing tag](PreviousValue::Any). pub fn tag( &self, name: impl AsRef, target: impl AsRef, target_kind: gix_object::Kind, tagger: Option>, message: impl AsRef, constraint: PreviousValue, ) -> Result, tag::Error> { let tag = gix_object::Tag { target: target.as_ref().into(), target_kind, name: name.as_ref().into(), tagger: tagger.map(|t| t.to_owned()), message: message.as_ref().into(), pgp_signature: None, }; let tag_id = self.write_object(&tag)?; self.tag_reference(name, tag_id, constraint).map_err(Into::into) } /// Similar to [`commit(…)`][crate::Repository::commit()], but allows to create the commit with `committer` and `author` specified. /// /// This forces setting the commit time and author time by hand. Note that typically, committer and author are the same. 
pub fn commit_as<'a, 'c, Name, E>( &self, committer: impl Into>, author: impl Into>, reference: Name, message: impl AsRef, tree: impl Into, parents: impl IntoIterator>, ) -> Result, commit::Error> where Name: TryInto, commit::Error: From, { self.commit_as_inner( committer.into(), author.into(), reference.try_into()?, message.as_ref(), tree.into(), parents.into_iter().map(Into::into).collect(), ) } fn commit_as_inner( &self, committer: gix_actor::SignatureRef<'_>, author: gix_actor::SignatureRef<'_>, reference: FullName, message: &str, tree: ObjectId, parents: SmallVec<[ObjectId; 1]>, ) -> Result, commit::Error> { use gix_ref::{ transaction::{Change, RefEdit}, Target, }; // TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway. // This can be made vastly more efficient though if we wanted to, so we lie in the API let commit = gix_object::Commit { message: message.into(), tree, author: author.into(), committer: committer.into(), encoding: None, parents, extra_headers: Default::default(), }; let commit_id = self.write_object(&commit)?; self.edit_reference(RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: crate::reference::log::message("commit", commit.message.as_ref(), commit.parents.len()), }, expected: match commit.parents.first().map(|p| Target::Object(*p)) { Some(previous) => { if reference.as_bstr() == "HEAD" { PreviousValue::MustExistAndMatch(previous) } else { PreviousValue::ExistingMustMatch(previous) } } None => PreviousValue::MustNotExist, }, new: Target::Object(commit_id.inner), }, name: reference, deref: true, })?; Ok(commit_id) } /// Create a new commit object with `message` referring to `tree` with `parents`, and point `reference` /// to it. The commit is written without message encoding field, which can be assumed to be UTF-8. 
/// `author` and `committer` fields are pre-set from the configuration, which can be altered /// [temporarily][crate::Repository::config_snapshot_mut()] before the call if required. /// /// `reference` will be created if it doesn't exist, and can be `"HEAD"` to automatically write-through to the symbolic reference /// that `HEAD` points to if it is not detached. For this reason, detached head states cannot be created unless the `HEAD` is detached /// already. The reflog will be written as canonical git would do, like ` ():

`. /// /// The first parent id in `parents` is expected to be the current target of `reference` and the operation will fail if it is not. /// If there is no parent, the `reference` is expected to not exist yet. /// /// The method fails immediately if a `reference` lock can't be acquired. pub fn commit( &self, reference: Name, message: impl AsRef, tree: impl Into, parents: impl IntoIterator>, ) -> Result, commit::Error> where Name: TryInto, commit::Error: From, { let author = self.author().ok_or(commit::Error::AuthorMissing)??; let committer = self.committer().ok_or(commit::Error::CommitterMissing)??; self.commit_as(committer, author, reference, message, tree, parents) } /// Return an empty tree object, suitable for [getting changes](Tree::changes()). /// /// Note that the returned object is special and doesn't necessarily physically exist in the object database. /// This means that this object can be used in an uninitialized, empty repository which would report to have no objects at all. pub fn empty_tree(&self) -> Tree<'_> { self.find_object(ObjectId::empty_tree(self.object_hash())) .expect("always present") .into_tree() } /// Return an empty blob object. /// /// Note that the returned object is special and doesn't necessarily physically exist in the object database. /// This means that this object can be used in an uninitialized, empty repository which would report to have no objects at all. pub fn empty_blob(&self) -> Blob<'_> { Blob { id: gix_hash::ObjectId::empty_blob(self.object_hash()), data: Vec::new(), repo: self, } } } gix-0.69.1/src/repository/pathspec.rs000064400000000000000000000060321046102023000156740ustar 00000000000000use gix_pathspec::MagicSignature; use crate::{bstr::BStr, config::cache::util::ApplyLeniencyDefault, AttributeStack, Pathspec, Repository}; impl Repository { /// Create a new pathspec abstraction that allows to conduct searches using `patterns`. 
/// `inherit_ignore_case` should be `true` if `patterns` will match against files on disk, or `false` otherwise, for more natural matching /// (but also note that `git` does not do that). /// `index` may be needed to load attributes which is required only if `patterns` refer to attributes via `:(attr:…)` syntax. /// In the same vein, `attributes_source` affects where `.gitattributes` files are read from if pathspecs need to match against attributes. /// If `empty_patterns_match_prefix` is `true`, then even empty patterns will match only what's inside of the prefix. Otherwise /// they will match everything. /// /// It will be initialized exactly how it would, and attribute matching will be conducted by reading the worktree first if available. /// If that is not desirable, consider calling [`Pathspec::new()`] directly. #[doc(alias = "Pathspec", alias = "git2")] pub fn pathspec( &self, empty_patterns_match_prefix: bool, patterns: impl IntoIterator>, inherit_ignore_case: bool, index: &gix_index::State, attributes_source: gix_worktree::stack::state::attributes::Source, ) -> Result, crate::pathspec::init::Error> { Pathspec::new(self, empty_patterns_match_prefix, patterns, inherit_ignore_case, || { self.attributes_only(index, attributes_source) .map(AttributeStack::detach) .map_err(Into::into) }) } /// Return default settings that are required when [parsing pathspecs](gix_pathspec::parse()) by hand. /// /// These are stemming from environment variables which have been converted to [config settings](crate::config::tree::gitoxide::Pathspec), /// which now serve as authority for configuration. pub fn pathspec_defaults(&self) -> Result { self.config.pathspec_defaults() } /// Similar to [Self::pathspec_defaults()], but will automatically configure the returned defaults to match case-insensitively if the underlying /// filesystem is also configured to be case-insensitive according to `core.ignoreCase`, and `inherit_ignore_case` is `true`. 
pub fn pathspec_defaults_inherit_ignore_case( &self, inherit_ignore_case: bool, ) -> Result { let mut defaults = self.config.pathspec_defaults()?; if inherit_ignore_case && self .config .fs_capabilities() .with_lenient_default(self.config.lenient_config)? .ignore_case { defaults.signature |= MagicSignature::ICASE; } Ok(defaults) } } gix-0.69.1/src/repository/permissions.rs000064400000000000000000000166441046102023000164520ustar 00000000000000//! Various permissions to define what can be done when operating a [`Repository`][crate::Repository]. use crate::open::Permissions; use gix_sec::Trust; /// Configure from which sources git configuration may be loaded. /// /// Note that configuration from inside of the repository is always loaded as it's definitely required for correctness. #[derive(Copy, Clone, Ord, PartialOrd, PartialEq, Eq, Debug, Hash)] pub struct Config { /// The git binary may come with configuration as part of its configuration, and if this is true (default false) /// we will load the configuration of the git binary, if present and not a duplicate of the ones below. /// /// It's disabled by default as it may involve executing the git binary once per execution of the application. pub git_binary: bool, /// Whether to use the system configuration. /// This is defined as `$(prefix)/etc/gitconfig` on unix. pub system: bool, /// Whether to use the git application configuration. /// /// A platform defined location for where a user's git application configuration should be located. /// If `$XDG_CONFIG_HOME` is not set or empty, `$HOME/.config/git/config` will be used /// on unix. pub git: bool, /// Whether to use the user configuration. /// This is usually `~/.gitconfig` on unix. pub user: bool, /// Whether to use the configuration from environment variables. pub env: bool, /// Whether to follow include files are encountered in loaded configuration, /// via `include` and `includeIf` sections. 
pub includes: bool, } impl Config { /// Allow everything which usually relates to a fully trusted environment pub fn all() -> Self { Config { git_binary: false, system: true, git: true, user: true, env: true, includes: true, } } /// Load only configuration local to the git repository. pub fn isolated() -> Self { Config { git_binary: false, system: false, git: false, user: false, env: false, includes: false, } } } impl Default for Config { fn default() -> Self { Self::all() } } /// Configure from which `gitattribute` files may be loaded. /// /// Note that `.gitattribute` files from within the repository are always loaded. #[derive(Copy, Clone, Ord, PartialOrd, PartialEq, Eq, Debug, Hash)] pub struct Attributes { /// The git binary may come with attribute configuration in its installation directory, and if this is true (default false) /// we will load the configuration of the git binary. /// /// It's disabled by default as it involves executing the git binary once per execution of the application. pub git_binary: bool, /// Whether to use the system configuration. /// This is typically defined as `$(prefix)/etc/gitconfig`. pub system: bool, /// Whether to use the git application configuration. /// /// A platform defined location for where a user's git application configuration should be located. /// If `$XDG_CONFIG_HOME` is not set or empty, `$HOME/.config/git/attributes` will be used /// on unix. pub git: bool, } impl Attributes { /// Allow everything which usually relates to a fully trusted environment pub fn all() -> Self { Attributes { git_binary: false, system: true, git: true, } } /// Allow loading attributes that are local to the git repository. 
pub fn isolated() -> Self { Attributes { git_binary: false, system: false, git: false, } } } impl Default for Attributes { fn default() -> Self { Self::all() } } /// Permissions related to the usage of environment variables #[derive(Debug, Clone, Copy)] pub struct Environment { /// Control whether resources pointed to by `XDG_CONFIG_HOME` can be used when looking up common configuration values. /// /// Note that [`gix_sec::Permission::Forbid`] will cause the operation to abort if a resource is set via the XDG config environment. pub xdg_config_home: gix_sec::Permission, /// Control the way resources pointed to by the home directory (similar to `xdg_config_home`) may be used. pub home: gix_sec::Permission, /// Control if environment variables to configure the HTTP transport, like `http_proxy` may be used. /// /// Note that http-transport related environment variables prefixed with `GIT_` may also be included here /// if they match this category like `GIT_HTTP_USER_AGENT`. pub http_transport: gix_sec::Permission, /// Control if the `EMAIL` environment variables may be read. /// /// Note that identity related environment variables prefixed with `GIT_` may also be included here /// if they match this category. pub identity: gix_sec::Permission, /// Control if environment variables related to the object database are handled. This includes features and performance /// options alike. pub objects: gix_sec::Permission, /// Control if resources pointed to by `GIT_*` prefixed environment variables can be used, **but only** if they /// are not contained in any other category. This is a catch-all section. pub git_prefix: gix_sec::Permission, /// Control if resources pointed to by `SSH_*` prefixed environment variables can be used (like `SSH_ASKPASS`) pub ssh_prefix: gix_sec::Permission, } impl Environment { /// Allow access to the entire environment. 
pub fn all() -> Self { let allow = gix_sec::Permission::Allow; Environment { xdg_config_home: allow, home: allow, git_prefix: allow, ssh_prefix: allow, http_transport: allow, identity: allow, objects: allow, } } /// Don't allow loading any environment variables. pub fn isolated() -> Self { let deny = gix_sec::Permission::Deny; Environment { xdg_config_home: deny, home: deny, ssh_prefix: deny, git_prefix: deny, http_transport: deny, identity: deny, objects: deny, } } } impl Permissions { /// Secure permissions are similar to `all()` pub fn secure() -> Self { Permissions { env: Environment::all(), config: Config::all(), attributes: Attributes::all(), } } /// Everything is allowed with this set of permissions, thus we read all configuration and do what git typically /// does with owned repositories. pub fn all() -> Self { Permissions { env: Environment::all(), config: Config::all(), attributes: Attributes::all(), } } /// Don't read any but the local git configuration and deny reading any environment variables. pub fn isolated() -> Self { Permissions { config: Config::isolated(), attributes: Attributes::isolated(), env: Environment::isolated(), } } } impl gix_sec::trust::DefaultForLevel for Permissions { fn default_for_level(level: Trust) -> Self { match level { Trust::Full => Permissions::all(), Trust::Reduced => Permissions::secure(), } } } impl Default for Permissions { fn default() -> Self { Permissions::secure() } } gix-0.69.1/src/repository/reference.rs000064400000000000000000000276711046102023000160370ustar 00000000000000use gix_hash::ObjectId; use gix_ref::{ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog}, FullName, PartialNameRef, Target, }; use crate::{bstr::BString, ext::ReferenceExt, reference, Reference}; /// Obtain and alter references comfortably impl crate::Repository { /// Create a lightweight tag with given `name` (and without `refs/tags/` prefix) pointing to the given `target`, and return it as reference. 
/// /// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist] /// or to [force overwriting a possibly existing tag](PreviousValue::Any). pub fn tag_reference( &self, name: impl AsRef, target: impl Into, constraint: PreviousValue, ) -> Result, reference::edit::Error> { let id = target.into(); let mut edits = self.edit_reference(RefEdit { change: Change::Update { log: Default::default(), expected: constraint, new: Target::Object(id), }, name: format!("refs/tags/{}", name.as_ref()).try_into()?, deref: false, })?; assert_eq!(edits.len(), 1, "reference splits should ever happen"); let edit = edits.pop().expect("exactly one item"); Ok(Reference { inner: gix_ref::Reference { name: edit.name, target: id.into(), peeled: None, }, repo: self, }) } /// Returns the currently set namespace for references, or `None` if it is not set. /// /// Namespaces allow to partition references, and is configured per `Easy`. pub fn namespace(&self) -> Option<&gix_ref::Namespace> { self.refs.namespace.as_ref() } /// Remove the currently set reference namespace and return it, affecting only this `Easy`. pub fn clear_namespace(&mut self) -> Option { self.refs.namespace.take() } /// Set the reference namespace to the given value, like `"foo"` or `"foo/bar"`. /// /// Note that this value is shared across all `Easy…` instances as the value is stored in the shared `Repository`. pub fn set_namespace<'a, Name, E>( &mut self, namespace: Name, ) -> Result, gix_validate::reference::name::Error> where Name: TryInto<&'a PartialNameRef, Error = E>, gix_validate::reference::name::Error: From, { let namespace = gix_ref::namespace::expand(namespace)?; Ok(self.refs.namespace.replace(namespace)) } // TODO: more tests or usage /// Create a new reference with `name`, like `refs/heads/branch`, pointing to `target`, adhering to `constraint` /// during creation and writing `log_message` into the reflog. 
Note that a ref-log will be written even if `log_message` is empty. /// /// The newly created Reference is returned. pub fn reference( &self, name: Name, target: impl Into, constraint: PreviousValue, log_message: impl Into, ) -> Result, reference::edit::Error> where Name: TryInto, gix_validate::reference::name::Error: From, { self.reference_inner( name.try_into().map_err(gix_validate::reference::name::Error::from)?, target.into(), constraint, log_message.into(), ) } fn reference_inner( &self, name: FullName, id: ObjectId, constraint: PreviousValue, log_message: BString, ) -> Result, reference::edit::Error> { let mut edits = self.edit_reference(RefEdit { change: Change::Update { log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, message: log_message, }, expected: constraint, new: Target::Object(id), }, name, deref: false, })?; assert_eq!( edits.len(), 1, "only one reference can be created, splits aren't possible" ); Ok(gix_ref::Reference { name: edits.pop().expect("exactly one edit").name, target: Target::Object(id), peeled: None, } .attach(self)) } /// Edit a single reference as described in `edit`, and write reference logs as `log_committer`. /// /// One or more `RefEdit`s are returned - symbolic reference splits can cause more edits to be performed. All edits have the previous /// reference values set to the ones encountered at rest after acquiring the respective reference's lock. pub fn edit_reference(&self, edit: RefEdit) -> Result, reference::edit::Error> { self.edit_references(Some(edit)) } /// Edit one or more references as described by their `edits`. /// Note that one can set the committer name for use in the ref-log by temporarily /// [overriding the git-config][crate::Repository::config_snapshot_mut()]. /// /// Returns all reference edits, which might be more than where provided due the splitting of symbolic references, and /// whose previous (_old_) values are the ones seen on in storage after the reference was locked. 
pub fn edit_references( &self, edits: impl IntoIterator, ) -> Result, reference::edit::Error> { let (file_lock_fail, packed_refs_lock_fail) = self.config.lock_timeout()?; self.refs .transaction() .prepare(edits, file_lock_fail, packed_refs_lock_fail)? .commit(self.committer().transpose()?) .map_err(Into::into) } /// Return the repository head, an abstraction to help dealing with the `HEAD` reference. /// /// The `HEAD` reference can be in various states, for more information, the documentation of [`Head`][crate::Head]. pub fn head(&self) -> Result, reference::find::existing::Error> { let head = self.find_reference("HEAD")?; Ok(match head.inner.target { Target::Symbolic(branch) => match self.find_reference(&branch) { Ok(r) => crate::head::Kind::Symbolic(r.detach()), Err(reference::find::existing::Error::NotFound { .. }) => crate::head::Kind::Unborn(branch), Err(err) => return Err(err), }, Target::Object(target) => crate::head::Kind::Detached { target, peeled: head.inner.peeled, }, } .attach(self)) } /// Resolve the `HEAD` reference, follow and peel its target and obtain its object id, /// following symbolic references and tags until a commit is found. /// /// Note that this may fail for various reasons, most notably because the repository /// is freshly initialized and doesn't have any commits yet. /// /// Also note that the returned id is likely to point to a commit, but could also /// point to a tree or blob. It won't, however, point to a tag as these are always peeled. pub fn head_id(&self) -> Result, reference::head_id::Error> { Ok(self.head()?.into_peeled_id()?) } /// Return the name to the symbolic reference `HEAD` points to, or `None` if the head is detached. /// /// The difference to [`head_ref()`][Self::head_ref()] is that the latter requires the reference to exist, /// whereas here we merely return a the name of the possibly unborn reference. 
pub fn head_name(&self) -> Result, reference::find::existing::Error> { Ok(self.head()?.referent_name().map(std::borrow::ToOwned::to_owned)) } /// Return the reference that `HEAD` points to, or `None` if the head is detached or unborn. pub fn head_ref(&self) -> Result>, reference::find::existing::Error> { Ok(self.head()?.try_into_referent()) } /// Return the commit object the `HEAD` reference currently points to after peeling it fully, /// following symbolic references and tags until a commit is found. /// /// Note that this may fail for various reasons, most notably because the repository /// is freshly initialized and doesn't have any commits yet. It could also fail if the /// head does not point to a commit. pub fn head_commit(&self) -> Result, reference::head_commit::Error> { Ok(self.head()?.peel_to_commit_in_place()?) } /// Return the tree id the `HEAD` reference currently points to after peeling it fully, /// following symbolic references and tags until a commit is found. /// /// Note that this may fail for various reasons, most notably because the repository /// is freshly initialized and doesn't have any commits yet. It could also fail if the /// head does not point to a commit. pub fn head_tree_id(&self) -> Result, reference::head_tree_id::Error> { Ok(self.head_commit()?.tree_id()?) } /// Return the tree object the `HEAD^{tree}` reference currently points to after peeling it fully, /// following symbolic references and tags until a tree is found. /// /// Note that this may fail for various reasons, most notably because the repository /// is freshly initialized and doesn't have any commits yet. It could also fail if the /// head does not point to a tree, unlikely but possible. pub fn head_tree(&self) -> Result, reference::head_tree::Error> { Ok(self.head_commit()?.tree()?) } /// Find the reference with the given partial or full `name`, like `main`, `HEAD`, `heads/branch` or `origin/other`, /// or return an error if it wasn't found. 
/// /// Consider [`try_find_reference(…)`][crate::Repository::try_find_reference()] if the reference might not exist /// without that being considered an error. pub fn find_reference<'a, Name, E>(&self, name: Name) -> Result, reference::find::existing::Error> where Name: TryInto<&'a PartialNameRef, Error = E> + Clone, gix_ref::file::find::Error: From, { // TODO: is there a way to just pass `partial_name` to `try_find_reference()`? Compiler freaks out then // as it still wants to see `E` there, not `Infallible`. let partial_name = name .clone() .try_into() .map_err(|err| reference::find::Error::Find(gix_ref::file::find::Error::from(err)))?; self.try_find_reference(name)? .ok_or_else(|| reference::find::existing::Error::NotFound { name: partial_name.to_owned(), }) } /// Return a platform for iterating references. /// /// Common kinds of iteration are [all][crate::reference::iter::Platform::all()] or [prefixed][crate::reference::iter::Platform::prefixed()] /// references. pub fn references(&self) -> Result, reference::iter::Error> { Ok(reference::iter::Platform { platform: self.refs.iter()?, repo: self, }) } /// Try to find the reference named `name`, like `main`, `heads/branch`, `HEAD` or `origin/other`, and return it. /// /// Otherwise return `None` if the reference wasn't found. /// If the reference is expected to exist, use [`find_reference()`][crate::Repository::find_reference()]. 
pub fn try_find_reference<'a, Name, E>(&self, name: Name) -> Result>, reference::find::Error> where Name: TryInto<&'a PartialNameRef, Error = E>, gix_ref::file::find::Error: From, { match self.refs.try_find(name) { Ok(r) => match r { Some(r) => Ok(Some(Reference::from_ref(r, self))), None => Ok(None), }, Err(err) => Err(err.into()), } } } gix-0.69.1/src/repository/remote.rs000064400000000000000000000227251046102023000153670ustar 00000000000000#![allow(clippy::result_large_err)] use crate::{bstr::BStr, config, remote, remote::find, Remote}; impl crate::Repository { /// Create a new remote available at the given `url`. /// /// It's configured to fetch included tags by default, similar to git. /// See [`with_fetch_tags(…)`][Remote::with_fetch_tags()] for a way to change it. pub fn remote_at(&self, url: Url) -> Result, remote::init::Error> where Url: TryInto, gix_url::parse::Error: From, { Remote::from_fetch_url(url, true, self) } /// Create a new remote available at the given `url` similarly to [`remote_at()`][crate::Repository::remote_at()], /// but don't rewrite the url according to rewrite rules. /// This eliminates a failure mode in case the rewritten URL is faulty, allowing to selectively [apply rewrite /// rules][Remote::rewrite_urls()] later and do so non-destructively. pub fn remote_at_without_url_rewrite(&self, url: Url) -> Result, remote::init::Error> where Url: TryInto, gix_url::parse::Error: From, { Remote::from_fetch_url(url, false, self) } /// Find the configured remote with the given `name_or_url` or report an error, /// similar to [`try_find_remote(…)`][Self::try_find_remote()]. /// /// Note that we will obtain remotes only if we deem them [trustworthy][crate::open::Options::filter_config_section()]. pub fn find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Result, find::existing::Error> { let name_or_url = name_or_url.into(); Ok(self .try_find_remote(name_or_url) .ok_or_else(|| find::existing::Error::NotFound { name: name_or_url.into(), })??) 
} /// Find the default remote as configured, or `None` if no such configuration could be found. /// /// See [`remote_default_name()`](Self::remote_default_name()) for more information on the `direction` parameter. pub fn find_default_remote( &self, direction: remote::Direction, ) -> Option, find::existing::Error>> { self.remote_default_name(direction) .map(|name| self.find_remote(name.as_ref())) } /// Find the configured remote with the given `name_or_url` or return `None` if it doesn't exist, /// for the purpose of fetching or pushing data. /// /// There are various error kinds related to partial information or incorrectly formatted URLs or ref-specs. /// Also note that the created `Remote` may have neither fetch nor push ref-specs set at all. /// /// Note that ref-specs are de-duplicated right away which may change their order. This doesn't affect matching in any way /// as negations/excludes are applied after includes. /// /// We will only include information if we deem it [trustworthy][crate::open::Options::filter_config_section()]. pub fn try_find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Option, find::Error>> { self.try_find_remote_inner(name_or_url.into(), true) } /// This method emulate what `git fetch ` does in order to obtain a remote to fetch from. /// /// As such, with `name_or_url` being `Some`, it will: /// /// * use `name_or_url` verbatim if it is a URL, creating a new remote in memory as needed. /// * find the named remote if `name_or_url` is a remote name /// /// If `name_or_url` is `None`: /// /// * use the current `HEAD` branch to find a configured remote /// * fall back to either a generally configured remote or the only configured remote. /// /// Fail if no remote could be found despite all of the above. 
pub fn find_fetch_remote(&self, name_or_url: Option<&BStr>) -> Result, find::for_fetch::Error> { Ok(match name_or_url { Some(name) => match self.try_find_remote(name).and_then(Result::ok) { Some(remote) => remote, None => self.remote_at(gix_url::parse(name)?)?, }, None => self .head()? .into_remote(remote::Direction::Fetch) .transpose()? .map(Ok) .or_else(|| self.find_default_remote(remote::Direction::Fetch)) .ok_or(find::for_fetch::Error::ExactlyOneRemoteNotAvailable)??, }) } /// Similar to [`try_find_remote()`][Self::try_find_remote()], but removes a failure mode if rewritten URLs turn out to be invalid /// as it skips rewriting them. /// Use this in conjunction with [`Remote::rewrite_urls()`] to non-destructively apply the rules and keep the failed urls unchanged. pub fn try_find_remote_without_url_rewrite<'a>( &self, name_or_url: impl Into<&'a BStr>, ) -> Option, find::Error>> { self.try_find_remote_inner(name_or_url.into(), false) } fn try_find_remote_inner<'a>( &self, name_or_url: impl Into<&'a BStr>, rewrite_urls: bool, ) -> Option, find::Error>> { fn config_spec( specs: Vec>, name_or_url: &BStr, key: &'static config::tree::keys::Any, op: gix_refspec::parse::Operation, ) -> Result, find::Error> { let kind = key.name; specs .into_iter() .map(|spec| { key.try_into_refspec(spec, op).map_err(|err| find::Error::RefSpec { remote_name: name_or_url.into(), kind, source: err, }) }) .collect::, _>>() .map(|mut specs| { specs.sort(); specs.dedup(); specs }) } let mut filter = self.filter_config_section(); let name_or_url = name_or_url.into(); let mut config_url = |key: &'static config::tree::keys::Url, kind: &'static str| { self.config .resolved .string_filter(format!("remote.{}.{}", name_or_url, key.name), &mut filter) .map(|url| { key.try_into_url(url).map_err(|err| find::Error::Url { kind, remote_name: name_or_url.into(), source: err, }) }) }; let url = config_url(&config::tree::Remote::URL, "fetch"); let push_url = config_url(&config::tree::Remote::PUSH_URL, 
"push"); let config = &self.config.resolved; let fetch_specs = config .strings_filter(format!("remote.{}.{}", name_or_url, "fetch"), &mut filter) .map(|specs| { config_spec( specs, name_or_url, &config::tree::Remote::FETCH, gix_refspec::parse::Operation::Fetch, ) }); let push_specs = config .strings_filter(format!("remote.{}.{}", name_or_url, "push"), &mut filter) .map(|specs| { config_spec( specs, name_or_url, &config::tree::Remote::PUSH, gix_refspec::parse::Operation::Push, ) }); let fetch_tags = config .string_filter(format!("remote.{}.{}", name_or_url, "tagOpt"), &mut filter) .map(|value| { config::tree::Remote::TAG_OPT .try_into_tag_opt(value) .map_err(Into::into) }); let fetch_tags = match fetch_tags { Some(Ok(v)) => v, Some(Err(err)) => return Some(Err(err)), None => Default::default(), }; match (url, fetch_specs, push_url, push_specs) { (None, None, None, None) => None, (None, _, None, _) => Some(Err(find::Error::UrlMissing)), (url, fetch_specs, push_url, push_specs) => { let url = match url { Some(Ok(v)) => Some(v), Some(Err(err)) => return Some(Err(err)), None => None, }; let push_url = match push_url { Some(Ok(v)) => Some(v), Some(Err(err)) => return Some(Err(err)), None => None, }; let fetch_specs = match fetch_specs { Some(Ok(v)) => v, Some(Err(err)) => return Some(Err(err)), None => Vec::new(), }; let push_specs = match push_specs { Some(Ok(v)) => v, Some(Err(err)) => return Some(Err(err)), None => Vec::new(), }; Some( Remote::from_preparsed_config( Some(name_or_url.to_owned()), url, push_url, fetch_specs, push_specs, rewrite_urls, fetch_tags, self, ) .map_err(Into::into), ) } } } } gix-0.69.1/src/repository/revision.rs000064400000000000000000000150231046102023000157230ustar 00000000000000use crate::revision; #[cfg(feature = "revision")] use crate::{bstr::BStr, Id}; /// Methods for resolving revisions by spec or working with the commit graph. 
impl crate::Repository { /// Parse a revision specification and turn it into the object(s) it describes, similar to `git rev-parse`. /// /// # Deviation /// /// - `@` actually stands for `HEAD`, whereas `git` resolves it to the object pointed to by `HEAD` without making the /// `HEAD` ref available for lookups. #[doc(alias = "revparse", alias = "git2")] #[cfg(feature = "revision")] pub fn rev_parse<'a>(&self, spec: impl Into<&'a BStr>) -> Result, revision::spec::parse::Error> { revision::Spec::from_bstr( spec, self, revision::spec::parse::Options { object_kind_hint: self.config.object_kind_hint, ..Default::default() }, ) } /// Parse a revision specification and return single object id as represented by this instance. #[doc(alias = "revparse_single", alias = "git2")] #[cfg(feature = "revision")] pub fn rev_parse_single<'repo, 'a>( &'repo self, spec: impl Into<&'a BStr>, ) -> Result, revision::spec::parse::single::Error> { let spec = spec.into(); self.rev_parse(spec)? .single() .ok_or(revision::spec::parse::single::Error::RangedRev { spec: spec.into() }) } /// Obtain the best merge-base between commit `one` and `two`, or fail if there is none. /// /// # Performance /// For repeated calls, prefer [`merge_base_with_cache()`](crate::Repository::merge_base_with_graph()). /// Also be sure to [set an object cache](crate::Repository::object_cache_size_if_unset) to accelerate repeated commit lookups. 
#[cfg(feature = "revision")] pub fn merge_base( &self, one: impl Into, two: impl Into, ) -> Result, super::merge_base::Error> { use crate::prelude::ObjectIdExt; let one = one.into(); let two = two.into(); let cache = self.commit_graph_if_enabled()?; let mut graph = self.revision_graph(cache.as_ref()); let bases = gix_revision::merge_base(one, &[two], &mut graph)?.ok_or(super::merge_base::Error::NotFound { first: one, second: two, })?; Ok(bases[0].attach(self)) } /// Obtain the best merge-base between commit `one` and `two`, or fail if there is none, providing a /// commit-graph `graph` to potentially greatly accelerate the operation by reusing graphs from previous runs. /// /// # Performance /// Be sure to [set an object cache](crate::Repository::object_cache_size_if_unset) to accelerate repeated commit lookups. #[cfg(feature = "revision")] pub fn merge_base_with_graph( &self, one: impl Into, two: impl Into, graph: &mut gix_revwalk::Graph<'_, '_, gix_revwalk::graph::Commit>, ) -> Result, super::merge_base_with_graph::Error> { use crate::prelude::ObjectIdExt; let one = one.into(); let two = two.into(); let bases = gix_revision::merge_base(one, &[two], graph)?.ok_or(super::merge_base_with_graph::Error::NotFound { first: one, second: two, })?; Ok(bases[0].attach(self)) } /// Obtain all merge-bases between commit `one` and `others`, or an empty list if there is none, providing a /// commit-graph `graph` to potentially greatly accelerate the operation. /// /// # Performance /// Be sure to [set an object cache](crate::Repository::object_cache_size_if_unset) to accelerate repeated commit lookups. 
#[doc(alias = "merge_bases_many", alias = "git2")] #[cfg(feature = "revision")] pub fn merge_bases_many_with_graph( &self, one: impl Into, others: &[gix_hash::ObjectId], graph: &mut gix_revwalk::Graph<'_, '_, gix_revwalk::graph::Commit>, ) -> Result>, gix_revision::merge_base::Error> { use crate::prelude::ObjectIdExt; let one = one.into(); Ok(gix_revision::merge_base(one, others, graph)? .unwrap_or_default() .into_iter() .map(|id| id.attach(self)) .collect()) } /// Return the best merge-base among all `commits`, or fail if `commits` yields no commit or no merge-base was found. /// /// Use `graph` to speed up repeated calls. #[cfg(feature = "revision")] pub fn merge_base_octopus_with_graph( &self, commits: impl IntoIterator>, graph: &mut gix_revwalk::Graph<'_, '_, gix_revwalk::graph::Commit>, ) -> Result, crate::repository::merge_base_octopus_with_graph::Error> { use crate::prelude::ObjectIdExt; use crate::repository::merge_base_octopus_with_graph; let commits: Vec<_> = commits.into_iter().map(Into::into).collect(); let first = commits .first() .copied() .ok_or(merge_base_octopus_with_graph::Error::MissingCommit)?; gix_revision::merge_base::octopus(first, &commits[1..], graph)? .ok_or(merge_base_octopus_with_graph::Error::NoMergeBase) .map(|id| id.attach(self)) } /// Return the best merge-base among all `commits`, or fail if `commits` yields no commit or no merge-base was found. /// /// For repeated calls, prefer [`Self::merge_base_octopus_with_graph()`] for cache-reuse. #[cfg(feature = "revision")] pub fn merge_base_octopus( &self, commits: impl IntoIterator>, ) -> Result, crate::repository::merge_base_octopus::Error> { let cache = self.commit_graph_if_enabled()?; let mut graph = self.revision_graph(cache.as_ref()); Ok(self.merge_base_octopus_with_graph(commits, &mut graph)?) } /// Create the baseline for a revision walk by initializing it with the `tips` to start iterating on. /// /// It can be configured further before starting the actual walk. 
#[doc(alias = "revwalk", alias = "git2")] pub fn rev_walk( &self, tips: impl IntoIterator>, ) -> revision::walk::Platform<'_> { revision::walk::Platform::new(tips, self) } } gix-0.69.1/src/repository/shallow.rs000064400000000000000000000035501046102023000155400ustar 00000000000000use std::{borrow::Cow, path::PathBuf}; use crate::{ config::tree::{gitoxide, Key}, Repository, }; impl Repository { /// Return `true` if the repository is a shallow clone, i.e. contains history only up to a certain depth. pub fn is_shallow(&self) -> bool { self.shallow_file() .metadata() .map_or(false, |m| m.is_file() && m.len() > 0) } /// Return a shared list of shallow commits which is updated automatically if the in-memory snapshot has become stale /// as the underlying file on disk has changed. /// /// The list of shallow commits represents the shallow boundary, beyond which we are lacking all (parent) commits. /// Note that the list is never empty, as `Ok(None)` is returned in that case indicating the repository /// isn't a shallow clone. /// /// The shared list is shared across all clones of this repository. pub fn shallow_commits(&self) -> Result, crate::shallow::read::Error> { self.shallow_commits.recent_snapshot( || self.shallow_file().metadata().ok().and_then(|m| m.modified().ok()), || gix_shallow::read(&self.shallow_file()), ) } /// Return the path to the `shallow` file which contains hashes, one per line, that describe commits that don't have their /// parents within this repository. /// /// Note that it may not exist if the repository isn't actually shallow. 
pub fn shallow_file(&self) -> PathBuf { let shallow_name = self .config .resolved .string_filter( gitoxide::Core::SHALLOW_FILE.logical_name().as_str(), &mut self.filter_config_section(), ) .unwrap_or_else(|| Cow::Borrowed("shallow".into())); self.common_dir().join(gix_path::from_bstr(shallow_name)) } } gix-0.69.1/src/repository/state.rs000064400000000000000000000034131046102023000152050ustar 00000000000000use crate::state; impl crate::Repository { /// Returns the status of an in progress operation on a repository or [`None`] /// if no operation is currently in progress. /// /// Note to be confused with the repositories 'status'. pub fn state(&self) -> Option { let git_dir = self.path(); // This is modeled on the logic from wt_status_get_state in git's wt-status.c and // ps1 from gix-prompt.sh. if git_dir.join("rebase-apply/applying").is_file() { Some(state::InProgress::ApplyMailbox) } else if git_dir.join("rebase-apply/rebasing").is_file() { Some(state::InProgress::Rebase) } else if git_dir.join("rebase-apply").is_dir() { Some(state::InProgress::ApplyMailboxRebase) } else if git_dir.join("rebase-merge/interactive").is_file() { Some(state::InProgress::RebaseInteractive) } else if git_dir.join("rebase-merge").is_dir() { Some(state::InProgress::Rebase) } else if git_dir.join("CHERRY_PICK_HEAD").is_file() { if git_dir.join("sequencer/todo").is_file() { Some(state::InProgress::CherryPickSequence) } else { Some(state::InProgress::CherryPick) } } else if git_dir.join("MERGE_HEAD").is_file() { Some(state::InProgress::Merge) } else if git_dir.join("BISECT_LOG").is_file() { Some(state::InProgress::Bisect) } else if git_dir.join("REVERT_HEAD").is_file() { if git_dir.join("sequencer/todo").is_file() { Some(state::InProgress::RevertSequence) } else { Some(state::InProgress::Revert) } } else { None } } } gix-0.69.1/src/repository/submodule.rs000064400000000000000000000100671046102023000160670ustar 00000000000000use std::rc::Rc; use crate::{submodule, Repository}; impl Repository 
{ /// Open the `.gitmodules` file as present in the worktree, or return `None` if no such file is available. /// Note that git configuration is also contributing to the result based on the current snapshot. /// /// Note that his method will not look in other places, like the index or the `HEAD` tree. // TODO(submodule): make it use an updated snapshot instead once we have `config()`. pub fn open_modules_file(&self) -> Result, submodule::open_modules_file::Error> { let path = match self.modules_path() { Some(path) => path, None => return Ok(None), }; let buf = match std::fs::read(&path) { Ok(buf) => buf, Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None), Err(err) => return Err(err.into()), }; Ok(Some(gix_submodule::File::from_bytes( &buf, path, &self.config.resolved, )?)) } /// Return a shared [`.gitmodules` file](crate::submodule::File) which is updated automatically if the in-memory snapshot /// has become stale as the underlying file on disk has changed. The snapshot based on the file on disk is shared across all /// clones of this repository. /// /// If a file on disk isn't present, we will try to load it from the index, and finally from the current tree. /// In the latter two cases, the result will not be cached in this repository instance as we can't detect freshness anymore, /// so time this method is called a new [modules file](submodule::ModulesSnapshot) will be created. /// /// Note that git configuration is also contributing to the result based on the current snapshot. /// // TODO(submodule): make it use an updated snapshot instead once we have `config()`. pub fn modules(&self) -> Result, submodule::modules::Error> { match self.modules.recent_snapshot( || { self.modules_path() .and_then(|path| path.metadata().and_then(|m| m.modified()).ok()) }, || self.open_modules_file(), )? 
{ Some(m) => Ok(Some(m)), None => { let id = match self.try_index()?.and_then(|index| { index .entry_by_path(submodule::MODULES_FILE.into()) .map(|entry| entry.id) }) { Some(id) => id, None => match self .head_commit()? .tree()? .find_entry(submodule::MODULES_FILE) .map(|entry| entry.inner.oid) { Some(id) => id.to_owned(), None => return Ok(None), }, }; Ok(Some(gix_features::threading::OwnShared::new( gix_submodule::File::from_bytes(&self.find_object(id)?.data, None, &self.config.resolved) .map_err(submodule::open_modules_file::Error::from)? .into(), ))) } } } /// Return the list of available submodules, or `None` if there is no submodule configuration. #[doc(alias = "git2")] pub fn submodules(&self) -> Result>>, submodule::modules::Error> { let modules = match self.modules()? { None => return Ok(None), Some(m) => m, }; let shared_state = Rc::new(submodule::SharedState::new(self, modules)); Ok(Some( shared_state .modules .names() .map(ToOwned::to_owned) .collect::>() .into_iter() .map(move |name| crate::Submodule { state: shared_state.clone(), name, }), )) } } gix-0.69.1/src/repository/thread_safe.rs000064400000000000000000000033521046102023000163340ustar 00000000000000mod access { impl crate::ThreadSafeRepository { /// Add thread-local state to an easy-to-use thread-local repository for the most convenient API. pub fn to_thread_local(&self) -> crate::Repository { self.into() } } } mod location { impl crate::ThreadSafeRepository { /// The path to the `.git` directory itself, or equivalent if this is a bare repository. pub fn path(&self) -> &std::path::Path { self.git_dir() } /// Return the path to the repository itself, containing objects, references, configuration, and more. /// /// Synonymous to [`path()`][crate::ThreadSafeRepository::path()]. pub fn git_dir(&self) -> &std::path::Path { self.refs.git_dir() } /// Return the path to the working directory if this is not a bare repository. 
pub fn work_dir(&self) -> Option<&std::path::Path> { self.work_tree.as_deref() } /// Return the path to the directory containing all objects. pub fn objects_dir(&self) -> &std::path::Path { self.objects.path() } } } mod impls { impl std::fmt::Debug for crate::ThreadSafeRepository { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "Repository(git = '{}', working_tree: {:?}", self.git_dir().display(), self.work_tree ) } } impl PartialEq for crate::ThreadSafeRepository { fn eq(&self, other: &crate::ThreadSafeRepository) -> bool { self.git_dir() == other.git_dir() && self.work_tree == other.work_tree } } } gix-0.69.1/src/repository/worktree.rs000064400000000000000000000151541046102023000157340ustar 00000000000000use crate::{worktree, Worktree}; /// Interact with individual worktrees and their information. impl crate::Repository { /// Return a list of all _linked_ worktrees sorted by private git dir path as a lightweight proxy. /// /// Note that these need additional processing to become usable, but provide a first glimpse a typical worktree information. pub fn worktrees(&self) -> std::io::Result>> { let mut res = Vec::new(); let iter = match std::fs::read_dir(self.common_dir().join("worktrees")) { Ok(iter) => iter, Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(res), Err(err) => return Err(err), }; for entry in iter { let entry = entry?; let worktree_git_dir = entry.path(); if worktree_git_dir.join("gitdir").is_file() { res.push(worktree::Proxy { parent: self, git_dir: worktree_git_dir, }); } } res.sort_by(|a, b| a.git_dir.cmp(&b.git_dir)); Ok(res) } /// Return the repository owning the main worktree, typically from a linked worktree. /// /// Note that it might be the one that is currently open if this repository doesn't point to a linked worktree. /// Also note that the main repo might be bare. 
#[allow(clippy::result_large_err)] pub fn main_repo(&self) -> Result { crate::ThreadSafeRepository::open_opts(self.common_dir(), self.options.clone()).map(Into::into) } /// Return the currently set worktree if there is one, acting as platform providing a validated worktree base path. /// /// Note that there would be `None` if this repository is `bare` and the parent [`Repository`][crate::Repository] was instantiated without /// registered worktree in the current working dir, even if no `.git` file or directory exists. /// It's merely based on configuration, see [Worktree::dot_git_exists()] for a way to perform more validation. pub fn worktree(&self) -> Option> { self.work_dir().map(|path| Worktree { parent: self, path }) } /// Return true if this repository is bare, and has no main work tree. /// /// This is not to be confused with the [`worktree()`][crate::Repository::worktree()] worktree, which may exists if this instance /// was opened in a worktree that was created separately. pub fn is_bare(&self) -> bool { self.config.is_bare && self.work_dir().is_none() } /// If `id` points to a tree, produce a stream that yields one worktree entry after the other. The index of the tree at `id` /// is returned as well as it is an intermediate byproduct that might be useful to callers. /// /// The entries will look exactly like they would if one would check them out, with filters applied. /// The `export-ignore` attribute is used to skip blobs or directories to which it applies. 
#[cfg(feature = "worktree-stream")] pub fn worktree_stream( &self, id: impl Into, ) -> Result<(gix_worktree_stream::Stream, gix_index::File), crate::repository::worktree_stream::Error> { use gix_odb::HeaderExt; let id = id.into(); let header = self.objects.header(id)?; if !header.kind().is_tree() { return Err(crate::repository::worktree_stream::Error::NotATree { id, actual: header.kind(), }); } // TODO(perf): potential performance improvements could be to use the index at `HEAD` if possible (`index_from_head_tree…()`) // TODO(perf): when loading a non-HEAD tree, we effectively traverse the tree twice. This is usually fast though, and sharing // an object cache between the copies of the ODB handles isn't trivial and needs a lock. let index = self.index_from_tree(&id)?; let mut cache = self .attributes_only(&index, gix_worktree::stack::state::attributes::Source::IdMapping)? .detach(); let pipeline = gix_filter::Pipeline::new(self.command_context()?, crate::filter::Pipeline::options(self)?); let objects = self.objects.clone().into_arc().expect("TBD error handling"); let stream = gix_worktree_stream::from_tree( id, objects.clone(), pipeline, move |path, mode, attrs| -> std::io::Result<()> { let entry = cache.at_entry(path, Some(mode.into()), &objects)?; entry.matching_attributes(attrs); Ok(()) }, ); Ok((stream, index)) } /// Produce an archive from the `stream` and write it to `out` according to `options`. /// Use `blob` to provide progress for each entry written to `out`, and note that it should already be initialized to the amount /// of expected entries, with `should_interrupt` being queried between each entry to abort if needed, and on each write to `out`. /// /// ### Performance /// /// Be sure that `out` is able to handle a lot of write calls. Otherwise wrap it in a [`BufWriter`][std::io::BufWriter]. 
/// /// ### Additional progress and fine-grained interrupt handling /// /// For additional progress reporting, wrap `out` into a writer that counts throughput on each write. /// This can also be used to react to interrupts on each write, instead of only for each entry. #[cfg(feature = "worktree-archive")] pub fn worktree_archive( &self, mut stream: gix_worktree_stream::Stream, out: impl std::io::Write + std::io::Seek, blobs: impl gix_features::progress::Count, should_interrupt: &std::sync::atomic::AtomicBool, options: gix_archive::Options, ) -> Result<(), crate::repository::worktree_archive::Error> { let mut out = gix_features::interrupt::Write { inner: out, should_interrupt, }; if options.format == gix_archive::Format::InternalTransientNonPersistable { std::io::copy(&mut stream.into_read(), &mut out)?; return Ok(()); } gix_archive::write_stream_seek( &mut stream, |stream| { if should_interrupt.load(std::sync::atomic::Ordering::Relaxed) { return Err(std::io::Error::new(std::io::ErrorKind::Other, "Cancelled by user").into()); } let res = stream.next_entry(); blobs.inc(); res }, out, options, )?; Ok(()) } } gix-0.69.1/src/revision/mod.rs000064400000000000000000000025071046102023000142660ustar 00000000000000//! Revisions is the generalized notion of a commit. //! //! This module provides utilities to walk graphs of revisions and specify revisions and ranges of revisions. #[cfg(feature = "revision")] pub use gix_revision as plumbing; /// pub mod walk; pub use walk::iter_impl::Walk; /// #[cfg(feature = "revision")] pub mod spec; /// The specification of a revision as parsed from a revision specification like `HEAD@{1}` or `v1.2.3...main`. /// It's typically created by [`repo.rev_parse()`][crate::Repository::rev_parse()]. /// /// See the [official git documentation](https://git-scm.com/docs/git-rev-parse#_specifying_revisions) for reference on how /// to specify revisions and revision ranges. 
#[derive(Clone, Debug)] #[cfg(feature = "revision")] pub struct Spec<'repo> { pub(crate) inner: gix_revision::Spec, /// The path we encountered in the revspec, like `@:` or `@..@~1:`. pub(crate) path: Option<(crate::bstr::BString, gix_object::tree::EntryMode)>, /// The first name of a reference as seen while parsing a `RevSpec`, for completeness. pub(crate) first_ref: Option, /// The second name of a reference as seen while parsing a `RevSpec`, for completeness. pub(crate) second_ref: Option, /// The owning repository. pub repo: &'repo crate::Repository, } gix-0.69.1/src/revision/spec/mod.rs000064400000000000000000000064511046102023000152220ustar 00000000000000use crate::bstr::BStr; use crate::{ext::ReferenceExt, revision::Spec, Id, Reference}; /// pub mod parse; mod impls { use std::ops::{Deref, DerefMut}; use crate::revision::Spec; impl Deref for Spec<'_> { type Target = gix_revision::Spec; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for Spec<'_> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } impl PartialEq for Spec<'_> { fn eq(&self, other: &Self) -> bool { self.inner == other.inner } } impl Eq for Spec<'_> {} } /// Initialization impl<'repo> Spec<'repo> { /// Create a single specification which points to `id`. pub fn from_id(id: Id<'repo>) -> Self { Spec { inner: gix_revision::Spec::Include(id.inner), path: None, repo: id.repo, first_ref: None, second_ref: None, } } } /// Access impl<'repo> Spec<'repo> { /// Detach the `Repository` from this instance, leaving only plain data that can be moved freely and serialized. pub fn detach(self) -> gix_revision::Spec { self.inner } /// Some revision specifications leave information about references which are returned as `(from-ref, to-ref)` here, e.g. /// `HEAD@{-1}..main` might be `(Some(refs/heads/previous-branch), Some(refs/heads/main))`, /// or `@` returns `(Some(refs/heads/main), None)`. 
pub fn into_references(self) -> (Option>, Option>) { let repo = self.repo; ( self.first_ref.map(|r| r.attach(repo)), self.second_ref.map(|r| r.attach(repo)), ) } /// Return the path encountered in specs like `@:` or `:`, along with the kind of object it represents. /// /// Note that there can only be one as paths always terminates further revspec parsing. pub fn path_and_mode(&self) -> Option<(&BStr, gix_object::tree::EntryMode)> { self.path.as_ref().map(|(p, mode)| (p.as_ref(), *mode)) } /// Return the name of the first reference we encountered while resolving the rev-spec, or `None` if a short hash /// was used. For example, `@` might yield `Some(HEAD)`, but `abcd` yields `None`. pub fn first_reference(&self) -> Option<&gix_ref::Reference> { self.first_ref.as_ref() } /// Return the name of the second reference we encountered while resolving the rev-spec, or `None` if a short hash /// was used or there was no second reference. For example, `..@` might yield `Some(HEAD)`, but `..abcd` or `@` /// yields `None`. pub fn second_reference(&self) -> Option<&gix_ref::Reference> { self.second_ref.as_ref() } /// Return the single included object represented by this instance, or `None` if it is a range of any kind. pub fn single(&self) -> Option> { match self.inner { gix_revision::Spec::Include(id) | gix_revision::Spec::ExcludeParents(id) => { Id::from_id(id, self.repo).into() } gix_revision::Spec::Exclude(_) | gix_revision::Spec::Range { .. } | gix_revision::Spec::Merge { .. } | gix_revision::Spec::IncludeOnlyParents { .. 
} => None, } } } gix-0.69.1/src/revision/spec/parse/delegate/mod.rs000064400000000000000000000234131046102023000201030ustar 00000000000000use std::collections::HashSet; use gix_hash::ObjectId; use gix_revision::spec::{parse, parse::delegate}; use smallvec::SmallVec; use super::{Delegate, Error, ObjectKindHint}; use crate::{ ext::{ObjectIdExt, ReferenceExt}, Repository, }; type Replacements = SmallVec<[(ObjectId, ObjectId); 1]>; impl<'repo> Delegate<'repo> { pub fn new(repo: &'repo Repository, opts: crate::revision::spec::parse::Options) -> Self { Delegate { refs: Default::default(), objs: Default::default(), paths: Default::default(), ambiguous_objects: Default::default(), idx: 0, kind: None, err: Vec::new(), prefix: Default::default(), last_call_was_disambiguate_prefix: Default::default(), opts, repo, } } pub fn into_err(mut self) -> Error { let repo = self.repo; for err in self .ambiguous_objects .iter_mut() .zip(self.prefix) .filter_map(|(a, b)| a.take().filter(|candidates| candidates.len() > 1).zip(b)) .map(|(candidates, prefix)| Error::ambiguous(candidates, prefix, repo)) .rev() { self.err.insert(0, err); } Error::from_errors(self.err) } pub fn into_rev_spec(mut self) -> Result, Error> { fn zero_or_one_objects_or_ambiguity_err( mut candidates: [Option>; 2], prefix: [Option; 2], mut errors: Vec, repo: &Repository, ) -> Result<[Option; 2], Error> { let mut out = [None, None]; for ((candidates, prefix), out) in candidates.iter_mut().zip(prefix).zip(out.iter_mut()) { let candidates = candidates.take(); match candidates { None => *out = None, Some(candidates) => { match candidates.len() { 0 => unreachable!( "BUG: let's avoid still being around if no candidate matched the requirements" ), 1 => { *out = candidates.into_iter().next(); } _ => { errors.insert( 0, Error::ambiguous(candidates, prefix.expect("set when obtaining candidates"), repo), ); return Err(Error::from_errors(errors)); } }; } }; } Ok(out) } fn kind_to_spec( kind: Option, [first, second]: [Option; 2], 
) -> Result { use gix_revision::spec::Kind::*; Ok(match kind.unwrap_or_default() { IncludeReachable => gix_revision::Spec::Include(first.ok_or(Error::Malformed)?), ExcludeReachable => gix_revision::Spec::Exclude(first.ok_or(Error::Malformed)?), RangeBetween => gix_revision::Spec::Range { from: first.ok_or(Error::Malformed)?, to: second.ok_or(Error::Malformed)?, }, ReachableToMergeBase => gix_revision::Spec::Merge { theirs: first.ok_or(Error::Malformed)?, ours: second.ok_or(Error::Malformed)?, }, IncludeReachableFromParents => gix_revision::Spec::IncludeOnlyParents(first.ok_or(Error::Malformed)?), ExcludeReachableFromParents => gix_revision::Spec::ExcludeParents(first.ok_or(Error::Malformed)?), }) } let range = zero_or_one_objects_or_ambiguity_err(self.objs, self.prefix, self.err, self.repo)?; Ok(crate::revision::Spec { path: self.paths[0].take().or(self.paths[1].take()), first_ref: self.refs[0].take(), second_ref: self.refs[1].take(), inner: kind_to_spec(self.kind, range)?, repo: self.repo, }) } } impl parse::Delegate for Delegate<'_> { fn done(&mut self) { self.follow_refs_to_objects_if_needed(); self.disambiguate_objects_by_fallback_hint( self.kind_implies_committish() .then_some(ObjectKindHint::Committish) .or(self.opts.object_kind_hint), ); } } impl delegate::Kind for Delegate<'_> { fn kind(&mut self, kind: gix_revision::spec::Kind) -> Option<()> { use gix_revision::spec::Kind::*; self.kind = Some(kind); if self.kind_implies_committish() { self.disambiguate_objects_by_fallback_hint(ObjectKindHint::Committish.into()); } if matches!(kind, RangeBetween | ReachableToMergeBase) { self.idx += 1; } Some(()) } } impl Delegate<'_> { fn kind_implies_committish(&self) -> bool { self.kind.unwrap_or(gix_revision::spec::Kind::IncludeReachable) != gix_revision::spec::Kind::IncludeReachable } fn disambiguate_objects_by_fallback_hint(&mut self, hint: Option) { fn require_object_kind(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<(), Error> { let obj = 
repo.find_object(obj)?; if obj.kind == kind { Ok(()) } else { Err(Error::ObjectKind { actual: obj.kind, expected: kind, oid: obj.id.attach(repo).shorten_or_id(), }) } } if self.last_call_was_disambiguate_prefix[self.idx] { self.unset_disambiguate_call(); if let Some(objs) = self.objs[self.idx].as_mut() { let repo = self.repo; let errors: Vec<_> = match hint { Some(kind_hint) => match kind_hint { ObjectKindHint::Treeish | ObjectKindHint::Committish => { let kind = match kind_hint { ObjectKindHint::Treeish => gix_object::Kind::Tree, ObjectKindHint::Committish => gix_object::Kind::Commit, _ => unreachable!("BUG: we narrow possibilities above"), }; objs.iter() .filter_map(|obj| peel(repo, obj, kind).err().map(|err| (*obj, err))) .collect() } ObjectKindHint::Tree | ObjectKindHint::Commit | ObjectKindHint::Blob => { let kind = match kind_hint { ObjectKindHint::Tree => gix_object::Kind::Tree, ObjectKindHint::Commit => gix_object::Kind::Commit, ObjectKindHint::Blob => gix_object::Kind::Blob, _ => unreachable!("BUG: we narrow possibilities above"), }; objs.iter() .filter_map(|obj| require_object_kind(repo, obj, kind).err().map(|err| (*obj, err))) .collect() } }, None => return, }; if errors.len() == objs.len() { self.err.extend(errors.into_iter().map(|(_, err)| err)); } else { for (obj, err) in errors { objs.remove(&obj); self.err.push(err); } } } } } fn follow_refs_to_objects_if_needed(&mut self) -> Option<()> { let repo = self.repo; for (r, obj) in self.refs.iter().zip(self.objs.iter_mut()) { if let (Some(ref_), obj_opt @ None) = (r, obj) { if let Some(id) = ref_.target.try_id().map(ToOwned::to_owned).or_else(|| { match ref_.clone().attach(repo).peel_to_id_in_place() { Err(err) => { self.err.push(Error::PeelToId { name: ref_.name.clone(), source: err, }); None } Ok(id) => Some(id.detach()), } }) { obj_opt.get_or_insert_with(HashSet::default).insert(id); }; }; } Some(()) } fn unset_disambiguate_call(&mut self) { self.last_call_was_disambiguate_prefix[self.idx] = false; } } 
fn peel(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result { let mut obj = repo.find_object(obj)?; obj = obj.peel_to_kind(kind)?; debug_assert_eq!(obj.kind, kind, "bug in Object::peel_to_kind() which didn't deliver"); Ok(obj.id) } fn handle_errors_and_replacements( destination: &mut Vec, objs: &mut HashSet, errors: Vec<(ObjectId, Error)>, replacements: &mut Replacements, ) -> Option<()> { if errors.len() == objs.len() { destination.extend(errors.into_iter().map(|(_, err)| err)); None } else { for (obj, err) in errors { objs.remove(&obj); destination.push(err); } for (find, replace) in replacements { objs.remove(find); objs.insert(*replace); } Some(()) } } mod navigate; mod revision; gix-0.69.1/src/revision/spec/parse/delegate/navigate.rs000064400000000000000000000352161046102023000211260ustar 00000000000000use std::collections::HashSet; use gix_hash::ObjectId; use gix_index::entry::Stage; use gix_revision::spec::parse::{ delegate, delegate::{PeelTo, Traversal}, }; use crate::{ bstr::{BStr, ByteSlice}, ext::ObjectIdExt, object, revision::spec::parse::{ delegate::{handle_errors_and_replacements, peel, Replacements}, Delegate, Error, }, Object, }; impl delegate::Navigate for Delegate<'_> { fn traverse(&mut self, kind: Traversal) -> Option<()> { self.unset_disambiguate_call(); self.follow_refs_to_objects_if_needed()?; let mut replacements = Replacements::default(); let mut errors = Vec::new(); let objs = self.objs[self.idx].as_mut()?; let repo = self.repo; for obj in objs.iter() { match kind { Traversal::NthParent(num) => { match self.repo.find_object(*obj).map_err(Error::from).and_then(|obj| { obj.try_into_commit().map_err(|err| { let object::try_into::Error { actual, expected, id } = err; Error::ObjectKind { oid: id.attach(repo).shorten_or_id(), actual, expected, } }) }) { Ok(commit) => match commit.parent_ids().nth(num.saturating_sub(1)) { Some(id) => replacements.push((commit.id, id.detach())), None => errors.push(( commit.id, 
Error::ParentOutOfRange { oid: commit.id().shorten_or_id(), desired: num, available: commit.parent_ids().count(), }, )), }, Err(err) => errors.push((*obj, err)), } } Traversal::NthAncestor(num) => { let id = obj.attach(repo); match id .ancestors() .first_parent_only() .all() .expect("cannot fail without sorting") .skip(num) .find_map(Result::ok) { Some(commit) => replacements.push((*obj, commit.id)), None => errors.push(( *obj, Error::AncestorOutOfRange { oid: id.shorten_or_id(), desired: num, available: id .ancestors() .first_parent_only() .all() .expect("cannot fail without sorting") .skip(1) .count(), }, )), } } } } handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements) } fn peel_until(&mut self, kind: PeelTo<'_>) -> Option<()> { self.unset_disambiguate_call(); self.follow_refs_to_objects_if_needed()?; let mut replacements = Replacements::default(); let mut errors = Vec::new(); let objs = self.objs[self.idx].as_mut()?; let repo = self.repo; match kind { PeelTo::ValidObject => { for obj in objs.iter() { match repo.find_object(*obj) { Ok(_) => {} Err(err) => { errors.push((*obj, err.into())); } }; } } PeelTo::ObjectKind(kind) => { let peel = |obj| peel(repo, obj, kind); for obj in objs.iter() { match peel(obj) { Ok(replace) => replacements.push((*obj, replace)), Err(err) => errors.push((*obj, err)), } } } PeelTo::Path(path) => { let lookup_path = |obj: &ObjectId| { let tree_id = peel(repo, obj, gix_object::Kind::Tree)?; if path.is_empty() { return Ok((tree_id, gix_object::tree::EntryKind::Tree.into())); } let mut tree = repo.find_object(tree_id)?.into_tree(); let entry = tree.peel_to_entry_by_path(gix_path::from_bstr(path))? 
.ok_or_else(|| Error::PathNotFound { path: path.into(), object: obj.attach(repo).shorten_or_id(), tree: tree_id.attach(repo).shorten_or_id(), })?; Ok((entry.object_id(), entry.mode())) }; for obj in objs.iter() { match lookup_path(obj) { Ok((replace, mode)) => { if !path.is_empty() { // Technically this is letting the last one win, but so be it. self.paths[self.idx] = Some((path.to_owned(), mode)); } replacements.push((*obj, replace)); } Err(err) => errors.push((*obj, err)), } } } PeelTo::RecursiveTagObject => { for oid in objs.iter() { match oid.attach(repo).object().and_then(Object::peel_tags_to_end) { Ok(obj) => replacements.push((*oid, obj.id)), Err(err) => errors.push((*oid, err.into())), } } } } handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements) } fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> { self.unset_disambiguate_call(); self.follow_refs_to_objects_if_needed()?; #[cfg(not(feature = "revparse-regex"))] let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated }; #[cfg(feature = "revparse-regex")] let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) { Ok(compiled) => { let needs_regex = regex::escape(compiled.as_str()) != regex; move |message: &BStr| -> bool { if needs_regex { compiled.is_match(message) ^ negated } else { message.contains_str(regex) ^ negated } } } Err(err) => { self.err.push(err.into()); return None; } }; match self.objs[self.idx].as_mut() { Some(objs) => { let repo = self.repo; let mut errors = Vec::new(); let mut replacements = Replacements::default(); for oid in objs.iter() { match oid .attach(repo) .ancestors() .sorting(crate::revision::walk::Sorting::ByCommitTime(Default::default())) .all() { Ok(iter) => { let mut matched = false; let mut count = 0; let commits = iter.map(|res| { res.map_err(Error::from).and_then(|commit| { commit.id().object().map_err(Error::from).map(Object::into_commit) }) }); for commit in commits { count += 1; match commit { 
Ok(commit) => { if matches(commit.message_raw_sloppy()) { replacements.push((*oid, commit.id)); matched = true; break; } } Err(err) => errors.push((*oid, err)), } } if !matched { errors.push(( *oid, Error::NoRegexMatch { regex: regex.into(), commits_searched: count, oid: oid.attach(repo).shorten_or_id(), }, )); } } Err(err) => errors.push((*oid, err.into())), } } handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements) } None => match self.repo.references() { Ok(references) => match references.all() { Ok(references) => { match self .repo .rev_walk( references .peeled() .ok()? .filter_map(Result::ok) .filter(|r| r.id().header().ok().map_or(false, |obj| obj.kind().is_commit())) .filter_map(|r| r.detach().peeled), ) .sorting(crate::revision::walk::Sorting::ByCommitTime(Default::default())) .all() { Ok(iter) => { let mut matched = false; let mut count = 0; let commits = iter.map(|res| { res.map_err(Error::from).and_then(|commit| { commit.id().object().map_err(Error::from).map(Object::into_commit) }) }); for commit in commits { count += 1; match commit { Ok(commit) => { if matches(commit.message_raw_sloppy()) { self.objs[self.idx] .get_or_insert_with(HashSet::default) .insert(commit.id); matched = true; break; } } Err(err) => self.err.push(err), } } if matched { Some(()) } else { self.err.push(Error::NoRegexMatchAllRefs { regex: regex.into(), commits_searched: count, }); None } } Err(err) => { self.err.push(err.into()); None } } } Err(err) => { self.err.push(err.into()); None } }, Err(err) => { self.err.push(err.into()); None } }, } } fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()> { let stage = match stage { 0 => Stage::Unconflicted, 1 => Stage::Base, 2 => Stage::Ours, 3 => Stage::Theirs, _ => unreachable!( "BUG: driver will not pass invalid stages (and it uses integer to avoid gix-index as dependency)" ), }; self.unset_disambiguate_call(); match self.repo.index() { Ok(index) => match index.entry_by_path_and_stage(path, stage) { 
Some(entry) => { self.objs[self.idx] .get_or_insert_with(HashSet::default) .insert(entry.id); self.paths[self.idx] = Some(( path.to_owned(), entry .mode .to_tree_entry_mode() .unwrap_or(gix_object::tree::EntryKind::Blob.into()), )); Some(()) } None => { let stage_hint = [Stage::Unconflicted, Stage::Base, Stage::Ours] .iter() .filter(|our_stage| **our_stage != stage) .find_map(|stage| index.entry_index_by_path_and_stage(path, *stage).map(|_| *stage)); let exists = self .repo .work_dir() .map_or(false, |root| root.join(gix_path::from_bstr(path)).exists()); self.err.push(Error::IndexLookup { desired_path: path.into(), desired_stage: stage, exists, stage_hint, }); None } }, Err(err) => { self.err.push(err.into()); None } } } } gix-0.69.1/src/revision/spec/parse/delegate/revision.rs000064400000000000000000000262651046102023000211720ustar 00000000000000use std::collections::HashSet; use gix_hash::ObjectId; use gix_revision::spec::parse::{ delegate, delegate::{ReflogLookup, SiblingBranch}, }; use crate::{ bstr::{BStr, BString, ByteSlice}, ext::ReferenceExt, remote, revision::spec::parse::{Delegate, Error, RefsHint}, }; impl delegate::Revision for Delegate<'_> { fn find_ref(&mut self, name: &BStr) -> Option<()> { self.unset_disambiguate_call(); if !self.err.is_empty() && self.refs[self.idx].is_some() { return None; } match self.repo.refs.find(name) { Ok(r) => { assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice"); self.refs[self.idx] = Some(r); Some(()) } Err(err) => { self.err.push(err.into()); None } } } fn disambiguate_prefix( &mut self, prefix: gix_hash::Prefix, _must_be_commit: Option>, ) -> Option<()> { self.last_call_was_disambiguate_prefix[self.idx] = true; let mut candidates = Some(HashSet::default()); self.prefix[self.idx] = Some(prefix); let empty_tree_id = gix_hash::ObjectId::empty_tree(prefix.as_oid().kind()); let res = if prefix.as_oid() == empty_tree_id { candidates.as_mut().expect("set").insert(empty_tree_id); Ok(Some(Err(()))) } 
else { self.repo.objects.lookup_prefix(prefix, candidates.as_mut()) }; match res { Err(err) => { self.err.push(err.into()); None } Ok(None) => { self.err.push(Error::PrefixNotFound { prefix }); None } Ok(Some(Ok(_) | Err(()))) => { assert!(self.objs[self.idx].is_none(), "BUG: cannot set the same prefix twice"); let candidates = candidates.expect("set above"); match self.opts.refs_hint { RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise if prefix.hex_len() == candidates.iter().next().expect("at least one").kind().len_in_hex() => { self.ambiguous_objects[self.idx] = Some(candidates.clone()); self.objs[self.idx] = Some(candidates); Some(()) } RefsHint::PreferObject => { self.ambiguous_objects[self.idx] = Some(candidates.clone()); self.objs[self.idx] = Some(candidates); Some(()) } RefsHint::PreferRef | RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise | RefsHint::Fail => { match self.repo.refs.find(&prefix.to_string()) { Ok(ref_) => { assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice"); if self.opts.refs_hint == RefsHint::Fail { self.refs[self.idx] = Some(ref_.clone()); self.err.push(Error::AmbiguousRefAndObject { prefix, reference: ref_, }); self.err.push(Error::ambiguous(candidates, prefix, self.repo)); None } else { self.refs[self.idx] = Some(ref_); Some(()) } } Err(_) => { self.ambiguous_objects[self.idx] = Some(candidates.clone()); self.objs[self.idx] = Some(candidates); Some(()) } } } } } } } fn reflog(&mut self, query: ReflogLookup) -> Option<()> { self.unset_disambiguate_call(); let r = match &mut self.refs[self.idx] { Some(r) => r.clone().attach(self.repo), val @ None => match self.repo.head().map(crate::Head::try_into_referent) { Ok(Some(r)) => { *val = Some(r.clone().detach()); r } Ok(None) => { self.err.push(Error::UnbornHeadsHaveNoRefLog); return None; } Err(err) => { self.err.push(err.into()); return None; } }, }; let mut platform = r.log_iter(); match platform.rev().ok().flatten() { Some(mut it) => match query { 
ReflogLookup::Date(date) => { let mut last = None; let id_to_insert = match it .filter_map(Result::ok) .inspect(|d| { last = Some(if d.previous_oid.is_null() { d.new_oid } else { d.previous_oid }); }) .find(|l| l.signature.time.seconds <= date.seconds) { Some(closest_line) => closest_line.new_oid, None => match last { None => { self.err.push(Error::EmptyReflog); return None; } Some(id) => id, }, }; self.objs[self.idx] .get_or_insert_with(HashSet::default) .insert(id_to_insert); Some(()) } ReflogLookup::Entry(no) => match it.nth(no).and_then(Result::ok) { Some(line) => { self.objs[self.idx] .get_or_insert_with(HashSet::default) .insert(line.new_oid); Some(()) } None => { let available = platform.rev().ok().flatten().map_or(0, Iterator::count); self.err.push(Error::RefLogEntryOutOfRange { reference: r.detach(), desired: no, available, }); None } }, }, None => { self.err.push(Error::MissingRefLog { reference: r.name().as_bstr().into(), action: match query { ReflogLookup::Entry(_) => "lookup reflog entry by index", ReflogLookup::Date(_) => "lookup reflog entry by date", }, }); None } } } fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> { self.unset_disambiguate_call(); fn prior_checkouts_iter<'a>( platform: &'a mut gix_ref::file::log::iter::Platform<'static, '_>, ) -> Result + 'a, Error> { match platform.rev().ok().flatten() { Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| { line.message .strip_prefix(b"checkout: moving from ") .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos])) .map(|from_branch| (from_branch.into(), line.previous_oid)) })), None => Err(Error::MissingRefLog { reference: "HEAD".into(), action: "search prior checked out branch", }), } } let head = match self.repo.head() { Ok(head) => head, Err(err) => { self.err.push(err.into()); return None; } }; match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) { Ok(Some((ref_name, id))) => { let id = match 
self.repo.find_reference(ref_name.as_bstr()) { Ok(mut r) => { let id = r.peel_to_id_in_place().map(crate::Id::detach).unwrap_or(id); self.refs[self.idx] = Some(r.detach()); id } Err(_) => id, }; self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id); Some(()) } Ok(None) => { self.err.push(Error::PriorCheckoutOutOfRange { desired: branch_no, available: prior_checkouts_iter(&mut head.log_iter()) .map(Iterator::count) .unwrap_or(0), }); None } Err(err) => { self.err.push(err); None } } } fn sibling_branch(&mut self, kind: SiblingBranch) -> Option<()> { self.unset_disambiguate_call(); let reference = match &mut self.refs[self.idx] { val @ None => match self.repo.head().map(crate::Head::try_into_referent) { Ok(Some(r)) => { *val = Some(r.clone().detach()); r } Ok(None) => { self.err.push(Error::UnbornHeadForSibling); return None; } Err(err) => { self.err.push(err.into()); return None; } }, Some(r) => r.clone().attach(self.repo), }; let direction = match kind { SiblingBranch::Upstream => remote::Direction::Fetch, SiblingBranch::Push => remote::Direction::Push, }; match reference.remote_tracking_ref_name(direction) { None => self.err.push(Error::NoTrackingBranch { name: reference.inner.name, direction, }), Some(Err(err)) => self.err.push(Error::GetTrackingBranch { name: reference.inner.name, direction, source: Box::new(err), }), Some(Ok(name)) => match self.repo.find_reference(name.as_ref()) { Err(err) => self.err.push(Error::GetTrackingBranch { name: reference.inner.name, direction, source: Box::new(err), }), Ok(r) => { self.refs[self.idx] = r.inner.into(); return Some(()); } }, }; None } } gix-0.69.1/src/revision/spec/parse/error.rs000064400000000000000000000112331046102023000167000ustar 00000000000000use std::collections::HashSet; use gix_hash::ObjectId; use super::Error; use crate::{bstr, bstr::BString, ext::ObjectIdExt, Repository}; /// Additional information about candidates that caused ambiguity. 
#[derive(Debug)] pub enum CandidateInfo { /// An error occurred when looking up the object in the database. FindError { /// The reported error. source: crate::object::find::existing::Error, }, /// The candidate is an object of the given `kind`. Object { /// The kind of the object. kind: gix_object::Kind, }, /// The candidate is a tag. Tag { /// The name of the tag. name: BString, }, /// The candidate is a commit. Commit { /// The date of the commit. date: gix_date::Time, /// The subject line. title: BString, }, } impl std::fmt::Display for CandidateInfo { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { CandidateInfo::FindError { source } => write!(f, "lookup error: {source}"), CandidateInfo::Tag { name } => write!(f, "tag {name:?}"), CandidateInfo::Object { kind } => std::fmt::Display::fmt(kind, f), CandidateInfo::Commit { date, title } => { write!(f, "commit {} {title:?}", date.format(gix_date::time::format::SHORT)) } } } } impl Error { pub(crate) fn ambiguous(candidates: HashSet, prefix: gix_hash::Prefix, repo: &Repository) -> Self { #[derive(PartialOrd, Ord, Eq, PartialEq, Copy, Clone)] enum Order { Tag, Commit, Tree, Blob, Invalid, } let candidates = { let mut c: Vec<_> = candidates .into_iter() .map(|oid| { let obj = repo.find_object(oid); let order = match &obj { Err(_) => Order::Invalid, Ok(obj) => match obj.kind { gix_object::Kind::Tag => Order::Tag, gix_object::Kind::Commit => Order::Commit, gix_object::Kind::Tree => Order::Tree, gix_object::Kind::Blob => Order::Blob, }, }; (oid, obj, order) }) .collect(); c.sort_by(|lhs, rhs| lhs.2.cmp(&rhs.2).then_with(|| lhs.0.cmp(&rhs.0))); c }; Error::AmbiguousPrefix { prefix, info: candidates .into_iter() .map(|(oid, find_result, _)| { let info = match find_result { Ok(obj) => match obj.kind { gix_object::Kind::Tree | gix_object::Kind::Blob => CandidateInfo::Object { kind: obj.kind }, gix_object::Kind::Tag => { let tag = obj.to_tag_ref(); CandidateInfo::Tag { name: tag.name.into() } 
} gix_object::Kind::Commit => { use bstr::ByteSlice; let commit = obj.to_commit_ref(); CandidateInfo::Commit { date: commit.committer().time, title: commit.message().title.trim().into(), } } }, Err(err) => CandidateInfo::FindError { source: err }, }; (oid.attach(repo).shorten().unwrap_or_else(|_| oid.into()), info) }) .collect(), } } pub(crate) fn from_errors(errors: Vec) -> Self { assert!(!errors.is_empty()); match errors.len() { 0 => unreachable!( "BUG: cannot create something from nothing, must have recorded some errors to call from_errors()" ), 1 => errors.into_iter().next().expect("one"), _ => { let mut it = errors.into_iter().rev(); let mut recent = Error::Multi { current: Box::new(it.next().expect("at least one error")), next: None, }; for err in it { recent = Error::Multi { current: Box::new(err), next: Some(Box::new(recent)), } } recent } } } } gix-0.69.1/src/revision/spec/parse/mod.rs000064400000000000000000000043011046102023000163240ustar 00000000000000use std::collections::HashSet; use gix_hash::ObjectId; use gix_revision::spec::parse; use crate::{bstr::BStr, revision::Spec, Repository}; mod types; use crate::bstr::BString; pub use types::{Error, ObjectKindHint, Options, RefsHint}; /// pub mod single { use crate::bstr::BString; /// The error returned by [`crate::Repository::rev_parse_single()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Parse(#[from] super::Error), #[error("revspec {spec:?} did not resolve to a single object")] RangedRev { spec: BString }, } } /// pub mod error; impl<'repo> Spec<'repo> { /// Parse `spec` and use information from `repo` to resolve it, using `opts` to learn how to deal with ambiguity. /// /// Note that it's easier and to use [`repo.rev_parse()`][Repository::rev_parse()] instead. 
pub fn from_bstr<'a>(spec: impl Into<&'a BStr>, repo: &'repo Repository, opts: Options) -> Result { let mut delegate = Delegate::new(repo, opts); match gix_revision::spec::parse(spec.into(), &mut delegate) { Err(parse::Error::Delegate) => Err(delegate.into_err()), Err(err) => Err(err.into()), Ok(()) => delegate.into_rev_spec(), } } } struct Delegate<'repo> { refs: [Option; 2], objs: [Option>; 2], /// Path specified like `@:` or `:` for later use when looking up specs. /// Note that it terminates spec parsing, so it's either `0` or `1`, never both. paths: [Option<(BString, gix_object::tree::EntryMode)>; 2], /// The originally encountered ambiguous objects for potential later use in errors. ambiguous_objects: [Option>; 2], idx: usize, kind: Option, opts: Options, err: Vec, /// The ambiguous prefix obtained during a call to `disambiguate_prefix()`. prefix: [Option; 2], /// If true, we didn't try to do any other transformation which might have helped with disambiguation. last_call_was_disambiguate_prefix: [bool; 2], repo: &'repo Repository, } mod delegate; gix-0.69.1/src/revision/spec/parse/types.rs000064400000000000000000000205431046102023000167170ustar 00000000000000use crate::{bstr::BString, object, reference, remote}; /// A hint to know what to do if refs and object names are equal. #[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] pub enum RefsHint { /// This is the default, and leads to specs that look like objects identified by full hex sha and are objects to be used /// instead of similarly named references. The latter is not typical but can absolutely happen by accident. /// If the object prefix is shorter than the maximum hash length of the repository, use the reference instead, which is /// preferred as there are many valid object names like `beef` and `cafe` that are short and both valid and typical prefixes /// for objects. /// Git chooses this as default as well, even though it means that every object prefix is also looked up as ref. 
#[default] PreferObjectOnFullLengthHexShaUseRefOtherwise, /// No matter what, if it looks like an object prefix and has an object, use it. /// Note that no ref-lookup is made here which is the fastest option. PreferObject, /// When an object is found for a given prefix, also check if a reference exists with that name and if it does, /// use that moving forward. PreferRef, /// If there is an ambiguous situation, instead of silently choosing one over the other, fail instead. Fail, } /// A hint to know which object kind to prefer if multiple objects match a prefix. /// /// This disambiguation mechanism is applied only if there is no disambiguation hints in the spec itself. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ObjectKindHint { /// Pick objects that are commits themselves. Commit, /// Pick objects that can be peeled into a commit, i.e. commits themselves or tags which are peeled until a commit is found. Committish, /// Pick objects that are trees themselves. Tree, /// Pick objects that can be peeled into a tree, i.e. trees themselves or tags which are peeled until a tree is found or commits /// whose tree is chosen. Treeish, /// Pick objects that are blobs. Blob, } /// Options for use in [`revision::Spec::from_bstr()`][crate::revision::Spec::from_bstr()]. #[derive(Debug, Default, Copy, Clone)] pub struct Options { /// What to do if both refs and object names match the same input. pub refs_hint: RefsHint, /// The hint to use when encountering multiple object matching a prefix. /// /// If `None`, the rev-spec itself must disambiguate the object by drilling down to desired kinds or applying /// other disambiguating transformations. pub object_kind_hint: Option, } /// The error returned by [`crate::Repository::rev_parse()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("Could not peel '{}' to obtain its target", name)] PeelToId { name: gix_ref::FullName, source: reference::peel::Error, }, #[error("The rev-spec is malformed and misses a ref name")] Malformed, #[error("Unborn heads do not have a reflog yet")] UnbornHeadsHaveNoRefLog, #[error("Unborn heads cannot have push or upstream tracking branches")] UnbornHeadForSibling, #[error("Branch named {name} does not have a {} tracking branch configured", direction.as_str())] NoTrackingBranch { name: gix_ref::FullName, direction: remote::Direction, }, #[error("Error when obtaining {} tracking branch for {name}", direction.as_str())] GetTrackingBranch { name: gix_ref::FullName, direction: remote::Direction, source: Box, }, #[error("Reference {reference:?} does not have a reference log, cannot {action}")] MissingRefLog { reference: BString, action: &'static str }, #[error("HEAD has {available} prior checkouts and checkout number {desired} is out of range")] PriorCheckoutOutOfRange { desired: usize, available: usize }, #[error("Reference {:?} has {available} ref-log entries and entry number {desired} is out of range", reference.name.as_bstr())] RefLogEntryOutOfRange { reference: gix_ref::Reference, desired: usize, available: usize, }, #[error( "Commit {oid} has {available} ancestors along the first parent and ancestor number {desired} is out of range" )] AncestorOutOfRange { oid: gix_hash::Prefix, desired: usize, available: usize, }, #[error("Commit {oid} has {available} parents and parent number {desired} is out of range")] ParentOutOfRange { oid: gix_hash::Prefix, desired: usize, available: usize, }, #[error("Path {desired_path:?} did not exist in index at stage {}{}{}", *desired_stage as u8, stage_hint.map(|actual|format!(". It does exist at stage {}", actual as u8)).unwrap_or_default(), exists.then(|| ". It exists on disk").unwrap_or(". 
It does not exist on disk"))] IndexLookup { desired_path: BString, desired_stage: gix_index::entry::Stage, stage_hint: Option, exists: bool, }, #[error(transparent)] FindHead(#[from] reference::find::existing::Error), #[error(transparent)] Index(#[from] crate::worktree::open_index::Error), #[error(transparent)] RevWalkIterInit(#[from] crate::reference::iter::init::Error), #[error(transparent)] RevWalkAllReferences(#[from] gix_ref::packed::buffer::open::Error), #[cfg(feature = "revparse-regex")] #[error(transparent)] InvalidRegex(#[from] regex::Error), #[cfg_attr( feature = "revparse-regex", error("None of {commits_searched} commits from {oid} matched regex {regex:?}") )] #[cfg_attr( not(feature = "revparse-regex"), error("None of {commits_searched} commits from {oid} matched text {regex:?}") )] NoRegexMatch { regex: BString, oid: gix_hash::Prefix, commits_searched: usize, }, #[cfg_attr( feature = "revparse-regex", error("None of {commits_searched} commits reached from all references matched regex {regex:?}") )] #[cfg_attr( not(feature = "revparse-regex"), error("None of {commits_searched} commits reached from all references matched text {regex:?}") )] NoRegexMatchAllRefs { regex: BString, commits_searched: usize }, #[error( "The short hash {prefix} matched both the reference {} and at least one object", reference.name)] AmbiguousRefAndObject { /// The prefix to look for. prefix: gix_hash::Prefix, /// The reference matching the prefix. 
reference: gix_ref::Reference, }, #[error(transparent)] IdFromHex(#[from] gix_hash::decode::Error), #[error(transparent)] FindReference(#[from] gix_ref::file::find::existing::Error), #[error(transparent)] FindObject(#[from] object::find::existing::Error), #[error(transparent)] LookupPrefix(#[from] gix_odb::store::prefix::lookup::Error), #[error(transparent)] PeelToKind(#[from] object::peel::to_kind::Error), #[error("Object {oid} was a {actual}, but needed it to be a {expected}")] ObjectKind { oid: gix_hash::Prefix, actual: gix_object::Kind, expected: gix_object::Kind, }, #[error(transparent)] Parse(#[from] gix_revision::spec::parse::Error), #[error("An object prefixed {prefix} could not be found")] PrefixNotFound { prefix: gix_hash::Prefix }, #[error("Short id {prefix} is ambiguous. Candidates are:\n{}", info.iter().map(|(oid, info)| format!("\t{oid} {info}")).collect::>().join("\n"))] AmbiguousPrefix { prefix: gix_hash::Prefix, info: Vec<(gix_hash::Prefix, super::error::CandidateInfo)>, }, #[error("Could not find path {path:?} in tree {tree} of parent object {object}")] PathNotFound { object: gix_hash::Prefix, tree: gix_hash::Prefix, path: BString, }, #[error("{current}")] Multi { current: Box, #[source] next: Option>, }, #[error(transparent)] Traverse(#[from] crate::revision::walk::iter::Error), #[error(transparent)] Walk(#[from] crate::revision::walk::Error), #[error("Spec does not contain a single object id")] SingleNotFound, #[error("Reflog does not contain any entries")] EmptyReflog, } gix-0.69.1/src/revision/walk.rs000064400000000000000000000346631046102023000144550ustar 00000000000000use gix_hash::ObjectId; use gix_object::FindExt; use gix_traverse::commit::simple::CommitTimeOrder; use crate::{ext::ObjectIdExt, revision, Repository}; /// The error returned by [`Platform::all()`] and [`Platform::selected()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] SimpleTraversal(#[from] gix_traverse::commit::simple::Error), #[error(transparent)] ShallowCommits(#[from] crate::shallow::read::Error), #[error(transparent)] ConfigBoolean(#[from] crate::config::boolean::Error), } /// Specify how to sort commits during a [revision::Walk] traversal. /// /// ### Sample History /// /// The following history will be referred to for explaining how the sort order works, with the number denoting the commit timestamp /// (*their X-alignment doesn't matter*). /// /// ```text /// ---1----2----4----7 <- second parent of 8 /// \ \ /// 3----5----6----8--- /// ``` #[derive(Default, Debug, Copy, Clone)] pub enum Sorting { /// Commits are sorted as they are mentioned in the commit graph. /// /// In the *sample history* the order would be `8, 6, 7, 5, 4, 3, 2, 1` /// /// ### Note /// /// This is not to be confused with `git log/rev-list --topo-order`, which is notably different from /// as it avoids overlapping branches. #[default] BreadthFirst, /// Commits are sorted by their commit time in the order specified, either newest or oldest first. /// /// The sorting applies to all currently queued commit ids and thus is full. /// /// In the *sample history* the order would be `8, 7, 6, 5, 4, 3, 2, 1` for [`NewestFirst`](CommitTimeOrder::NewestFirst), /// or `1, 2, 3, 4, 5, 6, 7, 8` for [`OldestFirst`](CommitTimeOrder::OldestFirst). /// /// # Performance /// /// This mode benefits greatly from having an [object cache](crate::Repository::object_cache_size) configured /// to avoid having to look up each commit twice. ByCommitTime(CommitTimeOrder), /// This sorting is similar to [`ByCommitTime`](Sorting::ByCommitTimeCutoff), but adds a cutoff to not return commits older than /// a given time, stopping the iteration once no younger commits is queued to be traversed. 
/// /// As the query is usually repeated with different cutoff dates, this search mode benefits greatly from an object cache. /// /// In the *sample history* and a cut-off date of 4, the returned list of commits would be `8, 7, 6, 4` ByCommitTimeCutoff { /// The order in wich to prioritize lookups order: CommitTimeOrder, /// The amount of seconds since unix epoch to use as cut-off time. seconds: gix_date::SecondsSinceUnixEpoch, }, } impl Sorting { fn into_simple(self) -> Option { Some(match self { Sorting::BreadthFirst => gix_traverse::commit::simple::Sorting::BreadthFirst, Sorting::ByCommitTime(order) => gix_traverse::commit::simple::Sorting::ByCommitTime(order), Sorting::ByCommitTimeCutoff { seconds, order } => { gix_traverse::commit::simple::Sorting::ByCommitTimeCutoff { order, seconds } } }) } } /// Information about a commit that we obtained naturally as part of the iteration. #[derive(Debug, Clone)] pub struct Info<'repo> { /// The detached id of the commit. pub id: gix_hash::ObjectId, /// All parent ids we have encountered. Note that these will be at most one if [`Parents::First`][gix_traverse::commit::Parents::First] is enabled. pub parent_ids: gix_traverse::commit::ParentIds, /// The time at which the commit was created. It will only be `Some(_)` if the chosen traversal was /// taking dates into consideration. pub commit_time: Option, repo: &'repo Repository, } /// Access impl<'repo> Info<'repo> { /// Provide an attached version of our [`id`][Info::id] field. pub fn id(&self) -> crate::Id<'repo> { self.id.attach(self.repo) } /// Read the whole object from the object database. /// /// Note that this is an expensive operation which shouldn't be performed unless one needs more than parent ids /// and commit time. pub fn object(&self) -> Result, crate::object::find::existing::Error> { Ok(self.id().object()?.into_commit()) } /// Provide an iterator yielding attached versions of our [`parent_ids`][Info::parent_ids] field. 
pub fn parent_ids(&self) -> impl Iterator> + '_ { self.parent_ids.iter().map(|id| id.attach(self.repo)) } /// Returns the commit-time of this commit. /// /// ### Panics /// /// If the iteration wasn't ordered by date. pub fn commit_time(&self) -> gix_date::SecondsSinceUnixEpoch { self.commit_time.expect("traversal involving date caused it to be set") } } /// Initialization and detachment impl<'repo> Info<'repo> { /// Create a new instance that represents `info`, but is attached to `repo` as well. pub fn new(info: gix_traverse::commit::Info, repo: &'repo Repository) -> Self { Info { id: info.id, parent_ids: info.parent_ids, commit_time: info.commit_time, repo, } } /// Consume this instance and remove the reference to the underlying repository. /// /// This is useful for sending instances across threads, for example. pub fn detach(self) -> gix_traverse::commit::Info { gix_traverse::commit::Info { id: self.id, parent_ids: self.parent_ids, commit_time: self.commit_time, } } } /// A platform to traverse the revision graph by adding starting points as well as points which shouldn't be crossed, /// returned by [`Repository::rev_walk()`]. /// /// **Note that we automatically leverage the commitgraph data structure**, but if you know that additional information like /// author or commit messages will be required of *all* commits traversed here, it should be better to avoid trying to load it /// by [turning commit-graph support off][Platform::use_commit_graph()]. This certainly is a micro-optimization though. pub struct Platform<'repo> { /// The owning repository. 
pub repo: &'repo Repository, pub(crate) tips: Vec, pub(crate) prune: Vec, pub(crate) sorting: Sorting, pub(crate) parents: gix_traverse::commit::Parents, pub(crate) use_commit_graph: Option, pub(crate) commit_graph: Option, } impl<'repo> Platform<'repo> { pub(crate) fn new(tips: impl IntoIterator>, repo: &'repo Repository) -> Self { revision::walk::Platform { repo, tips: tips.into_iter().map(Into::into).collect(), sorting: Default::default(), parents: Default::default(), use_commit_graph: None, commit_graph: None, prune: Vec::new(), } } } /// Create-time builder methods impl Platform<'_> { /// Set the sort mode for commits to the given value. The default is to order topologically breadth-first. pub fn sorting(mut self, sorting: Sorting) -> Self { self.sorting = sorting; self } /// Only traverse the first parent of the commit graph. pub fn first_parent_only(mut self) -> Self { self.parents = gix_traverse::commit::Parents::First; self } /// Allow using the commitgraph, if present, if `toggle` is `true`, or disallow it with `false`. Set it to `None` to leave /// control over this to the configuration of `core.commitGraph` (the default). /// /// Errors when loading the graph lead to falling back to the object database, it's treated as optional cache. pub fn use_commit_graph(mut self, toggle: impl Into>) -> Self { self.use_commit_graph = toggle.into(); self } /// Set or unset the commit-graph to use for the iteration. This is useful if the caller wants to check if a commit-graph exists /// and refer different implementations depending on the outcome. /// /// It interacts with [`use_commit_graph`][Platform::use_commit_graph()] as one would expect, but it's worth noting that if `None`, /// with [`use_commit_graph`][Platform::use_commit_graph()] being `true`, a graph will still be used for iteration. /// To turn the commit-graph off, call [`use_commit_graph(false)`][Platform::use_commit_graph()] instead. 
pub fn with_commit_graph(mut self, graph: Option) -> Self { self.commit_graph = graph; self } /// Prune the commit with the given `ids` such that they won't be returned, and such that none of their ancestors is returned either. /// /// Note that this forces the [sorting](Self::sorting) to /// [`ByCommitTimeCutoff`](Sorting::ByCommitTimeCutoff) configured with /// the oldest available commit time, ensuring that no commits older than the oldest of `ids` will be returned either. /// /// Also note that commits that can't be accessed or are missing are simply ignored for the purpose of obtaining the cutoff date. #[doc(alias = "hide", alias = "git2")] pub fn with_pruned(mut self, ids: impl IntoIterator>) -> Self { let (mut cutoff, order) = match self.sorting { Sorting::ByCommitTimeCutoff { seconds, order } => (Some(seconds), order), Sorting::ByCommitTime(order) => (None, order), Sorting::BreadthFirst => (None, CommitTimeOrder::default()), }; for id in ids.into_iter() { let id = id.into(); if !self.prune.contains(&id) { if let Some(time) = self.repo.find_commit(id).ok().and_then(|c| c.time().ok()) { if cutoff.is_none() || cutoff > Some(time.seconds) { cutoff = time.seconds.into(); } } self.prune.push(id); } } if let Some(cutoff) = cutoff { self.sorting = Sorting::ByCommitTimeCutoff { seconds: cutoff, order } } self } } /// Produce the iterator impl<'repo> Platform<'repo> { /// For each commit, let `filter` return `true` if it and its parents should be included in the traversal, or `false` /// if the traversal should exclude it and its ancestry entirely. /// /// If `filter` is None, no pruning of the graph will be performed which is the default. 
pub fn selected( self, mut filter: impl FnMut(&gix_hash::oid) -> bool + 'repo, ) -> Result, Error> { let Platform { repo, tips, sorting, parents, use_commit_graph, commit_graph, mut prune, } = self; prune.sort(); Ok(revision::Walk { repo, inner: Box::new( gix_traverse::commit::Simple::filtered(tips, &repo.objects, { // Note that specific shallow handling for commit-graphs isn't needed as these contain // all information there is, and exclude shallow parents to be structurally consistent. let shallow_commits = repo.shallow_commits()?; let mut grafted_parents_to_skip = Vec::new(); let mut buf = Vec::new(); move |id| { if !filter(id) { return false; } let id = id.to_owned(); if prune.binary_search(&id).is_ok() { return false; } match shallow_commits.as_ref() { Some(commits) => { if let Ok(idx) = grafted_parents_to_skip.binary_search(&id) { grafted_parents_to_skip.remove(idx); return false; }; if commits.binary_search(&id).is_ok() { if let Ok(commit) = repo.objects.find_commit_iter(&id, &mut buf) { grafted_parents_to_skip.extend(commit.parent_ids()); grafted_parents_to_skip.sort(); } }; true } None => true, } } }) .sorting(sorting.into_simple().expect("for now there is nothing else"))? .parents(parents) .commit_graph( commit_graph.or(use_commit_graph .map_or_else(|| self.repo.config.may_use_commit_graph(), Ok)? .then(|| self.repo.commit_graph().ok()) .flatten()), ) .map(|res| res.map_err(iter::Error::from)), ), }) } /// Return an iterator to traverse all commits reachable as configured by the [Platform]. /// /// # Performance /// /// It's highly recommended to set an [`object cache`](Repository::object_cache_size()) on the parent repo /// to greatly speed up performance if the returned id is supposed to be looked up right after. pub fn all(self) -> Result, Error> { self.selected(|_| true) } } /// pub mod iter { /// The error returned by the [Walk](crate::revision::Walk) iterator. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] SimpleTraversal(#[from] gix_traverse::commit::simple::Error), } } pub(crate) mod iter_impl { /// The iterator returned by [`crate::revision::walk::Platform::all()`]. pub struct Walk<'repo> { /// The owning repository. pub repo: &'repo crate::Repository, pub(crate) inner: Box> + 'repo>, } impl<'repo> Iterator for Walk<'repo> { type Item = Result, super::iter::Error>; fn next(&mut self) -> Option { self.inner .next() .map(|res| res.map(|info| super::Info::new(info, self.repo))) } } } gix-0.69.1/src/shallow.rs000064400000000000000000000007501046102023000133200ustar 00000000000000pub(crate) type CommitsStorage = gix_features::threading::OwnShared>>; /// A lazily loaded and auto-updated list of commits which are at the shallow boundary (behind which there are no commits available), /// sorted to allow bisecting. pub type Commits = gix_fs::SharedFileSnapshot>; /// pub mod read { pub use gix_shallow::read::Error; } /// pub mod write { pub use gix_shallow::write::Error; } gix-0.69.1/src/state.rs000064400000000000000000000013231046102023000127640ustar 00000000000000/// Tell what operation is currently in progress. #[derive(Debug, PartialEq, Eq)] pub enum InProgress { /// A mailbox is being applied. ApplyMailbox, /// A rebase is happening while a mailbox is being applied. // TODO: test ApplyMailboxRebase, /// A git bisect operation has not yet been concluded. Bisect, /// A cherry pick operation. CherryPick, /// A cherry pick with multiple commits pending. CherryPickSequence, /// A merge operation. Merge, /// A rebase operation. Rebase, /// An interactive rebase operation. RebaseInteractive, /// A revert operation. Revert, /// A revert operation with multiple commits pending. 
RevertSequence, } gix-0.69.1/src/status/index_worktree.rs000064400000000000000000001112051046102023000162210ustar 00000000000000use crate::bstr::{BStr, BString}; use crate::{config, Repository}; use gix_status::index_as_worktree::traits::{CompareBlobs, SubmoduleStatus}; use std::sync::atomic::AtomicBool; /// The error returned by [Repository::index_worktree_status()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error("A working tree is required to perform a directory walk")] MissingWorkDir, #[error(transparent)] AttributesAndExcludes(#[from] crate::repository::attributes::Error), #[error(transparent)] Pathspec(#[from] crate::pathspec::init::Error), #[error(transparent)] Prefix(#[from] gix_path::realpath::Error), #[error(transparent)] FilesystemOptions(#[from] config::boolean::Error), #[error(transparent)] IndexAsWorktreeWithRenames(#[from] gix_status::index_as_worktree_with_renames::Error), #[error(transparent)] StatOptions(#[from] config::stat_options::Error), #[error(transparent)] ResourceCache(#[from] crate::diff::resource_cache::Error), } /// Options for use with [Repository::index_worktree_status()]. #[derive(Default, Debug, Clone, Copy, PartialEq)] pub struct Options { /// The way all output should be sorted. /// /// If `None`, and depending on the `rewrites` field, output will be immediate but the output order /// isn't determined, and may differ between two runs. `rewrites` also depend on the order of entries that /// are presented to it, hence for deterministic results, sorting needs to be enabled. /// /// If `Some(_)`, all entries are collected beforehand, so they can be sorted before outputting any of them /// to the user. /// /// If immediate output of entries in any order is desired, this should be `None`, /// along with `rewrites` being `None` as well. pub sorting: Option, /// If not `None`, the options to configure the directory walk, determining how its results will look like. 
/// /// If `None`, only modification checks are performed. /// /// Can be instantiated with [Repository::dirwalk_options()]. pub dirwalk_options: Option, /// If `Some(_)`, along with `Some(_)` in `dirwalk_options`, rewrite tracking will be performed between the /// index and the working tree. /// Note that there is no git-configuration specific to index-worktree rename tracking. /// When rewrite tracking is enabled, there will be a delay for some entries as they partake in the rename-analysis. pub rewrites: Option, /// If set, don't use more than this amount of threads for the tracked modification check. /// Otherwise, usually use as many threads as there are logical cores. /// A value of 0 is interpreted as no-limit pub thread_limit: Option, } impl Repository { /// Obtain the status between the index and the worktree, involving modification checks /// for all tracked files along with information about untracked (and posisbly ignored) files (if configured). /// /// * `index` /// - The index to use for modification checks, and to know which files are tacked when applying the dirwalk. /// * `patterns` /// - Optional patterns to use to limit the paths to look at. If empty, all paths are considered. /// * `delegate` /// - The sink for receiving all status data. /// * `compare` /// - The implementations for fine-grained control over what happens if a hash must be recalculated. /// * `submodule` /// - Control what kind of information to retrieve when a submodule is encountered while traversing the index. /// * `progress` /// - A progress indication for index modification checks. /// * `should_interrupt` /// - A flag to stop the whole operation. /// * `options` /// - Additional configuration for all parts of the operation. /// /// ### Note /// /// This is a lower-level method, prefer the [`status`](Repository::status()) method for greater ease of use. 
#[allow(clippy::too_many_arguments)] pub fn index_worktree_status<'index, T, U, E>( &self, index: &'index gix_index::State, patterns: impl IntoIterator>, delegate: &mut impl gix_status::index_as_worktree_with_renames::VisitEntry< 'index, ContentChange = T, SubmoduleStatus = U, >, compare: impl CompareBlobs + Send + Clone, submodule: impl SubmoduleStatus + Send + Clone, progress: &mut dyn gix_features::progress::Progress, should_interrupt: &AtomicBool, options: Options, ) -> Result where T: Send + Clone, U: Send + Clone, E: std::error::Error + Send + Sync + 'static, { let _span = gix_trace::coarse!("gix::index_worktree_status"); let workdir = self.work_dir().ok_or(Error::MissingWorkDir)?; let attrs_and_excludes = self.attributes( index, crate::worktree::stack::state::attributes::Source::WorktreeThenIdMapping, crate::worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped, None, )?; let pathspec = crate::Pathspec::new( self, options .dirwalk_options .as_ref() .map_or(false, |opts| opts.empty_patterns_match_prefix), patterns, true, /* inherit ignore case */ || Ok(attrs_and_excludes.clone()), )?; let cwd = self.current_dir(); let git_dir_realpath = crate::path::realpath_opts(self.git_dir(), cwd, crate::path::realpath::MAX_SYMLINKS)?; let fs_caps = self.filesystem_options()?; let accelerate_lookup = fs_caps.ignore_case.then(|| index.prepare_icase_backing()); let resource_cache = crate::diff::resource_cache( self, gix_diff::blob::pipeline::Mode::ToGit, attrs_and_excludes.inner, gix_diff::blob::pipeline::WorktreeRoots { old_root: None, new_root: Some(workdir.to_owned()), }, )?; let out = gix_status::index_as_worktree_with_renames( index, workdir, delegate, compare, submodule, self.objects.clone().into_arc().expect("arc conversion always works"), progress, gix_status::index_as_worktree_with_renames::Context { pathspec: pathspec.search, resource_cache, should_interrupt, dirwalk: gix_status::index_as_worktree_with_renames::DirwalkContext { git_dir_realpath: 
git_dir_realpath.as_path(), current_dir: cwd, ignore_case_index_lookup: accelerate_lookup.as_ref(), }, }, gix_status::index_as_worktree_with_renames::Options { sorting: options.sorting, object_hash: self.object_hash(), tracked_file_modifications: gix_status::index_as_worktree::Options { fs: fs_caps, thread_limit: options.thread_limit, stat: self.stat_options()?, }, dirwalk: options.dirwalk_options.map(Into::into), rewrites: options.rewrites, }, )?; Ok(out) } } /// An implementation of a trait to use with [`Repository::index_worktree_status()`] to compute the submodule status /// using [Submodule::status()](crate::Submodule::status()). #[derive(Clone)] pub struct BuiltinSubmoduleStatus { mode: crate::status::Submodule, #[cfg(feature = "parallel")] repo: crate::ThreadSafeRepository, #[cfg(not(feature = "parallel"))] git_dir: std::path::PathBuf, submodule_paths: Vec, } /// mod submodule_status { use crate::bstr; use crate::bstr::BStr; use crate::status::index_worktree::BuiltinSubmoduleStatus; use crate::status::Submodule; use std::borrow::Cow; impl BuiltinSubmoduleStatus { /// Create a new instance from a `repo` and a `mode` to control how the submodule status will be obtained. pub fn new( repo: crate::ThreadSafeRepository, mode: Submodule, ) -> Result { let local_repo = repo.to_thread_local(); let submodule_paths = match local_repo.submodules()? { Some(sm) => { let mut v: Vec<_> = sm.filter_map(|sm| sm.path().ok().map(Cow::into_owned)).collect(); v.sort(); v } None => Vec::new(), }; Ok(Self { mode, #[cfg(feature = "parallel")] repo, #[cfg(not(feature = "parallel"))] git_dir: local_repo.git_dir().to_owned(), submodule_paths, }) } } /// The error returned submodule status checks. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] SubmoduleStatus(#[from] crate::submodule::status::Error), #[error(transparent)] IgnoreConfig(#[from] crate::submodule::config::Error), } impl gix_status::index_as_worktree::traits::SubmoduleStatus for BuiltinSubmoduleStatus { type Output = crate::submodule::Status; type Error = Error; fn status(&mut self, _entry: &gix_index::Entry, rela_path: &BStr) -> Result, Self::Error> { use bstr::ByteSlice; if self .submodule_paths .binary_search_by(|path| path.as_bstr().cmp(rela_path)) .is_err() { return Ok(None); } #[cfg(feature = "parallel")] let repo = self.repo.to_thread_local(); #[cfg(not(feature = "parallel"))] let Ok(repo) = crate::open(&self.git_dir) else { return Ok(None); }; let Ok(Some(mut submodules)) = repo.submodules() else { return Ok(None); }; let Some(sm) = submodules.find(|sm| sm.path().map_or(false, |path| path == rela_path)) else { return Ok(None); }; let (ignore, check_dirty) = match self.mode { Submodule::AsConfigured { check_dirty } => (sm.ignore()?.unwrap_or_default(), check_dirty), Submodule::Given { ignore, check_dirty } => (ignore, check_dirty), }; let status = sm.status(ignore, check_dirty)?; Ok(status.is_dirty().and_then(|dirty| dirty.then_some(status))) } } } /// An iterator for changes between the index and the worktree. /// /// Note that depending on the underlying configuration, there might be a significant delay until the first /// item is received due to the buffering necessary to perform rename tracking and/or sorting. /// /// ### Submodules /// /// Note that submodules can be set to 'inactive', which will not exclude them from the status operation, similar to /// how `git status` includes them. /// /// ### Index Changes /// /// Changes to the index are collected and it's possible to write the index back using [iter::Outcome::write_changes()]. /// Note that these changes are not observable, they will always be kept. 
/// /// ### Parallel Operation /// /// Note that without the `parallel` feature, the iterator becomes 'serial', which means all status will be computed in advance /// and it's non-interruptible, yielding worse performance for is-dirty checks for instance as interruptions won't happen. /// It's a crutch that is just there to make single-threaded applications possible at all, as it's not really an iterator /// anymore. If this matters, better run [Repository::index_worktree_status()] by hand as it provides all control one would need, /// just not as an iterator. /// /// Also, even with `parallel` set, the first call to `next()` will block until there is an item available, without a chance /// to interrupt unless [`status::Platform::should_interrupt_*()`](crate::status::Platform::should_interrupt_shared()) was /// configured. pub struct Iter { #[cfg(feature = "parallel")] #[allow(clippy::type_complexity)] rx_and_join: Option<( std::sync::mpsc::Receiver, std::thread::JoinHandle>, )>, #[cfg(feature = "parallel")] should_interrupt: crate::status::OwnedOrStaticAtomicBool, /// Without parallelization, the iterator has to buffer all changes in advance. #[cfg(not(feature = "parallel"))] items: std::vec::IntoIter, /// The outcome of the operation, only available once the operation has ended. out: Option, /// The set of `(entry_index, change)` we extracted in order to potentially write back the index with the changes applied. 
changes: Vec<(usize, iter::ApplyChange)>, } /// pub mod iter { use crate::bstr::{BStr, BString}; use crate::config::cache::util::ApplyLeniencyDefault; use crate::status::index_worktree::{iter, BuiltinSubmoduleStatus}; use crate::status::{index_worktree, Platform}; use crate::worktree::IndexPersistedOrInMemory; use gix_status::index_as_worktree::{Change, EntryStatus}; pub use gix_status::index_as_worktree_with_renames::Summary; pub(super) enum ApplyChange { SetSizeToZero, NewStat(crate::index::entry::Stat), } /// The data the thread sends over to the receiving iterator. pub struct Outcome { /// The outcome of the index-to-worktree comparison operation. pub index_worktree: gix_status::index_as_worktree_with_renames::Outcome, /// The index that was used for the operation. pub index: crate::worktree::IndexPersistedOrInMemory, skip_hash: bool, changes: Option>, } impl Outcome { /// Returns `true` if the index has received currently unapplied changes that *should* be written back. /// /// If they are not written back, subsequent `status` operations will take longer to complete, whereas the /// additional work can be prevented by writing the changes back to the index. pub fn has_changes(&self) -> bool { self.changes.as_ref().map_or(false, |changes| !changes.is_empty()) } /// Write the changes if there are any back to the index file. /// This can only be done once as the changes are consumed in the process, if there were any. 
pub fn write_changes(&mut self) -> Option> { let _span = gix_features::trace::coarse!("gix::status::index_worktree::iter::Outcome::write_changes()"); let changes = self.changes.take()?; let mut index = match &self.index { IndexPersistedOrInMemory::Persisted(persisted) => (***persisted).clone(), IndexPersistedOrInMemory::InMemory(index) => index.clone(), }; let entries = index.entries_mut(); for (entry_index, change) in changes { let entry = &mut entries[entry_index]; match change { ApplyChange::SetSizeToZero => { entry.stat.size = 0; } ApplyChange::NewStat(new_stat) => { entry.stat = new_stat; } } } Some(index.write(crate::index::write::Options { extensions: Default::default(), skip_hash: self.skip_hash, })) } } /// Either an index entry for renames or another directory entry in case of copies. #[derive(Clone, PartialEq, Debug)] pub enum RewriteSource { /// The source originates in the index and is detected as missing in the working tree. /// This can also happen for copies. RewriteFromIndex { /// The entry that is the source of the rewrite, which means it was removed on disk, /// equivalent to [Change::Removed]. /// /// Note that the [entry-id](gix_index::Entry::id) is the content-id of the source of the rewrite. source_entry: gix_index::Entry, /// The index of the `source_entry` for lookup in [`gix_index::State::entries()`] - useful to look at neighbors. source_entry_index: usize, /// The repository-relative path of the `source_entry`. source_rela_path: BString, /// The computed status of the `source_entry`. source_status: gix_status::index_as_worktree::EntryStatus<(), crate::submodule::Status>, }, /// This source originates in the directory tree and is always the source of copies. CopyFromDirectoryEntry { /// The source of the copy operation, which is also an entry of the directory walk. /// /// Note that its [`rela_path`](gix_dir::EntryRef::rela_path) is the source of the rewrite. 
source_dirwalk_entry: gix_dir::Entry, /// `collapsed_directory_status` is `Some(dir_status)` if this `source_dirwalk_entry` was part of a directory with the given /// `dir_status` that wasn't the same as the one of `source_dirwalk_entry` and /// if [gix_dir::walk::Options::emit_collapsed] was [CollapsedEntriesEmissionMode::OnStatusMismatch](gix_dir::walk::CollapsedEntriesEmissionMode::OnStatusMismatch). /// It will also be `Some(dir_status)` if that option was [CollapsedEntriesEmissionMode::All](gix_dir::walk::CollapsedEntriesEmissionMode::All). source_dirwalk_entry_collapsed_directory_status: Option, /// The object id as it would appear if the entry was written to the object database. /// It's the same as [`dirwalk_entry_id`](Item::Rewrite), or `diff` is `Some(_)` to indicate that the copy /// was determined by similarity, not by content equality. source_dirwalk_entry_id: gix_hash::ObjectId, }, } /// Access impl RewriteSource { /// The repository-relative path of this source. pub fn rela_path(&self) -> &BStr { match self { RewriteSource::RewriteFromIndex { source_rela_path, .. } => source_rela_path.as_ref(), RewriteSource::CopyFromDirectoryEntry { source_dirwalk_entry, .. 
} => source_dirwalk_entry.rela_path.as_ref(), } } } impl<'index> From> for RewriteSource { fn from(value: gix_status::index_as_worktree_with_renames::RewriteSource<'index, (), SubmoduleStatus>) -> Self { match value { gix_status::index_as_worktree_with_renames::RewriteSource::RewriteFromIndex { index_entries: _, source_entry, source_entry_index, source_rela_path, source_status, } => RewriteSource::RewriteFromIndex { source_entry: source_entry.clone(), source_entry_index, source_rela_path: source_rela_path.to_owned(), source_status, }, gix_status::index_as_worktree_with_renames::RewriteSource::CopyFromDirectoryEntry { source_dirwalk_entry, source_dirwalk_entry_collapsed_directory_status, source_dirwalk_entry_id, } => RewriteSource::CopyFromDirectoryEntry { source_dirwalk_entry, source_dirwalk_entry_collapsed_directory_status, source_dirwalk_entry_id, }, } } } /// The item produced by the iterator #[derive(Clone, PartialEq, Debug)] pub enum Item { /// A tracked file was modified, and index-specific information is passed. Modification { /// The entry with modifications. entry: gix_index::Entry, /// The index of the `entry` for lookup in [`gix_index::State::entries()`] - useful to look at neighbors. entry_index: usize, /// The repository-relative path of the entry. rela_path: BString, /// The computed status of the entry. status: gix_status::index_as_worktree::EntryStatus<(), SubmoduleStatus>, }, /// An entry returned by the directory walk, without any relation to the index. /// /// This can happen if ignored files are returned as well, or if rename-tracking is disabled. DirectoryContents { /// The entry found during the disk traversal. 
entry: gix_dir::Entry, /// `collapsed_directory_status` is `Some(dir_status)` if this `entry` was part of a directory with the given /// `dir_status` that wasn't the same as the one of `entry` and if [gix_dir::walk::Options::emit_collapsed] was /// [CollapsedEntriesEmissionMode::OnStatusMismatch](gix_dir::walk::CollapsedEntriesEmissionMode::OnStatusMismatch). /// It will also be `Some(dir_status)` if that option was [CollapsedEntriesEmissionMode::All](gix_dir::walk::CollapsedEntriesEmissionMode::All). collapsed_directory_status: Option, }, /// The rewrite tracking discovered a match between a deleted and added file, and considers them equal enough, /// depending on the tracker settings. /// /// Note that the source of the rewrite is always the index as it detects the absence of entries, something that /// can't be done during a directory walk. Rewrite { /// The source of the rewrite operation. source: RewriteSource, /// The untracked entry found during the disk traversal, the destination of the rewrite. /// /// Note that its [`rela_path`](gix_dir::EntryRef::rela_path) is the destination of the rewrite, and the current /// location of the entry. dirwalk_entry: gix_dir::Entry, /// `collapsed_directory_status` is `Some(dir_status)` if this `dirwalk_entry` was part of a directory with the given /// `dir_status` that wasn't the same as the one of `dirwalk_entry` and if [gix_dir::walk::Options::emit_collapsed] was /// [CollapsedEntriesEmissionMode::OnStatusMismatch](gix_dir::walk::CollapsedEntriesEmissionMode::OnStatusMismatch). /// It will also be `Some(dir_status)` if that option was [CollapsedEntriesEmissionMode::All](gix_dir::walk::CollapsedEntriesEmissionMode::All). dirwalk_entry_collapsed_directory_status: Option, /// The object id after the rename, specifically hashed in order to determine equality. dirwalk_entry_id: gix_hash::ObjectId, /// It's `None` if the 'source.id' is equal to `dirwalk_entry_id`, as identity made an actual diff computation unnecessary. 
/// Otherwise, and if enabled, it's `Some(stats)` to indicate how similar both entries were. diff: Option, /// If true, this rewrite is created by copy, and 'source.id' is pointing to its source. /// Otherwise, it's a rename, and 'source.id' points to a deleted object, /// as renames are tracked as deletions and additions of the same or similar content. copy: bool, }, } impl Item { /// Return a simplified summary of the item as digest of its status, or `None` if this item is /// created from the directory walk and is *not untracked*, or if it is merely to communicate /// a needed update to the index entry. pub fn summary(&self) -> Option { use gix_status::index_as_worktree_with_renames::Summary::*; Some(match self { Item::Modification { status, .. } => match status { EntryStatus::Conflict(_) => Conflict, EntryStatus::Change(change) => match change { Change::Removed => Removed, Change::Type => TypeChange, Change::Modification { .. } | Change::SubmoduleModification(_) => Modified, }, EntryStatus::NeedsUpdate(_) => return None, EntryStatus::IntentToAdd => IntentToAdd, }, Item::DirectoryContents { entry, .. } => { if matches!(entry.status, gix_dir::entry::Status::Untracked) { Added } else { return None; } } Item::Rewrite { copy, .. } => { if *copy { Copied } else { Renamed } } }) } /// The repository-relative path of the entry contained in this item. pub fn rela_path(&self) -> &BStr { match self { Item::Modification { rela_path, .. } => rela_path.as_ref(), Item::DirectoryContents { entry, .. } => entry.rela_path.as_ref(), Item::Rewrite { dirwalk_entry, .. 
} => dirwalk_entry.rela_path.as_ref(), } } } impl<'index> From> for Item { fn from(value: gix_status::index_as_worktree_with_renames::Entry<'index, (), SubmoduleStatus>) -> Self { match value { gix_status::index_as_worktree_with_renames::Entry::Modification { entries: _, entry, entry_index, rela_path, status, } => Item::Modification { entry: entry.clone(), entry_index, rela_path: rela_path.to_owned(), status, }, gix_status::index_as_worktree_with_renames::Entry::DirectoryContents { entry, collapsed_directory_status, } => Item::DirectoryContents { entry, collapsed_directory_status, }, gix_status::index_as_worktree_with_renames::Entry::Rewrite { source, dirwalk_entry, dirwalk_entry_collapsed_directory_status, dirwalk_entry_id, diff, copy, } => Item::Rewrite { source: source.into(), dirwalk_entry, dirwalk_entry_collapsed_directory_status, dirwalk_entry_id, diff, copy, }, } } } type SubmoduleStatus = crate::submodule::Status; /// The error returned by [Platform::into_index_worktree_iter()](crate::status::Platform::into_index_worktree_iter()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Index(#[from] crate::worktree::open_index::Error), #[error("Failed to spawn producer thread")] #[cfg(feature = "parallel")] SpawnThread(#[source] std::io::Error), #[error(transparent)] #[cfg(not(feature = "parallel"))] IndexWorktreeStatus(#[from] crate::status::index_worktree::Error), #[error(transparent)] ConfigSkipHash(#[from] crate::config::boolean::Error), #[error(transparent)] PrepareSubmodules(#[from] crate::submodule::modules::Error), } /// Lifecycle impl Platform<'_, Progress> where Progress: gix_features::progress::Progress, { /// Turn the platform into an iterator for changes between the index and the working tree. /// /// * `patterns` /// - Optional patterns to use to limit the paths to look at. If empty, all paths are considered. 
#[doc(alias = "diff_index_to_workdir", alias = "git2")] pub fn into_index_worktree_iter( self, patterns: impl IntoIterator, ) -> Result { let index = match self.index { None => IndexPersistedOrInMemory::Persisted(self.repo.index_or_empty()?), Some(index) => index, }; let skip_hash = self .repo .config .resolved .boolean(crate::config::tree::Index::SKIP_HASH) .map(|res| crate::config::tree::Index::SKIP_HASH.enrich_error(res)) .transpose() .with_lenient_default(self.repo.config.lenient_config)? .unwrap_or_default(); let should_interrupt = self.should_interrupt.clone().unwrap_or_default(); let submodule = BuiltinSubmoduleStatus::new(self.repo.clone().into_sync(), self.submodules)?; #[cfg(feature = "parallel")] { let (tx, rx) = std::sync::mpsc::channel(); let mut collect = Collect { tx }; let patterns: Vec<_> = patterns.into_iter().collect(); let join = std::thread::Builder::new() .name("gix::status::index_worktree::iter::producer".into()) .spawn({ let repo = self.repo.clone().into_sync(); let options = self.index_worktree_options; let should_interrupt = should_interrupt.clone(); let mut progress = self.progress; move || -> Result<_, crate::status::index_worktree::Error> { let repo = repo.to_thread_local(); let out = repo.index_worktree_status( &index, patterns, &mut collect, gix_status::index_as_worktree::traits::FastEq, submodule, &mut progress, &should_interrupt, options, )?; Ok(Outcome { index_worktree: out, index, changes: None, skip_hash, }) } }) .map_err(Error::SpawnThread)?; Ok(super::Iter { rx_and_join: Some((rx, join)), should_interrupt, changes: Vec::new(), out: None, }) } #[cfg(not(feature = "parallel"))] { let mut collect = Collect { items: Vec::new() }; let repo = self.repo.clone().into_sync(); let options = self.index_worktree_options; let mut progress = self.progress; let repo = repo.to_thread_local(); let out = repo.index_worktree_status( &index, patterns, &mut collect, gix_status::index_as_worktree::traits::FastEq, submodule, &mut progress, 
&should_interrupt, options, )?; let mut out = Outcome { index_worktree: out, index, changes: None, skip_hash, }; let mut iter = super::Iter { items: Vec::new().into_iter(), changes: Vec::new(), out: None, }; let items = collect .items .into_iter() .filter_map(|item| iter.maybe_keep_index_change(item)) .collect::>(); out.changes = (!iter.changes.is_empty()).then(|| std::mem::take(&mut iter.changes)); iter.items = items.into_iter(); iter.out = Some(out); Ok(iter) } } } impl Iterator for super::Iter { type Item = Result; fn next(&mut self) -> Option { #[cfg(feature = "parallel")] loop { let (rx, _join) = self.rx_and_join.as_ref()?; match rx.recv().ok() { Some(item) => { if let Some(item) = self.maybe_keep_index_change(item) { break Some(Ok(item)); } continue; } None => { let (_rx, handle) = self.rx_and_join.take()?; break match handle.join().expect("no panic") { Ok(mut out) => { out.changes = Some(std::mem::take(&mut self.changes)); self.out = Some(out); None } Err(err) => Some(Err(err)), }; } } } #[cfg(not(feature = "parallel"))] self.items.next().map(Ok) } } /// Access impl super::Iter { /// Return the outcome of the iteration, or `None` if the iterator isn't fully consumed. pub fn outcome_mut(&mut self) -> Option<&mut Outcome> { self.out.as_mut() } /// Turn the iterator into the iteration outcome, which is `None` on error or if the iteration /// isn't complete. pub fn into_outcome(mut self) -> Option { self.out.take() } } impl super::Iter { fn maybe_keep_index_change(&mut self, item: Item) -> Option { let change = match item { Item::Modification { status: gix_status::index_as_worktree::EntryStatus::NeedsUpdate(stat), entry_index, .. } => (entry_index, ApplyChange::NewStat(stat)), Item::Modification { status: gix_status::index_as_worktree::EntryStatus::Change( gix_status::index_as_worktree::Change::Modification { set_entry_stat_size_zero, .. }, ), entry_index, .. 
} if set_entry_stat_size_zero => (entry_index, ApplyChange::SetSizeToZero), _ => return Some(item), }; self.changes.push(change); None } } #[cfg(feature = "parallel")] impl Drop for super::Iter { fn drop(&mut self) { crate::util::parallel_iter_drop(self.rx_and_join.take(), &self.should_interrupt); } } struct Collect { #[cfg(feature = "parallel")] tx: std::sync::mpsc::Sender, #[cfg(not(feature = "parallel"))] items: Vec, } impl<'index> gix_status::index_as_worktree_with_renames::VisitEntry<'index> for Collect { type ContentChange = ::Output; type SubmoduleStatus = ::Output; fn visit_entry( &mut self, entry: gix_status::index_as_worktree_with_renames::Entry< 'index, Self::ContentChange, Self::SubmoduleStatus, >, ) { // NOTE: we assume that the receiver triggers interruption so the operation will stop if the receiver is down. #[cfg(feature = "parallel")] self.tx.send(entry.into()).ok(); #[cfg(not(feature = "parallel"))] self.items.push(entry.into()); } } } gix-0.69.1/src/status/mod.rs000064400000000000000000000153741046102023000137610ustar 00000000000000use crate::config::cache::util::ApplyLeniencyDefault; use crate::util::OwnedOrStaticAtomicBool; use crate::{config, Repository}; pub use gix_status as plumbing; /// A structure to hold options configuring the status request, which can then be turned into an iterator. pub struct Platform<'repo, Progress> where Progress: gix_features::progress::Progress + 'static, { repo: &'repo Repository, progress: Progress, index: Option, submodules: Submodule, index_worktree_options: index_worktree::Options, should_interrupt: Option, } /// How to obtain a submodule's status. #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub enum Submodule { /// Use the ['ignore' value](crate::Submodule::ignore) to determine which submodules /// participate in the status query, and to which extent. 
AsConfigured { /// If `true`, default `false`, the computation will stop once the first in a ladder operations /// ordered from cheap to expensive shows that the submodule is dirty. /// Thus, submodules that are clean will still impose the complete set of computation, as configured. check_dirty: bool, }, /// Instead of the configuration, use the given ['ignore' value](crate::submodule::config::Ignore). /// This makes it possible to fine-tune the amount of work invested in this status, while allowing /// to turn off all submodule status information. Given { /// The portion of the submodule status to ignore. ignore: crate::submodule::config::Ignore, /// If `true`, default `false`, the computation will stop once the first in a ladder operations /// ordered from cheap to expensive shows that the submodule is dirty. /// Thus, submodules that are clean will still impose the complete set of computation, as given. check_dirty: bool, }, } /// How untracked files should be handled. #[derive(Default, Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub enum UntrackedFiles { /// Do not show any untracked files. /// /// This can mean no directory walk is performed. None, /// If possible, collapse files into their parent folders to reduce the amount of /// emitted untracked files. #[default] Collapsed, /// Show each individual untracked file or directory (if empty directories are emitted) that the dirwalk encountered . Files, } impl Default for Submodule { fn default() -> Self { Submodule::AsConfigured { check_dirty: false } } } /// The error returned by [status()](Repository::status). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] DirwalkOptions(#[from] config::boolean::Error), #[error(transparent)] ConfigureUntrackedFiles(#[from] config::key::GenericErrorWithValue), } /// Status impl Repository { /// Obtain a platform for configuring iterators for traversing git repository status information. 
/// /// By default, this is set to the fastest and most immediate way of obtaining a status, /// which is most similar to /// /// `git status --ignored=no` /// /// which implies that submodule information is provided by default. /// /// Note that `status.showUntrackedFiles` is respected, which leads to untracked files being /// collapsed by default. If that needs to be controlled, /// [configure the directory walk explicitly](Platform::dirwalk_options) or more [implicitly](Platform::untracked_files). /// /// Pass `progress` to receive progress information on file modifications on this repository. /// Use [`progress::Discard`](crate::progress::Discard) to discard all progress information. /// /// ### Deviation /// /// Whereas Git runs the index-modified check before the directory walk to set entries /// as up-to-date to (potentially) safe some disk-access, we run both in parallel which /// ultimately is much faster. pub fn status

(&self, progress: P) -> Result, Error> where P: gix_features::progress::Progress + 'static, { let platform = Platform { repo: self, progress, index: None, submodules: Submodule::default(), should_interrupt: None, index_worktree_options: index_worktree::Options { sorting: None, dirwalk_options: Some(self.dirwalk_options()?), rewrites: None, thread_limit: None, }, }; let untracked = self .config .resolved .string(config::tree::Status::SHOW_UNTRACKED_FILES) .map(|value| { config::tree::Status::SHOW_UNTRACKED_FILES .try_into_show_untracked_files(value) .with_lenient_default(self.config.lenient_config) }) .transpose()? .unwrap_or_default(); Ok(platform.untracked_files(untracked)) } } /// pub mod is_dirty { use crate::Repository; /// The error returned by [Repository::is_dirty()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] StatusPlatform(#[from] crate::status::Error), #[error(transparent)] CreateStatusIterator(#[from] crate::status::index_worktree::iter::Error), } impl Repository { /// Returns `true` if the repository is dirty. /// This means it's changed in one of the following ways: /// /// * the index was changed in comparison to its working tree /// * the working tree was changed in comparison to the index /// * submodules are taken in consideration, along with their `ignore` and `isActive` configuration /// /// Note that *untracked files* do *not* affect this flag. /// /// ### Incomplete Implementation Warning /// /// Currently, this does not compute changes between the head and the index. // TODO: use iterator which also tests for head->index changes. pub fn is_dirty(&self) -> Result { let is_dirty = self .status(gix_features::progress::Discard)? .index_worktree_rewrites(None) .index_worktree_submodules(crate::status::Submodule::AsConfigured { check_dirty: true }) .index_worktree_options_mut(|opts| { opts.dirwalk_options = None; }) .into_index_worktree_iter(Vec::new())? 
.take_while(Result::is_ok) .next() .is_some(); Ok(is_dirty) } } } mod platform; /// pub mod index_worktree; gix-0.69.1/src/status/platform.rs000064400000000000000000000121761046102023000150230ustar 00000000000000use crate::status::{index_worktree, OwnedOrStaticAtomicBool, Platform, Submodule, UntrackedFiles}; use std::sync::atomic::AtomicBool; /// Builder impl Platform<'_, Progress> where Progress: gix_features::progress::Progress, { /// Call `cb` on dirwalk options if these are set (which is the default when created through [`Repository::status()`](crate::Repository::status())). /// The directory walk is used to find untracked files or ignored files. /// /// `cb` will be able to run builder-methods on the passed dirwalk options. pub fn dirwalk_options(mut self, cb: impl FnOnce(crate::dirwalk::Options) -> crate::dirwalk::Options) -> Self { if let Some(opts) = self.index_worktree_options.dirwalk_options.take() { self.index_worktree_options.dirwalk_options = Some(cb(opts)); } self } /// Like [dirwalk_options()](Self::dirwalk_options), but taking a mutable instance instead. pub fn dirwalk_options_mut(&mut self, cb: impl FnOnce(&mut crate::dirwalk::Options)) -> &mut Self { if let Some(opts) = self.index_worktree_options.dirwalk_options.as_mut() { cb(opts); } self } /// A simple way to explicitly set the desired way of listing `untracked_files`, overriding any value /// set by the git configuration. /// /// Note that if [`None`](UntrackedFiles::None) is used, the directory walk will be disabled entirely /// after this call. Further, if no dirwalk options are present anymore, this call has no effect. 
pub fn untracked_files(mut self, untracked_files: UntrackedFiles) -> Self { let mode = match untracked_files { UntrackedFiles::None => { self.index_worktree_options.dirwalk_options.take(); return self; } UntrackedFiles::Collapsed => gix_dir::walk::EmissionMode::CollapseDirectory, UntrackedFiles::Files => gix_dir::walk::EmissionMode::Matching, }; self.dirwalk_options(|cb| cb.emit_untracked(mode)) } /// Set the interrupt flag to `should_interrupt`, which typically is an application-wide flag /// that is ultimately controlled by user interrupts. /// /// If it is `true`, the iteration will stop immediately. pub fn should_interrupt_shared(mut self, should_interrupt: &'static AtomicBool) -> Self { self.should_interrupt = Some(OwnedOrStaticAtomicBool::Shared(should_interrupt)); self } /// Set the interrupt flag to `should_interrupt`, as controlled by the caller. /// /// If it is `true`, the iteration will stop immediately. pub fn should_interrupt_owned(mut self, should_interrupt: std::sync::Arc) -> Self { self.should_interrupt = Some(OwnedOrStaticAtomicBool::Owned { flag: should_interrupt, private: false, }); self } /// Configure how the `submodule_status` is obtained when looking at submodules that are still mentioned in the index. // If `None` is given, no submodule status check is performed. pub fn index_worktree_submodules(mut self, submodules: impl Into>) -> Self { let submodules = submodules.into(); self.submodules = match submodules { None => Submodule::Given { ignore: crate::submodule::config::Ignore::All, check_dirty: false, }, Some(status) => status, }; self } /// Set the `index` to use when making comparisons to the worktree and the head revision. /// /// Defaults to the current index, or an empty one if it doesn't exist (yet). pub fn index(mut self, index: crate::worktree::IndexPersistedOrInMemory) -> Self { self.index = Some(index); self } /// Configure the index-to-worktree rename tracking with `rewrites`, which is `None` by default. 
/// /// Note that Git does not have configuration related to rename tracking of changes between the index /// and the worktree. The closest there is can be obtained using [`crate::diff::new_rewrites()`], which refers /// to rename tracking between trees. /// /// Also note that if `rewrites` are `Some()`, [`sorting`](index_worktree::Options::sorting) will automatically be /// configured to assure deterministic outcomes for rewrite solutions. pub fn index_worktree_rewrites(mut self, rewrites: impl Into>) -> Self { let rewrites = rewrites.into(); self.index_worktree_options.rewrites = rewrites; if rewrites.is_some() && self.index_worktree_options.sorting.is_none() { self.index_worktree_options.sorting = Some(gix_status::index_as_worktree_with_renames::Sorting::ByPathCaseSensitive); } self } /// Adjust all options related to the index-worktree status. /// This is a catch-all in case there are no more specific methods that could be used instead to change /// the respective option. pub fn index_worktree_options_mut(mut self, cb: impl FnOnce(&mut index_worktree::Options)) -> Self { cb(&mut self.index_worktree_options); self } } gix-0.69.1/src/submodule/errors.rs000064400000000000000000000073001046102023000151600ustar 00000000000000/// pub mod open_modules_file { /// The error returned by [Repository::open_modules_file()](crate::Repository::open_modules_file()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Configuration(#[from] gix_config::parse::Error), #[error("Could not read '.gitmodules' file")] Io(#[from] std::io::Error), } } /// pub mod modules { /// The error returned by [Repository::modules()](crate::Repository::modules()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenModulesFile(#[from] crate::submodule::open_modules_file::Error), #[error(transparent)] OpenIndex(#[from] crate::worktree::open_index::Error), #[error("Could not find the .gitmodules file by id in the object database")] FindExistingBlob(#[from] crate::object::find::existing::Error), #[error("Did not find commit in current HEAD to access its tree")] FindHeadCommit(#[from] crate::reference::head_commit::Error), #[error(transparent)] TreeFromCommit(#[from] crate::object::commit::Error), } } /// pub mod is_active { /// The error returned by [Submodule::is_active()](crate::Submodule::is_active()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] InitIsActivePlatform(#[from] gix_submodule::is_active_platform::Error), #[error(transparent)] QueryIsActive(#[from] gix_config::value::Error), #[error(transparent)] InitAttributes(#[from] crate::config::attribute_stack::Error), #[error(transparent)] InitPathspecDefaults(#[from] gix_pathspec::defaults::from_environment::Error), #[error(transparent)] ObtainIndex(#[from] crate::repository::index_or_load_from_head::Error), } } /// pub mod fetch_recurse { /// The error returned by [Submodule::fetch_recurse()](crate::Submodule::fetch_recurse()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ModuleBoolean(#[from] gix_submodule::config::Error), #[error(transparent)] ConfigurationFallback(#[from] crate::config::key::GenericErrorWithValue), } } /// pub mod open { /// The error returned by [Submodule::open()](crate::Submodule::open()). 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenRepository(#[from] crate::open::Error), #[error(transparent)] PathConfiguration(#[from] gix_submodule::config::path::Error), } } /// pub mod index_id { /// The error returned by [Submodule::index_id()](crate::Submodule::index_id()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] PathConfiguration(#[from] gix_submodule::config::path::Error), #[error(transparent)] Index(#[from] crate::repository::index_or_load_from_head::Error), } } /// pub mod head_id { /// The error returned by [Submodule::head_id()](crate::Submodule::head_id()). #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] HeadCommit(#[from] crate::reference::head_commit::Error), #[error("Could not get tree of head commit")] CommitTree(#[from] crate::object::commit::Error), #[error("Could not peel tree to submodule path")] PeelTree(#[from] crate::object::find::existing::Error), #[error(transparent)] PathConfiguration(#[from] gix_submodule::config::path::Error), } } gix-0.69.1/src/submodule/mod.rs000064400000000000000000000471071046102023000144340ustar 00000000000000#![allow(clippy::result_large_err)] //! Submodule plumbing and abstractions //! use std::{ borrow::Cow, cell::{Ref, RefCell, RefMut}, path::PathBuf, }; pub use gix_submodule::*; use crate::{bstr::BStr, is_dir_to_mode, worktree::IndexPersistedOrInMemory, Repository, Submodule}; pub(crate) type ModulesFileStorage = gix_features::threading::OwnShared>; /// A lazily loaded and auto-updated worktree index. pub type ModulesSnapshot = gix_fs::SharedFileSnapshot; /// The name of the file containing (sub) module information. pub(crate) const MODULES_FILE: &str = ".gitmodules"; mod errors; pub use errors::*; /// A platform maintaining state needed to interact with submodules, created by [`Repository::submodules()]. 
pub(crate) struct SharedState<'repo> { pub repo: &'repo Repository, pub(crate) modules: ModulesSnapshot, is_active: RefCell>, index: RefCell>, } impl<'repo> SharedState<'repo> { pub(crate) fn new(repo: &'repo Repository, modules: ModulesSnapshot) -> Self { SharedState { repo, modules, is_active: RefCell::new(None), index: RefCell::new(None), } } fn index(&self) -> Result, crate::repository::index_or_load_from_head::Error> { { let mut state = self.index.borrow_mut(); if state.is_none() { *state = self.repo.index_or_load_from_head()?.into(); } } Ok(Ref::map(self.index.borrow(), |opt| { opt.as_ref().expect("just initialized") })) } fn active_state_mut( &self, ) -> Result<(RefMut<'_, IsActivePlatform>, RefMut<'_, gix_worktree::Stack>), is_active::Error> { let mut state = self.is_active.borrow_mut(); if state.is_none() { let platform = self .modules .is_active_platform(&self.repo.config.resolved, self.repo.config.pathspec_defaults()?)?; let index = self.index()?; let attributes = self .repo .attributes_only( &index, gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping .adjust_for_bare(self.repo.is_bare()), )? .detach(); *state = Some(IsActiveState { platform, attributes }); } Ok(RefMut::map_split(state, |opt| { let state = opt.as_mut().expect("populated above"); (&mut state.platform, &mut state.attributes) })) } } struct IsActiveState { platform: IsActivePlatform, attributes: gix_worktree::Stack, } ///Access impl Submodule<'_> { /// Return the submodule's name. pub fn name(&self) -> &BStr { self.name.as_ref() } /// Return the path at which the submodule can be found, relative to the repository. /// /// For details, see [gix_submodule::File::path()]. pub fn path(&self) -> Result, config::path::Error> { self.state.modules.path(self.name()) } /// Return the url from which to clone or update the submodule. /// /// This method takes into consideration submodule configuration overrides. 
pub fn url(&self) -> Result { self.state.modules.url(self.name()) } /// Return the `update` field from this submodule's configuration, if present, or `None`. /// /// This method takes into consideration submodule configuration overrides. pub fn update(&self) -> Result, config::update::Error> { self.state.modules.update(self.name()) } /// Return the `branch` field from this submodule's configuration, if present, or `None`. /// /// This method takes into consideration submodule configuration overrides. pub fn branch(&self) -> Result, config::branch::Error> { self.state.modules.branch(self.name()) } /// Return the `fetchRecurseSubmodules` field from this submodule's configuration, or retrieve the value from `fetch.recurseSubmodules` if unset. pub fn fetch_recurse(&self) -> Result, fetch_recurse::Error> { Ok(match self.state.modules.fetch_recurse(self.name())? { Some(val) => Some(val), None => self .state .repo .config .resolved .boolean("fetch.recurseSubmodules") .map(|res| crate::config::tree::Fetch::RECURSE_SUBMODULES.try_into_recurse_submodules(res)) .transpose()?, }) } /// Return the `ignore` field from this submodule's configuration, if present, or `None`. /// /// This method takes into consideration submodule configuration overrides. pub fn ignore(&self) -> Result, config::Error> { self.state.modules.ignore(self.name()) } /// Return the `shallow` field from this submodule's configuration, if present, or `None`. /// /// If `true`, the submodule will be checked out with `depth = 1`. If unset, `false` is assumed. pub fn shallow(&self) -> Result, gix_config::value::Error> { self.state.modules.shallow(self.name()) } /// Returns true if this submodule is considered active and can thus participate in an operation. /// /// Please see the [plumbing crate documentation](gix_submodule::IsActivePlatform::is_active()) for details. 
pub fn is_active(&self) -> Result { let (mut platform, mut attributes) = self.state.active_state_mut()?; let is_active = platform.is_active(&self.state.repo.config.resolved, self.name.as_ref(), { &mut |relative_path, case, is_dir, out| { attributes .set_case(case) .at_entry(relative_path, Some(is_dir_to_mode(is_dir)), &self.state.repo.objects) .map_or(false, |platform| platform.matching_attributes(out)) } })?; Ok(is_active) } /// Return the object id of the submodule as stored in the index of the superproject, /// or `None` if it was deleted from the index. /// /// If `None`, but `Some()` when calling [`Self::head_id()`], then the submodule was just deleted but the change /// wasn't yet committed. Note that `None` is also returned if the entry at the submodule path isn't a submodule. /// If `Some()`, but `None` when calling [`Self::head_id()`], then the submodule was just added without having committed the change. pub fn index_id(&self) -> Result, index_id::Error> { let path = self.path()?; Ok(self .state .index()? .entry_by_path(&path) .and_then(|entry| (entry.mode == gix_index::entry::Mode::COMMIT).then_some(entry.id))) } /// Return the object id of the submodule as stored in `HEAD^{tree}` of the superproject, or `None` if it wasn't yet committed. /// /// If `Some()`, but `None` when calling [`Self::index_id()`], then the submodule was just deleted but the change /// wasn't yet committed. Note that `None` is also returned if the entry at the submodule path isn't a submodule. /// If `None`, but `Some()` when calling [`Self::index_id()`], then the submodule was just added without having committed the change. pub fn head_id(&self) -> Result, head_id::Error> { let path = self.path()?; Ok(self .state .repo .head_commit()? .tree()? .peel_to_entry_by_path(gix_path::from_bstr(path.as_ref()))? .and_then(|entry| (entry.mode().is_commit()).then_some(entry.inner.oid))) } /// Return the path at which the repository of the submodule should be located. 
/// /// The directory might not exist yet. pub fn git_dir(&self) -> PathBuf { self.state .repo .common_dir() .join("modules") .join(gix_path::from_bstr(self.name())) } /// Return the path to the location at which the workdir would be checked out. /// /// Note that it may be a path relative to the repository if, for some reason, the parent directory /// doesn't have a working dir set. pub fn work_dir(&self) -> Result { let worktree_git = gix_path::from_bstr(self.path()?); Ok(match self.state.repo.work_dir() { None => worktree_git.into_owned(), Some(prefix) => prefix.join(worktree_git), }) } /// Return the path at which the repository of the submodule should be located, or the path inside of /// the superproject's worktree where it actually *is* located if the submodule in the 'old-form', thus is a directory /// inside of the superproject's work-tree. /// /// Note that 'old-form' paths returned aren't verified, i.e. the `.git` repository might be corrupt or otherwise /// invalid - it's left to the caller to try to open it. /// /// Also note that the returned path may not actually exist. pub fn git_dir_try_old_form(&self) -> Result { let worktree_git = self.work_dir()?.join(gix_discover::DOT_GIT_DIR); Ok(if worktree_git.is_dir() { worktree_git } else { self.git_dir() }) } /// Query various parts of the submodule and assemble it into state information. 
#[doc(alias = "status", alias = "git2")] pub fn state(&self) -> Result { let maybe_old_path = self.git_dir_try_old_form()?; let git_dir = self.git_dir(); let worktree_git = self.work_dir()?.join(gix_discover::DOT_GIT_DIR); let superproject_configuration = self .state .repo .config .resolved .sections_by_name("submodule") .into_iter() .flatten() .any(|section| section.header().subsection_name() == Some(self.name.as_ref())); Ok(State { repository_exists: maybe_old_path.is_dir(), is_old_form: maybe_old_path != git_dir, worktree_checkout: worktree_git.exists(), superproject_configuration, }) } /// Open the submodule as repository, or `None` if the submodule wasn't initialized yet. /// /// More states can be derived here: /// /// * *initialized* - a repository exists, i.e. `Some(repo)` and the working tree is present. /// * *uninitialized* - a repository does not exist, i.e. `None` /// * *deinitialized* - a repository does exist, i.e. `Some(repo)`, but its working tree is empty. /// /// Also see the [state()](Self::state()) method for learning about the submodule. /// The repository can also be used to learn about the submodule `HEAD`, i.e. where its working tree is at, /// which may differ compared to the superproject's index or `HEAD` commit. pub fn open(&self) -> Result, open::Error> { match crate::open_opts(self.git_dir_try_old_form()?, self.state.repo.options.clone()) { Ok(repo) => Ok(Some(repo)), Err(crate::open::Error::NotARepository { .. }) => Ok(None), Err(err) => Err(err.into()), } } } /// #[cfg(feature = "status")] pub mod status { use super::{head_id, index_id, open, Status}; use crate::Submodule; use gix_submodule::config; /// The error returned by [Submodule::status()]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] State(#[from] config::path::Error), #[error(transparent)] HeadId(#[from] head_id::Error), #[error(transparent)] IndexId(#[from] index_id::Error), #[error(transparent)] OpenRepository(#[from] open::Error), #[error(transparent)] IgnoreConfiguration(#[from] config::Error), #[error(transparent)] StatusPlatform(#[from] crate::status::Error), #[error(transparent)] Status(#[from] crate::status::index_worktree::iter::Error), #[error(transparent)] IndexWorktreeStatus(#[from] crate::status::index_worktree::Error), } impl Submodule<'_> { /// Return the status of the submodule. /// /// Use `ignore` to control the portion of the submodule status to ignore. It can be obtained from /// submodule configuration using the [`ignore()`](Submodule::ignore()) method. /// If `check_dirty` is `true`, the computation will stop once the first in a ladder operations /// ordered from cheap to expensive shows that the submodule is dirty. /// Thus, submodules that are clean will still impose the complete set of computation, as given. #[doc(alias = "submodule_status", alias = "git2")] pub fn status( &self, ignore: config::Ignore, check_dirty: bool, ) -> Result { self.status_opts(ignore, check_dirty, &mut |s| s) } /// Return the status of the submodule, just like [`status`](Self::status), but allows to adjust options /// for more control over how the status is performed. /// /// Use `&mut std::convert::identity` for `adjust_options` if no specific options are desired. /// A reason to change them might be to enable sorting to enjoy deterministic order of changes. /// /// The status allows to easily determine if a submodule [has changes](Status::is_dirty). /// /// ### Incomplete Implementation Warning /// /// Currently, changes between the head and the index aren't computed. // TODO: Run the full status, including tree->index once available. 
#[doc(alias = "submodule_status", alias = "git2")] pub fn status_opts( &self, ignore: config::Ignore, check_dirty: bool, adjust_options: &mut dyn for<'a> FnMut( crate::status::Platform<'a, gix_features::progress::Discard>, ) -> crate::status::Platform<'a, gix_features::progress::Discard>, ) -> Result { let mut state = self.state()?; if ignore == config::Ignore::All { return Ok(Status { state, ..Default::default() }); } let index_id = self.index_id()?; if !state.repository_exists { return Ok(Status { state, index_id, ..Default::default() }); } let sm_repo = match self.open()? { None => { state.repository_exists = false; return Ok(Status { state, index_id, ..Default::default() }); } Some(repo) => repo, }; let checked_out_head_id = sm_repo.head_id().ok().map(crate::Id::detach); let mut status = Status { state, index_id, checked_out_head_id, ..Default::default() }; if ignore == config::Ignore::Dirty || check_dirty && status.is_dirty() == Some(true) { return Ok(status); } if !state.worktree_checkout { return Ok(status); } let statuses = adjust_options(sm_repo.status(gix_features::progress::Discard)?) .index_worktree_options_mut(|opts| { if ignore == config::Ignore::Untracked { opts.dirwalk_options = None; } }) .into_index_worktree_iter(Vec::new())?; let mut changes = Vec::new(); for change in statuses { changes.push(change?); } status.changes = Some(changes); Ok(status) } } impl Status { /// Return `Some(true)` if the submodule status could be determined sufficiently and /// if there are changes that would render this submodule dirty. /// /// Return `Some(false)` if the submodule status could be determined and it has no changes /// at all. /// /// Return `None` if the repository clone or the worktree are missing entirely, which would leave /// it to the caller to determine if that's considered dirty or not. 
pub fn is_dirty(&self) -> Option { if !self.state.worktree_checkout || !self.state.repository_exists { return None; } let is_dirty = self.checked_out_head_id != self.index_id || self.changes.as_ref().map_or(false, |c| !c.is_empty()); Some(is_dirty) } } pub(super) mod types { use crate::submodule::State; /// A simplified status of the Submodule. /// /// As opposed to the similar-sounding [`State`], it is more exhaustive and potentially expensive to compute, /// particularly for submodules without changes. /// /// It's produced by [Submodule::status()](crate::Submodule::status()). #[derive(Default, Clone, PartialEq, Debug)] pub struct Status { /// The cheapest part of the status that is always performed, to learn if the repository is cloned /// and if there is a worktree checkout. pub state: State, /// The commit at which the submodule is supposed to be according to the super-project's index. /// `None` means the computation wasn't performed, or the submodule didn't exist in the super-project's index anymore. pub index_id: Option, /// The commit-id of the `HEAD` at which the submodule is currently checked out. /// `None` if the computation wasn't performed as it was skipped early, or if no repository was available or /// if the HEAD could not be obtained or wasn't born. pub checked_out_head_id: Option, /// The set of changes obtained from running something akin to `git status` in the submodule working tree. /// /// `None` if the computation wasn't performed as the computation was skipped early, or if no working tree was /// available or repository was available. pub changes: Option>, } } } #[cfg(feature = "status")] pub use status::types::Status; /// A summary of the state of all parts forming a submodule, which allows to answer various questions about it. /// /// Note that expensive questions about its presence in the `HEAD` or the `index` are left to the caller. 
#[derive(Default, Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct State { /// if the submodule repository has been cloned. pub repository_exists: bool, /// if the submodule repository is located directly in the worktree of the superproject. pub is_old_form: bool, /// if the worktree is checked out. pub worktree_checkout: bool, /// If submodule configuration was found in the superproject's `.git/config` file. /// Note that the presence of a single section is enough, independently of the actual values. pub superproject_configuration: bool, } gix-0.69.1/src/tag.rs000064400000000000000000000007731046102023000124270ustar 00000000000000//! #![allow(clippy::empty_docs)] mod error { /// The error returned by [`tag(…)`][crate::Repository::tag()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ReferenceNameValidation(#[from] gix_ref::name::Error), #[error(transparent)] WriteObject(#[from] crate::object::write::Error), #[error(transparent)] ReferenceEdit(#[from] crate::reference::edit::Error), } } pub use error::Error; gix-0.69.1/src/types.rs000064400000000000000000000241051046102023000130130ustar 00000000000000use std::{cell::RefCell, path::PathBuf}; use gix_hash::ObjectId; use crate::{head, remote}; /// A worktree checkout containing the files of the repository in consumable form. #[derive(Debug, Clone)] pub struct Worktree<'repo> { pub(crate) parent: &'repo Repository, /// The root path of the checkout. pub(crate) path: &'repo std::path::Path, } /// The head reference, as created from looking at `.git/HEAD`, able to represent all of its possible states. /// /// Note that like [`Reference`], this type's data is snapshot of persisted state on disk. #[derive(Clone)] pub struct Head<'repo> { /// One of various possible states for the HEAD reference pub kind: head::Kind, /// The owning repository. pub repo: &'repo Repository, } /// An [`ObjectId`] with access to a repository. 
#[derive(Clone, Copy)] pub struct Id<'r> { /// The actual object id pub(crate) inner: ObjectId, /// The owning repository. pub repo: &'r Repository, } /// A decoded object with a reference to its owning repository. #[derive(Clone)] pub struct Object<'repo> { /// The id of the object pub id: ObjectId, /// The kind of the object pub kind: gix_object::Kind, /// The fully decoded object data pub data: Vec, /// The owning repository. pub repo: &'repo Repository, } impl Drop for Object<'_> { fn drop(&mut self) { self.repo.reuse_buffer(&mut self.data); } } /// A blob along with access to its owning repository. #[derive(Clone)] pub struct Blob<'repo> { /// The id of the tree pub id: ObjectId, /// The blob's data. pub data: Vec, /// The owning repository. pub repo: &'repo Repository, } impl Drop for Blob<'_> { fn drop(&mut self) { self.repo.reuse_buffer(&mut self.data); } } /// A decoded tree object with access to its owning repository. #[derive(Clone)] pub struct Tree<'repo> { /// Thek[ id of the tree pub id: ObjectId, /// The fully decoded tree data pub data: Vec, /// The owning repository. pub repo: &'repo Repository, } impl Drop for Tree<'_> { fn drop(&mut self) { self.repo.reuse_buffer(&mut self.data); } } /// A decoded tag object with access to its owning repository. #[derive(Clone)] pub struct Tag<'repo> { /// The id of the tree pub id: ObjectId, /// The fully decoded tag data pub data: Vec, /// The owning repository. pub repo: &'repo Repository, } impl Drop for Tag<'_> { fn drop(&mut self) { self.repo.reuse_buffer(&mut self.data); } } /// A decoded commit object with access to its owning repository. #[derive(Clone)] pub struct Commit<'repo> { /// The id of the commit pub id: ObjectId, /// The fully decoded commit data pub data: Vec, /// The owning repository. pub repo: &'repo Repository, } impl Drop for Commit<'_> { fn drop(&mut self) { self.repo.reuse_buffer(&mut self.data); } } /// A detached, self-contained object, without access to its source repository. 
/// /// Use it if an `ObjectRef` should be sent over thread boundaries or stored in collections. #[derive(Clone)] pub struct ObjectDetached { /// The id of the object pub id: ObjectId, /// The kind of the object pub kind: gix_object::Kind, /// The fully decoded object data pub data: Vec, } /// A reference that points to an object or reference, with access to its source repository. /// /// Note that these are snapshots and won't recognize if they are stale. #[derive(Clone)] pub struct Reference<'r> { /// The actual reference data pub inner: gix_ref::Reference, /// The owning repository. pub repo: &'r Repository, } /// A thread-local handle to interact with a repository from a single thread. /// /// It is `Send` but **not** `Sync` - for the latter you can convert it `to_sync()`. /// Note that it clones itself so that it is empty, requiring the user to configure each clone separately, specifically /// and explicitly. This is to have the fastest-possible default configuration available by default, but allow /// those who experiment with workloads to get speed boosts of 2x or more. pub struct Repository { /// A ref store with shared ownership (or the equivalent of it). pub refs: crate::RefStore, /// A way to access objects. pub objects: crate::OdbHandle, pub(crate) work_tree: Option, /// The path to the resolved common directory if this is a linked worktree repository or it is otherwise set. pub(crate) common_dir: Option, /// A free-list of reusable object backing buffers pub(crate) bufs: Option>>>, /// A pre-assembled selection of often-accessed configuration values for quick access. pub(crate) config: crate::config::Cache, /// the options obtained when instantiating this repository. /// /// Particularly useful when following linked worktrees and instantiating new equally configured worktree repositories. 
pub(crate) options: crate::open::Options, #[cfg(feature = "index")] pub(crate) index: crate::worktree::IndexStorage, #[cfg(feature = "attributes")] pub(crate) modules: crate::submodule::ModulesFileStorage, pub(crate) shallow_commits: crate::shallow::CommitsStorage, } /// An instance with access to everything a git repository entails, best imagined as container implementing `Sync + Send` for _most_ /// for system resources required to interact with a `git` repository which are loaded in once the instance is created. /// /// Use this type to reference it in a threaded context for creation the creation of a thread-local [`Repositories`][Repository]. /// /// Note that this type purposefully isn't very useful until it is converted into a thread-local repository with `to_thread_local()`, /// it's merely meant to be able to exist in a `Sync` context. /// /// Note that it can also cheaply be cloned, and it will retain references to all contained resources. #[derive(Clone)] pub struct ThreadSafeRepository { /// A store for references to point at objects pub refs: crate::RefStore, /// A store for objects that contain data pub objects: gix_features::threading::OwnShared, /// The path to the worktree at which to find checked out files pub work_tree: Option, /// The path to the common directory if this is a linked worktree repository or it is otherwise set. pub common_dir: Option, pub(crate) config: crate::config::Cache, /// options obtained when instantiating this repository for use when following linked worktrees. pub(crate) linked_worktree_options: crate::open::Options, /// The index of this instances worktree. #[cfg(feature = "index")] pub(crate) index: crate::worktree::IndexStorage, #[cfg(feature = "attributes")] pub(crate) modules: crate::submodule::ModulesFileStorage, pub(crate) shallow_commits: crate::shallow::CommitsStorage, } /// A remote which represents a way to interact with hosts for remote clones of the parent repository. 
#[derive(Debug, Clone, PartialEq)] pub struct Remote<'repo> { /// The remotes symbolic name, only present if persisted in git configuration files. pub(crate) name: Option>, /// The url of the host to talk to, after application of replacements. If it is unset, the `push_url` must be set. /// and fetches aren't possible. pub(crate) url: Option, /// The rewritten `url`, if it was rewritten. pub(crate) url_alias: Option, /// The url to use for pushing specifically. pub(crate) push_url: Option, /// The rewritten `push_url`, if it was rewritten. pub(crate) push_url_alias: Option, /// Refspecs for use when fetching. pub(crate) fetch_specs: Vec, /// Refspecs for use when pushing. pub(crate) push_specs: Vec, /// Tell us what to do with tags when fetched. pub(crate) fetch_tags: remote::fetch::Tags, // /// Delete local tracking branches that don't exist on the remote anymore. // pub(crate) prune: bool, // /// Delete tags that don't exist on the remote anymore, equivalent to pruning the refspec `refs/tags/*:refs/tags/*`. // pub(crate) prune_tags: bool, /// The owning repository. pub repo: &'repo Repository, } /// A utility to make matching against pathspecs simple. /// /// Note that to perform pathspec matching, attribute access might need to be provided. For that, we use our own /// and argue that the implementation is only going to incur costs for it when a pathspec matches *and* has attributes. /// Should this potential duplication of effort to maintain attribute state be unacceptable, the user may fall back /// to the underlying plumbing. #[derive(Clone)] #[cfg(feature = "attributes")] pub struct Pathspec<'repo> { /// The owning repository. pub repo: &'repo Repository, /// The cache to power attribute access. It's only initialized if we have a pattern with attributes. pub(crate) stack: Option, /// The prepared search to use for checking matches. pub(crate) search: gix_pathspec::Search, } /// Like [`Pathspec`], but without a Repository reference and with minimal API. 
#[derive(Clone)] #[cfg(feature = "attributes")] pub struct PathspecDetached { /// The cache to power attribute access. It's only initialized if we have a pattern with attributes. pub stack: Option, /// The prepared search to use for checking matches. pub search: gix_pathspec::Search, /// A thread-safe version of an ODB. pub odb: crate::OdbHandleArc, } /// A stand-in for the submodule of a particular name. #[derive(Clone)] #[cfg(feature = "attributes")] pub struct Submodule<'repo> { pub(crate) state: std::rc::Rc>, pub(crate) name: crate::bstr::BString, } /// A utility to access `.gitattributes` and `.gitignore` information efficiently. #[cfg(any(feature = "attributes", feature = "excludes"))] pub struct AttributeStack<'repo> { /// The owning repository. pub repo: &'repo Repository, pub(crate) inner: gix_worktree::Stack, } gix-0.69.1/src/util.rs000064400000000000000000000045741046102023000126340ustar 00000000000000use std::ops::Deref; use std::sync::atomic::AtomicBool; use std::sync::Arc; #[derive(Clone)] pub enum OwnedOrStaticAtomicBool { Owned { flag: Arc, #[cfg_attr(not(feature = "parallel"), allow(dead_code))] private: bool, }, Shared(&'static AtomicBool), } impl Default for OwnedOrStaticAtomicBool { fn default() -> Self { OwnedOrStaticAtomicBool::Owned { flag: Arc::new(AtomicBool::default()), private: true, } } } impl Deref for OwnedOrStaticAtomicBool { type Target = std::sync::atomic::AtomicBool; fn deref(&self) -> &Self::Target { match self { OwnedOrStaticAtomicBool::Owned { flag, .. 
} => flag, OwnedOrStaticAtomicBool::Shared(flag) => flag, } } } impl From<&'static AtomicBool> for OwnedOrStaticAtomicBool { fn from(value: &'static AtomicBool) -> Self { OwnedOrStaticAtomicBool::Shared(value) } } impl<'a> From<&'a Arc> for OwnedOrStaticAtomicBool { fn from(value: &'a Arc) -> Self { OwnedOrStaticAtomicBool::Owned { flag: value.clone(), private: false, } } } impl From> for OwnedOrStaticAtomicBool { fn from(flag: Arc) -> Self { OwnedOrStaticAtomicBool::Owned { flag, private: false } } } #[cfg(feature = "parallel")] pub fn parallel_iter_drop( mut rx_and_join: Option<(std::sync::mpsc::Receiver, std::thread::JoinHandle)>, should_interrupt: &OwnedOrStaticAtomicBool, ) { let Some((rx, handle)) = rx_and_join.take() else { return; }; let prev = should_interrupt.swap(true, std::sync::atomic::Ordering::Relaxed); let undo = match &should_interrupt { OwnedOrStaticAtomicBool::Shared(flag) => *flag, OwnedOrStaticAtomicBool::Owned { flag, private: false } => flag.as_ref(), OwnedOrStaticAtomicBool::Owned { private: true, .. } => { // Leak the handle to let it shut down in the background, so drop returns more quickly. drop((rx, handle)); return; } }; // Wait until there is time to respond before we undo the change. handle.join().ok(); undo.fetch_update( std::sync::atomic::Ordering::SeqCst, std::sync::atomic::Ordering::SeqCst, |current| current.then_some(prev), ) .ok(); } gix-0.69.1/src/worktree/mod.rs000064400000000000000000000245001046102023000142670ustar 00000000000000use std::path::PathBuf; #[cfg(feature = "worktree-archive")] pub use gix_archive as archive; #[cfg(feature = "excludes")] pub use gix_worktree::*; #[cfg(feature = "worktree-mutation")] pub use gix_worktree_state as state; #[cfg(feature = "worktree-stream")] pub use gix_worktree_stream as stream; use crate::{ bstr::{BStr, BString}, Repository, }; #[cfg(feature = "index")] pub(crate) type IndexStorage = gix_features::threading::OwnShared>; /// A lazily loaded and auto-updated worktree index. 
#[cfg(feature = "index")] pub type Index = gix_fs::SharedFileSnapshot; /// A type to represent an index which either was loaded from disk as it was persisted there, or created on the fly in memory. #[cfg(feature = "index")] #[allow(clippy::large_enum_variant)] pub enum IndexPersistedOrInMemory { /// The index as loaded from disk, and shared across clones of the owning `Repository`. Persisted(Index), /// A temporary index as created from the `HEAD^{tree}`, with the file path set to the place where it would be stored naturally. /// /// Note that unless saved explicitly, it will not persist. InMemory(gix_index::File), } #[cfg(feature = "index")] impl From for IndexPersistedOrInMemory { fn from(value: Index) -> Self { IndexPersistedOrInMemory::Persisted(value) } } #[cfg(feature = "index")] impl From for IndexPersistedOrInMemory { fn from(value: gix_index::File) -> Self { IndexPersistedOrInMemory::InMemory(value) } } /// A stand-in to a worktree as result of a worktree iteration. /// /// It provides access to typical worktree state, but may not actually point to a valid checkout as the latter has been moved or /// deleted. #[derive(Debug, Clone)] pub struct Proxy<'repo> { pub(crate) parent: &'repo Repository, pub(crate) git_dir: PathBuf, } /// Access impl<'repo> crate::Worktree<'repo> { /// Read the location of the checkout, the base of the work tree pub fn base(&self) -> &'repo std::path::Path { self.path } /// Return true if this worktree is the main worktree associated with a non-bare git repository. /// /// It cannot be removed. pub fn is_main(&self) -> bool { self.id().is_none() } /// Return true if this worktree cannot be pruned, moved or deleted, which is useful if it is located on an external storage device. /// /// Always false for the main worktree. pub fn is_locked(&self) -> bool { Proxy::new(self.parent, self.parent.git_dir()).is_locked() } /// Provide a reason for the locking of this worktree, if it is locked at all. 
/// /// Note that we squelch errors in case the file cannot be read in which case the /// reason is an empty string. pub fn lock_reason(&self) -> Option { Proxy::new(self.parent, self.parent.git_dir()).lock_reason() } /// Return the ID of the repository worktree, if it is a linked worktree, or `None` if it's a linked worktree. pub fn id(&self) -> Option<&BStr> { id(self.parent.git_dir(), self.parent.common_dir.is_some()) } /// Returns true if the `.git` file or directory exists within the worktree. /// /// This is an indicator for the worktree to be checked out particularly if the parent repository is a submodule. pub fn dot_git_exists(&self) -> bool { self.path.join(gix_discover::DOT_GIT_DIR).exists() } } pub(crate) fn id(git_dir: &std::path::Path, has_common_dir: bool) -> Option<&BStr> { if !has_common_dir { return None; } let candidate = gix_path::os_str_into_bstr(git_dir.file_name().expect("at least one directory level")) .expect("no illformed UTF-8"); let maybe_worktrees = git_dir.parent()?; (maybe_worktrees.file_name()?.to_str()? == "worktrees").then_some(candidate) } /// pub mod proxy; /// #[cfg(feature = "index")] pub mod open_index { /// The error returned by [`Worktree::open_index()`][crate::Worktree::open_index()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] ConfigIndexThreads(#[from] crate::config::key::GenericErrorWithValue), #[error(transparent)] ConfigSkipHash(#[from] crate::config::boolean::Error), #[error(transparent)] IndexFile(#[from] gix_index::file::init::Error), #[error(transparent)] IndexCorrupt(#[from] gix_index::file::verify::Error), } impl crate::Worktree<'_> { /// A shortcut to [`crate::Repository::open_index()`]. pub fn open_index(&self) -> Result { self.parent.open_index() } /// A shortcut to [`crate::Repository::index()`]. 
pub fn index(&self) -> Result { self.parent.index() } } } /// #[cfg(feature = "excludes")] pub mod excludes { use crate::AttributeStack; /// The error returned by [`Worktree::excludes()`][crate::Worktree::excludes()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenIndex(#[from] crate::worktree::open_index::Error), #[error(transparent)] CreateCache(#[from] crate::config::exclude_stack::Error), } impl crate::Worktree<'_> { /// Configure a file-system cache checking if files below the repository are excluded. /// /// This takes into consideration all the usual repository configuration, namely: /// /// * `$XDG_CONFIG_HOME/…/ignore` if `core.excludesFile` is *not* set, otherwise use the configured file. /// * `$GIT_DIR/info/exclude` if present. /// /// When only excludes are desired, this is the most efficient way to obtain them. Otherwise use /// [`Worktree::attributes()`][crate::Worktree::attributes()] for accessing both attributes and excludes. pub fn excludes(&self, overrides: Option) -> Result, Error> { let index = self.index()?; Ok(self.parent.excludes( &index, overrides, gix_worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped, )?) } } } /// #[cfg(feature = "attributes")] pub mod attributes { use crate::{AttributeStack, Worktree}; /// The error returned by [`Worktree::attributes()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] OpenIndex(#[from] crate::worktree::open_index::Error), #[error(transparent)] CreateCache(#[from] crate::repository::attributes::Error), } impl<'repo> Worktree<'repo> { /// Configure a file-system cache checking if files below the repository are excluded or for querying their attributes. /// /// This takes into consideration all the usual repository configuration, namely: /// /// * `$XDG_CONFIG_HOME/…/ignore|attributes` if `core.excludesFile|attributesFile` is *not* set, otherwise use the configured file. 
/// * `$GIT_DIR/info/exclude|attributes` if present. pub fn attributes(&self, overrides: Option) -> Result, Error> { let index = self.index()?; Ok(self.parent.attributes( &index, gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping, gix_worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped, overrides, )?) } /// Like [attributes()][Self::attributes()], but without access to exclude/ignore information. pub fn attributes_only(&self) -> Result, Error> { let index = self.index()?; self.parent .attributes_only( &index, gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping, ) .map_err(|err| Error::CreateCache(err.into())) } } } /// #[cfg(feature = "attributes")] pub mod pathspec { use crate::{ bstr::BStr, config::{cache::util::ApplyLeniencyDefaultValue, tree::gitoxide}, Worktree, }; /// The error returned by [`Worktree::pathspec()`]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Init(#[from] crate::pathspec::init::Error), #[error(transparent)] OpenIndex(#[from] crate::worktree::open_index::Error), } impl<'repo> Worktree<'repo> { /// Configure pathspecs `patterns` to be matched against, with pathspec attributes read from the worktree and then from the index /// if needed. /// /// Note that the `empty_patterns_match_prefix` flag of the [parent method](crate::Repository::pathspec()) defaults to `true`. /// /// ### Deviation /// /// Pathspec attributes match case-insensitively by default if the underlying filesystem is configured that way. 
pub fn pathspec( &self, patterns: impl IntoIterator>, ) -> Result, Error> { let index = self.index()?; let inherit_ignore_case = self .parent .config .resolved .boolean("gitoxide.pathspec.inheritIgnoreCase") .map(|res| { gitoxide::Pathspec::INHERIT_IGNORE_CASE .enrich_error(res) .with_lenient_default_value( self.parent.config.lenient_config, gitoxide::Pathspec::INHERIT_IGNORE_CASE_DEFAULT, ) }) .transpose() .map_err(|err| Error::Init(crate::pathspec::init::Error::Defaults(err.into())))? .unwrap_or(gitoxide::Pathspec::INHERIT_IGNORE_CASE_DEFAULT); Ok(self.parent.pathspec( true, /* empty patterns match prefix */ patterns, inherit_ignore_case, &index, gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping, )?) } } } gix-0.69.1/src/worktree/proxy.rs000064400000000000000000000075731046102023000147040ustar 00000000000000#![allow(clippy::result_large_err)] use std::path::{Path, PathBuf}; use crate::{ bstr::{BStr, BString, ByteSlice}, worktree::Proxy, Repository, ThreadSafeRepository, }; #[allow(missing_docs)] pub mod into_repo { use std::path::PathBuf; /// The error returned by [`Proxy::into_repo()`][super::Proxy::into_repo()]. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { #[error(transparent)] Open(#[from] crate::open::Error), #[error("Worktree at '{}' is inaccessible", .base.display())] MissingWorktree { base: PathBuf }, #[error(transparent)] MissingGitDirFile(#[from] std::io::Error), } } impl<'repo> Proxy<'repo> { pub(crate) fn new(parent: &'repo Repository, git_dir: impl Into) -> Self { Proxy { parent, git_dir: git_dir.into(), } } } impl Proxy<'_> { /// Read the location of the checkout, the base of the work tree. /// Note that the location might not exist. 
pub fn base(&self) -> std::io::Result { let git_dir = self.git_dir.join("gitdir"); let base_dot_git = gix_discover::path::from_plain_file(&git_dir).ok_or_else(|| { std::io::Error::new( std::io::ErrorKind::NotFound, format!("Required file '{}' does not exist", git_dir.display()), ) })??; Ok(gix_discover::path::without_dot_git_dir(base_dot_git)) } /// The git directory for the work tree, typically contained within the parent git dir. pub fn git_dir(&self) -> &Path { &self.git_dir } /// The name of the worktree, which is derived from its folder within the `worktrees` directory within the parent `.git` folder. pub fn id(&self) -> &BStr { gix_path::os_str_into_bstr(self.git_dir.file_name().expect("worktrees/ parent dir")) .expect("no illformed UTF-8") } /// Return true if the worktree cannot be pruned, moved or deleted, which is useful if it is located on an external storage device. pub fn is_locked(&self) -> bool { self.git_dir.join("locked").is_file() } /// Provide a reason for the locking of this worktree, if it is locked at all. /// /// Note that we squelch errors in case the file cannot be read in which case the /// reason is an empty string. pub fn lock_reason(&self) -> Option { std::fs::read(self.git_dir.join("locked")) .ok() .map(|contents| contents.trim().into()) } /// Transform this proxy into a [`Repository`] while ignoring issues reading `base()` and ignoring that it might not exist. /// /// Most importantly, the `Repository` might be initialized with a non-existing work tree directory as the checkout /// was removed or moved in the mean time or is unavailable for other reasons. /// The caller will encounter io errors if it's used like the work tree is guaranteed to be present, but can still access /// a lot of information if work tree access is avoided. 
pub fn into_repo_with_possibly_inaccessible_worktree(self) -> Result { let base = self.base().ok(); let repo = ThreadSafeRepository::open_from_paths(self.git_dir, base, self.parent.options.clone())?; Ok(repo.into()) } /// Like `into_repo_with_possibly_inaccessible_worktree()` but will fail if the `base()` cannot be read or /// if the worktree doesn't exist. /// /// Note that it won't fail if the worktree doesn't exist. pub fn into_repo(self) -> Result { let base = self.base()?; if !base.is_dir() { return Err(into_repo::Error::MissingWorktree { base }); } let repo = ThreadSafeRepository::open_from_paths(self.git_dir, base.into(), self.parent.options.clone())?; Ok(repo.into()) } }