grcov-0.8.22/.cargo/config.toml

# Statically link the C runtime (CRT) on the Windows MSVC targets,
# so release binaries do not require the Visual C++ runtime DLLs.
[target.x86_64-pc-windows-msvc]
rustflags = ["-Ctarget-feature=+crt-static"]

[target.i686-pc-windows-msvc]
rustflags = ["-Ctarget-feature=+crt-static"]

grcov-0.8.22/.cargo_vcs_info.json

{
  "git": {
    "sha1": "07a786393256a0e2cdda2b8cb222f66fd6da0822"
  },
  "path_in_vcs": ""
}

grcov-0.8.22/Cargo.lock

# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "anstream" version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", "windows-sys 0.52.0", ] [[package]] name = "arbitrary" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" dependencies = [ "derive_arbitrary", ] [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "bitflags" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "bstr" version = "1.10.0" source
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" dependencies = [ "memchr", "serde", ] [[package]] name = "bumpalo" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytecount" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" version = "1.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" dependencies = [ "shlex", ] [[package]] name = "cfb" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f" dependencies = [ "byteorder", "fnv", "uuid", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", "wasm-bindgen", "windows-link", ] [[package]] name = "chrono-tz" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93698b29de5e97ad0ae26447b344c482a7284c737d9ddc5f9e52b74a336671bb" dependencies = [ "chrono", "chrono-tz-build", "phf", ] [[package]] name = "chrono-tz-build" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c088aee841df9c3041febbb73934cfc39708749bf96dc827e3359cd39ef11b1" dependencies = [ "parse-zoneinfo", "phf", "phf_codegen", ] [[package]] name = "clap" version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap_builder" version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", "terminal_size", ] [[package]] name = "clap_derive" version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" dependencies = [ "heck", "proc-macro2", "quote", "syn", ] [[package]] name = "clap_lex" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "colorchoice" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" 
[[package]] name = "cpp_demangle" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96e58d342ad113c2b878f16d5d034c03be492ae460cdbc02b7f0f2284d310c7d" dependencies = [ "cfg-if", ] [[package]] name = "cpufeatures" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "debugid" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" dependencies = [ "uuid", ] [[package]] name = "deranged" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", ] [[package]] name = "derive_arbitrary" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "deunicode" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "339544cc9e2c4dc3fc7149fd630c5f22263a4fdf18a98afd0075784968b5cf00" [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = 
"errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys 0.59.0", ] [[package]] name = "fastrand" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" [[package]] name = "flate2" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", "libz-sys", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] name = "getrandom" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "libc", "wasi 0.13.3+wasi-0.2.2", "windows-targets", ] [[package]] name = "globset" version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" dependencies = [ "aho-corasick", "bstr", "log", "regex-automata", "regex-syntax", ] [[package]] name = "globwalk" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" dependencies = [ "bitflags", "ignore", "walkdir", ] [[package]] name = "grcov" version = "0.8.22" dependencies = [ "chrono", "clap", "crossbeam-channel", "flate2", "globset", "infer", "lazy_static", "log", "md-5", "num_cpus", "once_cell", "pretty_assertions", "quick-xml", "rayon", "regex", "rustc-hash", "rustc_version", "semver", "serde", "serde_json", "simplelog", "smallvec", "symbolic-common", "symbolic-demangle", "tabled", "tcmalloc", "tempfile", "tera", "uuid", "walkdir", "zip", ] [[package]] name = "hashbrown" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "humansize" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cb51c9a029ddc91b07a787f1d86b53ccfa49b0e86688c946ebe8d3555685dd7" dependencies = [ "libm", ] [[package]] name = "iana-time-zone" version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "ignore" version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata", "same-file", "walkdir", "winapi-util", ] [[package]] name = "indexmap" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "infer" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7" dependencies = [ "cfb", ] [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itoa" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "js-sys" version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "libm" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libz-sys" version = "1.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" dependencies = [ "cc", "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "linux-raw-sys" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" [[package]] name = "lockfree-object-pool" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e" [[package]] name = "log" version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "md-5" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ "cfg-if", "digest", ] [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" dependencies = [ "libc", ] [[package]] name = "miniz_oxide" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] [[package]] name = "msvc-demangler" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4c25a3bb7d880e8eceab4822f3141ad0700d20f025991c1f03bd3d00219a5fc" dependencies = [ "bitflags", ] [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "num_cpus" version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ "hermit-abi", "libc", ] [[package]] name = "num_threads" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" dependencies = [ "libc", ] [[package]] name = "once_cell" version = "1.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" [[package]] name = "papergrid" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b915f831b85d984193fdc3d3611505871dc139b2534530fa01c1a6a6707b6723" dependencies = [ "bytecount", "fnv", "unicode-width", ] [[package]] name = "parse-zoneinfo" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24" dependencies = [ "regex", ] [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ "memchr", "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" dependencies = [ "pest", "pest_generator", ] [[package]] name = "pest_generator" version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", "syn", ] [[package]] name = "pest_meta" version = "2.7.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" dependencies = [ "once_cell", "pest", "sha2", ] [[package]] name = "phf" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ "phf_shared", ] [[package]] name = "phf_codegen" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" dependencies = [ "phf_generator", "phf_shared", ] [[package]] name = "phf_generator" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared", "rand", ] [[package]] name = "phf_shared" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ "siphasher", ] [[package]] name = "pkg-config" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "powerfmt" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" dependencies = [ "zerocopy", ] [[package]] name = "pretty_assertions" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", ] [[package]] name = "proc-macro-error-attr2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" dependencies = [ "proc-macro2", "quote", ] [[package]] name = "proc-macro-error2" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", "syn", ] [[package]] name = "proc-macro2" version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "quick-xml" version = "0.37.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003" dependencies = [ "memchr", ] [[package]] name = "quote" version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.15", ] [[package]] name = "rayon" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", ] [[package]] name = "rayon-core" version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc_version" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.4.14", "windows-sys 0.52.0", ] [[package]] name = "rustix" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.9.3", "windows-sys 0.59.0", ] [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "semver" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" [[package]] name = "serde" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ 
"serde_derive", ] [[package]] name = "serde_derive" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "sha2" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "simd-adler32" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "simplelog" version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16257adbfaef1ee58b1363bdc0664c9b8e1e30aed86049635fb5f147d065a9c0" dependencies = [ "log", "termcolor", "time", ] [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "slug" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "882a80f72ee45de3cc9a5afeb2da0331d58df69e4e7d8eeb5d3c7784ae67e724" dependencies = [ "deunicode", "wasm-bindgen", ] [[package]] name = "smallvec" version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "symbolic-common" version = "12.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66135c8273581acaab470356f808a1c74a707fe7ec24728af019d7247e089e71" dependencies = [ "debugid", "memmap2", "stable_deref_trait", "uuid", ] [[package]] name = "symbolic-demangle" version = "12.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42bcacd080282a72e795864660b148392af7babd75691d5ae9a3b77e29c98c77" dependencies = [ "cc", "cpp_demangle", "msvc-demangler", "rustc-demangle", "symbolic-common", ] [[package]] name = "syn" version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tabled" version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "121d8171ee5687a4978d1b244f7d99c43e7385a272185a2f1e1fa4dc0979d444" dependencies = [ "papergrid", "tabled_derive", ] [[package]] name = "tabled_derive" version = "0.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "52d9946811baad81710ec921809e2af67ad77719418673b2a3794932d57b7538" dependencies = [ "heck", "proc-macro-error2", "proc-macro2", "quote", "syn", ] [[package]] name = "tcmalloc" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "375205113d84a1c5eeed67beaa0ce08e41be1a9d5acc3425ad2381fddd9d819b" [[package]] name = "tempfile" version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ "fastrand", "getrandom 0.3.1", "once_cell", "rustix 1.0.2", "windows-sys 0.59.0", ] [[package]] name = "tera" version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab9d851b45e865f178319da0abdbfe6acbc4328759ff18dafc3a41c16b4cd2ee" dependencies = [ "chrono", "chrono-tz", "globwalk", "humansize", "lazy_static", "percent-encoding", "pest", "pest_derive", "rand", "regex", "serde", "serde_json", "slug", "unic-segment", ] [[package]] name = "termcolor" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] [[package]] name = "terminal_size" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef" dependencies = [ "rustix 0.38.37", "windows-sys 0.59.0", ] [[package]] name = "thiserror" version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "time" version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", "libc", "num-conv", "num_threads", "powerfmt", "serde", "time-core", "time-macros", ] [[package]] name = "time-core" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", ] [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unic-char-property" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" dependencies = [ "unic-char-range", ] [[package]] name = "unic-char-range" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" [[package]] name = "unic-common" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" [[package]] name = "unic-segment" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4ed5d26be57f84f176157270c112ef57b86debac9cd21daaabbe56db0f88f23" dependencies = [ "unic-ucd-segment", ] [[package]] name = "unic-ucd-segment" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2079c122a62205b421f499da10f3ee0f7697f012f55b675e002483c73ea34700" dependencies = [ "unic-char-property", "unic-char-range", "unic-ucd-version", ] [[package]] name = "unic-ucd-version" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" dependencies = [ "unic-common", ] [[package]] name = "unicode-ident" version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-width" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" dependencies = [ "getrandom 0.3.1", ] [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.95" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets", ] [[package]] name = "windows-link" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "wit-bindgen-rt" version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ "bitflags", ] [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "zerocopy" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "zip" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27c03817464f64e23f6f37574b4fdc8cf65925b5bfd2b0f2aedf959791941f88" dependencies = [ "arbitrary", "crc32fast", "crossbeam-utils", "flate2", "indexmap", "memchr", "zopfli", ] [[package]] name = "zopfli" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946" dependencies = [ "bumpalo", "crc32fast", "lockfree-object-pool", "log", "once_cell", "simd-adler32", ] grcov-0.8.22/Cargo.toml0000644000000065630000000000100102550ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. 
[package] edition = "2018" name = "grcov" version = "0.8.22" authors = ["Marco Castelluccio "] build = false exclude = [ "test/*", "tests/*", "benches/*", ".github/*", ".gitignore", ".dockerignore", ".pre-commit-config.yaml", "CODE_OF_CONDUCT.md", ".markdownlint.yaml", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Rust tool to collect and aggregate code coverage data for multiple source files" homepage = "https://github.com/mozilla/grcov" documentation = "https://github.com/mozilla/grcov" readme = "README.md" keywords = ["coverage"] categories = [ "command-line-utilities", "development-tools", "development-tools::testing", ] license = "MPL-2.0" repository = "https://github.com/mozilla/grcov" [features] default = [ "deflate-zlib", "demangle-no-swift", ] deflate = ["zip/deflate"] deflate-miniz = ["zip/deflate-miniz"] deflate-zlib = ["zip/deflate-zlib"] demangle-no-swift = [ "symbolic-demangle/cpp", "symbolic-demangle/msvc", "symbolic-demangle/rust", ] demangle-with-swift = [ "symbolic-demangle/cpp", "symbolic-demangle/msvc", "symbolic-demangle/rust", "symbolic-demangle/swift", ] tc = ["tcmalloc"] [lib] name = "grcov" path = "src/lib.rs" [[bin]] name = "grcov" path = "src/main.rs" [dependencies.chrono] version = "0.4" features = ["serde"] [dependencies.clap] version = "4.5" features = [ "cargo", "derive", "deprecated", "wrap_help", ] [dependencies.crossbeam-channel] version = "0.5" [dependencies.flate2] version = "1.1" [dependencies.globset] version = "0.4" [dependencies.infer] version = "0.19.0" [dependencies.lazy_static] version = "1.5" [dependencies.log] version = "0.4" [dependencies.md-5] version = "0.10" [dependencies.num_cpus] version = "1.15" [dependencies.once_cell] version = "1.21" [dependencies.quick-xml] version = "0.37" [dependencies.rayon] version = "1.10" [dependencies.regex] version = "1.11" [dependencies.rustc-hash] version = "2.1" [dependencies.rustc_version] version = "0.4" [dependencies.semver] version = "1.0" [dependencies.serde] version = "1.0" features = ["derive"] [dependencies.serde_json] version = "1.0" [dependencies.simplelog] version = "0.12" [dependencies.smallvec] version = "1.14" [dependencies.symbolic-common] version = "12.14" [dependencies.symbolic-demangle] version = "12.14" default-features = false [dependencies.tabled] version = "0.18" [dependencies.tempfile] version = "3.19" [dependencies.tera] version = "1.20" [dependencies.uuid] version = "1.16" features = ["v4"] [dependencies.walkdir] version = "2.5" [dependencies.zip] version = "2.5" default-features = false [dev-dependencies.pretty_assertions] version = "1.4" [dev-dependencies.rustc_version] version = "0.4.1" [target."cfg(unix)".dependencies.tcmalloc] version = "0.3" optional = true [profile.release] lto = "thin" grcov-0.8.22/Cargo.toml.orig000064400000000000000000000041571046102023000137330ustar 00000000000000[package] name = "grcov" version = "0.8.22" authors = ["Marco Castelluccio "] description = "Rust tool to collect and aggregate code coverage data for multiple source files" license = "MPL-2.0" documentation = "https://github.com/mozilla/grcov" homepage = "https://github.com/mozilla/grcov" repository = "https://github.com/mozilla/grcov" readme = "README.md" keywords = ["coverage"] categories = ["command-line-utilities", "development-tools", "development-tools::testing"] exclude = [ "test/*", "tests/*", "benches/*", ".github/*", ".gitignore", ".dockerignore", ".pre-commit-config.yaml", "CODE_OF_CONDUCT.md", ".markdownlint.yaml" ] 
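grcov is usually installed as a standalone binary, but the normalized manifest above also declares a [lib] target and a small feature matrix. As a sketch only (this dependency stanza is hypothetical, not part of the crate), a downstream Cargo.toml that wanted the Swift demangler instead of the default demangle-no-swift set might depend on the library like this:

[dependencies.grcov]
version = "0.8.22"
default-features = false
features = ["deflate-zlib", "demangle-with-swift"]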
edition = "2018" [features] default = ["deflate-zlib", "demangle-no-swift"] tc = ["tcmalloc"] deflate = ["zip/deflate"] deflate-miniz = ["zip/deflate-miniz"] deflate-zlib = ["zip/deflate-zlib"] demangle-no-swift = ["symbolic-demangle/cpp", "symbolic-demangle/msvc", "symbolic-demangle/rust"] demangle-with-swift = [ "symbolic-demangle/cpp", "symbolic-demangle/msvc", "symbolic-demangle/rust", "symbolic-demangle/swift", ] [dependencies] chrono = { version = "0.4", features = ["serde"] } clap = { version = "4.5", features = ["cargo", "derive", "deprecated", "wrap_help"] } crossbeam-channel = "0.5" flate2 = "1.1" globset = "0.4" infer = "0.19.0" lazy_static = "1.5" log = "0.4" md-5 = "0.10" num_cpus = "1.15" once_cell = "1.21" quick-xml = "0.37" rayon = "1.10" regex = "1.11" rustc-hash = "2.1" rustc_version = "0.4" semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" simplelog = "0.12" smallvec = "1.14" symbolic-common = "12.14" symbolic-demangle = { version = "12.14", default-features = false } tabled = "0.18" tempfile = "3.19" tera = "1.20" uuid = { version = "1.16", features = ["v4"] } walkdir = "2.5" zip = { version = "2.5", default-features = false } [dev-dependencies] pretty_assertions = "1.4" rustc_version = "0.4.1" [profile.release] lto = "thin" [target.'cfg(unix)'.dependencies] #tcmalloc = { version = "0.3", features = ["bundled"] } tcmalloc = { version = "0.3", optional = true } grcov-0.8.22/LICENSE-MPL-2.0000064400000000000000000000405251046102023000127730ustar 00000000000000Mozilla Public License Version 2.0 ================================== 1. Definitions -------------- 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. 
"Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. 
2.6. Fair Use

This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents.

2.7. Conditions

Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1.

3. Responsibilities
-------------------

3.1. Distribution of Source Form

All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form.

3.2. Distribution of Executable Form

If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License.

3.3. Distribution of a Larger Work

You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s).

3.4. Notices

You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies.

3.5. Application of Additional Terms

You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction.
Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. * * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. * * * ************************************************************************ ************************************************************************ * * * 7. 
Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. * * * ************************************************************************ 8. Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. 
If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. grcov-0.8.22/README.md000064400000000000000000000447541046102023000123320ustar 00000000000000# grcov [![Build Status](https://github.com/mozilla/grcov/actions/workflows/CICD.yml/badge.svg?branch=master)](https://github.com/mozilla/grcov/actions/workflows/CICD.yml) [![codecov](https://codecov.io/gh/mozilla/grcov/branch/master/graph/badge.svg)](https://codecov.io/gh/mozilla/grcov) [![crates.io](https://img.shields.io/crates/v/grcov.svg)](https://crates.io/crates/grcov) grcov collects and aggregates code coverage information for multiple source files. grcov processes .profraw and .gcda files which can be generated from llvm/clang or gcc. grcov also processes lcov files (for JS coverage) and JaCoCo files (for Java coverage). Linux, macOS and Windows are supported. This is a project initiated by Mozilla to gather code coverage results on Firefox. ## Table of Contents - [man grcov](#man-grcov) - [How to get grcov](#how-to-get-grcov) - [Usage](#usage) - [Example: How to generate source-based coverage for a Rust project](#example-how-to-generate-source-based-coverage-for-a-rust-project) - [Example: How to generate .gcda files for C/C++](#example-how-to-generate-gcda-files-for-cc) - [Example: How to generate .gcda files for a Rust project](#example-how-to-generate-gcda-files-for-a-rust-project) - [Generate a coverage report from coverage artifacts](#generate-a-coverage-report-from-coverage-artifacts) - [LCOV output](#lcov-output) - [Coveralls output](#coveralls-output) - [grcov with Travis](#grcov-with-travis) - [grcov with Gitlab](#grcov-with-gitlab) - [Alternative reports](#alternative-reports) - [Hosting HTML reports and using coverage badges](#hosting-html-reports-and-using-coverage-badges) - [Example](#example) - [Enabling symlinks on Windows](#enabling-symlinks-on-windows) - [Auto-formatting](#auto-formatting) - [Build & Test](#build--test) - [Minimum requirements](#minimum-requirements) - [License](#license) ## man grcov ```text Usage: grcov [OPTIONS] ... Arguments: ... Sets the input paths to use Options: -b, --binary-path Sets the path to the compiled binary to be used --llvm-path Sets the path to the LLVM bin directory -t, --output-types Comma separated list of custom output types: - *html* for a HTML coverage report; - *coveralls* for the Coveralls specific format; - *lcov* for the lcov INFO format; - *covdir* for the covdir recursive JSON format; - *coveralls+* for the Coveralls specific format with function information; - *ade* for the ActiveData-ETL specific format; - *files* to only return a list of files. - *markdown* for human easy read. - *cobertura* for output in cobertura format. - *cobertura-pretty* to pretty-print in cobertura format. [default: lcov] -o, --output-path Specifies the output path. 
This is a file for a single output type and must be a folder for multiple output types --output-config-file Specifies the output config file -s, --source-dir Specifies the root directory of the source files -p, --prefix-dir Specifies a prefix to remove from the paths (e.g. if grcov is run on a different machine than the one that generated the code coverage information) --ignore-not-existing Ignore source files that can't be found on the disk --ignore Ignore files/directories specified as globs --keep-only Keep only files/directories specified as globs --path-mapping --branch Enables parsing branch coverage information --filter Filters out covered/uncovered files. Use 'covered' to only return covered files, 'uncovered' to only return uncovered files [possible values: covered, uncovered] --llvm Speeds-up parsing, when the code coverage information is exclusively coming from a llvm build --token Sets the repository token from Coveralls, required for the 'coveralls' and 'coveralls+' formats --commit-sha Sets the hash of the commit used to generate the code coverage data --service-name Sets the service name --service-number Sets the service number --service-job-id Sets the service job id [aliases: service-job-number] --service-pull-request Sets the service pull request number --parallel Sets the build type to be parallel for 'coveralls' and 'coveralls+' formats --threads --precision Sets coverage decimal point precision on output reports [default: 2] --guess-directory-when-missing --vcs-branch Set the branch for coveralls report. Defaults to 'master' [default: master] --log Set the file where to log (or stderr or stdout). Defaults to 'stderr' [default: stderr] --log-level Set the log level [default: ERROR] [possible values: OFF, ERROR, WARN, INFO, DEBUG, TRACE] --excl-line Lines in covered files containing this marker will be excluded --excl-start Marks the beginning of an excluded section. The current line is part of this section --excl-stop Marks the end of an excluded section. The current line is part of this section --excl-br-line Lines in covered files containing this marker will be excluded from branch coverage --excl-br-start Marks the beginning of a section excluded from branch coverage. The current line is part of this section --excl-br-stop Marks the end of a section excluded from branch coverage. The current line is part of this section --no-demangle No symbol demangling -h, --help Print help (see a summary with '-h') -V, --version Print version ``` ## How to get grcov Grcov can be downloaded from [releases](https://github.com/mozilla/grcov/releases) or, if you have Rust installed, you can run `cargo install grcov`. ## Usage ### Example: How to generate source-based coverage for a Rust project 1. Install the llvm-tools or llvm-tools-preview component: ```sh rustup component add llvm-tools-preview ``` 2. Ensure that the following environment variable is set up: ```sh export RUSTFLAGS="-Cinstrument-coverage" ``` 3. Build your code: `cargo build` 4. Ensure each test run gets its own profile information by defining the LLVM_PROFILE_FILE environment variable (%p will be replaced by the process ID, and %m by the binary signature): ```sh export LLVM_PROFILE_FILE="your_name-%p-%m.profraw" ``` 5. Run your tests: `cargo test` In the CWD, you will see a `.profraw` file has been generated. This contains the profiling information that grcov will parse, alongside your binaries.
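Putting the steps above together, here is a minimal end-to-end sketch (assuming a Unix shell; `your_name` is just the placeholder prefix used in step 4):

```sh
# One-shot version of the source-based coverage steps above.
rustup component add llvm-tools-preview
export RUSTFLAGS="-Cinstrument-coverage"
export LLVM_PROFILE_FILE="your_name-%p-%m.profraw"
cargo build
cargo test
# The current directory now contains .profraw files for grcov to parse
# (see "Generate a coverage report from coverage artifacts" below).
```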
### Example: How to generate .gcda files for C/C++ Pass `--coverage` to `clang` or `gcc` (or, for older gcc versions, pass the `-ftest-coverage` and `-fprofile-arcs` options; see the [gcc docs](https://gcc.gnu.org/onlinedocs/gcc/Gcov-Data-Files.html)). ### Example: How to generate .gcda files for a Rust project **Nightly Rust is required** to use grcov for Rust gcov-based coverage. Alternatively, you can `export RUSTC_BOOTSTRAP=1`, which basically turns your stable rustc into a Nightly one. 1. Ensure that the following environment variables are set up: ```sh export CARGO_INCREMENTAL=0 export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort" export RUSTDOCFLAGS="-Cpanic=abort" ``` These will ensure that things like dead code elimination do not skew the coverage. 2. Build your code: `cargo build` If you look in the `target/debug/deps` dir you will see `.gcno` files have appeared. These are the locations that could be covered. 3. Run your tests: `cargo test` In the `target/debug/deps/` dir you will now also see `.gcda` files. These contain the hit counts on which of those locations have been reached. Both sets of files are used as inputs to `grcov`. ### Generate a coverage report from coverage artifacts Generate an HTML coverage report like this: ```sh grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage/ ``` N.B.: The `--binary-path` argument is only necessary for source-based coverage. You can see the report in `target/debug/coverage/index.html`. (Alternatively, with `-t lcov`, grcov will output an lcov-compatible coverage report that you can then feed into lcov's `genhtml` command.) #### LCOV output By passing `-t lcov` you can generate an lcov.info file and pass it to genhtml: ```sh genhtml -o ./target/debug/coverage/ --show-details --highlight --ignore-errors source --legend ./target/debug/lcov.info ``` LCOV output should be used when uploading to Codecov, with the `--branch` argument for branch coverage support. #### Coveralls output Coverage can also be generated in coveralls format: ```sh grcov . --binary-path ./target/debug/ -t coveralls -s . --token YOUR_COVERALLS_TOKEN > coveralls.json ``` #### grcov with Travis Here is an example of a .travis.yml file for source-based coverage: ```yaml language: rust before_install: - curl -L https://github.com/mozilla/grcov/releases/latest/download/grcov-x86_64-unknown-linux-gnu.tar.bz2 | tar jxf - matrix: include: - os: linux rust: stable script: - rustup component add llvm-tools-preview - export RUSTFLAGS="-Cinstrument-coverage" - cargo build --verbose - LLVM_PROFILE_FILE="your_name-%p-%m.profraw" cargo test --verbose - ./grcov . --binary-path ./target/debug/ -s . -t lcov --branch --ignore-not-existing --ignore "/*" -o lcov.info - bash <(curl -s https://codecov.io/bash) -f lcov.info ``` Here is an example of a .travis.yml file for gcov-based coverage: ```yaml language: rust before_install: - curl -L https://github.com/mozilla/grcov/releases/latest/download/grcov-x86_64-unknown-linux-gnu.tar.bz2 | tar jxf - matrix: include: - os: linux rust: stable script: - export CARGO_INCREMENTAL=0 - export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort" - export RUSTDOCFLAGS="-Cpanic=abort" - cargo build --verbose $CARGO_OPTIONS - cargo test --verbose $CARGO_OPTIONS - | zip -0 ccov.zip `find . \( -name "YOUR_PROJECT_NAME*.gc*" \) -print`; ./grcov ccov.zip -s .
-t lcov --llvm --branch --ignore-not-existing --ignore "/*" -o lcov.info; bash <(curl -s https://codecov.io/bash) -f lcov.info; ``` #### grcov with Gitlab Here is an example `.gitlab-ci.yml` which will build your project, then collect coverage data in a format that Gitlab understands. It is assumed that you'll use an image which already has the relevant tools installed; if that's not the case, put the appropriate commands at the beginning of the `script` stanza. ```yaml build: variables: # Set an environment variable which causes LLVM to write coverage data to the specified location. This is arbitrary, but the path passed to grcov (the first argument) must contain these files or the coverage data won't be noticed. LLVM_PROFILE_FILE: "target/coverage/%p-%m.profraw" script: # Run all your Rust-based tests - cargo test --workspace # Optionally, run some other command that exercises your code to get more coverage: - ./bin/integration-tests --foo bar # Create the output directory - mkdir target/coverage # This is a multi-line command. You can also write it all as one line if desired, just remove # the '|' and all the newlines. - | grcov # This path must match the setting in LLVM_PROFILE_FILE. If you're not getting the coverage # you expect, look for '.profraw' files in other directories. target/coverage # If your target dir is modified, this will need to match... --binary-path target/debug # Where the source directory is expected -s . # Where to write the output; this should be a directory that exists. -o target/coverage # Exclude coverage of crates and Rust stdlib code. If you get unexpected coverage results from # this (empty, for example), try different combinations of '--ignore-not-existing', # '--ignore "$HOME/.cargo/**"' and see what kind of filtering gets you the coverage you're # looking for. --keep-only 'src/*' # Doing both isn't strictly necessary; if you won't use the HTML version, you can modify this # line. --output-types html,cobertura # Extract just the top-level coverage number from the XML report. - xmllint --xpath "concat('Coverage: ', 100 * string(//coverage/@line-rate), '%')" target/coverage/cobertura.xml coverage: '/Coverage: \d+(?:\.\d+)?/' artifacts: paths: - target/coverage/ reports: coverage_report: coverage_format: cobertura path: target/coverage/cobertura.xml ``` This also ties into Gitlab's coverage percentage collection, so in merge requests you'll be able to see: - increases or decreases of coverage - whether particular lines of code modified by a merge request are covered or not. Additionally, the HTML-formatted coverage report (if you leave it enabled) will be produced as an artifact. ### Alternative reports grcov provides the following output types: | Output Type `-t` | Description | | ---------------- | ------------------------------------------------------------------------- | | lcov (default) | lcov's INFO format that is compatible with the linux coverage project. | | ade | ActiveData\-ETL format. Only useful for Mozilla projects. | | coveralls | Generates coverage in Coveralls format. | | coveralls+ | Like coveralls but with function level information. | | files | Output a file list of covered or uncovered source files. | | covdir | Provides coverage in a recursive JSON format. | | html | Output an HTML coverage report, including coverage badges for your README. | | cobertura | Cobertura XML. Used for coverage analysis in some IDEs and Gitlab CI. | | cobertura-pretty | Pretty-printed Cobertura XML.
| ### Hosting HTML reports and using coverage badges The HTML report can be hosted on static website providers like GitHub Pages, Netlify and others. It is common to provide a coverage badge in a project's readme to show the current percentage of covered code. To still allow adding the badge when using a static site host, grcov generates coverage badges and a JSON file with coverage information that can be used to dynamically generate badges. The coverage data can be found at `/coverage.json` and the generated badges are available as SVGs at `/badges/*svg`. The design of the generated badges is taken from `shields.io` but may not be updated immediately if there is any change. Using their endpoint method is recommended if other badges from their service are used already. ### Enabling symlinks on Windows `grcov` uses symbolic links to avoid copying files when processing directories of coverage data. On Windows, by default, creating symbolic links to files requires Administrator privileges. (The reason is to avoid security attacks in applications that were designed before Windows added support for symbolic links.) When running on Windows `grcov` will attempt to create a symbolic link. If that fails then `grcov` will fall back to copying the file. Copying is less efficient but at least allows users to run `grcov`. `grcov` will also print a warning when it falls back to copying a file, advising the user either to enable the privilege for their account or to run as Administrator. You can enable the "Create Symbolic Links" privilege for your account so that you do not need to run as Administrator to use `grcov`. 1. Click Start, then select "Local Group Policy Editor". Or just run `gpedit.msc` to open it directly. 1. In the navigation tree, select "Computer Configuration", "Windows Settings", "Security Settings", "Local Policies", "User Rights Assignment". 1. In the pane on the right, select "Create symbolic links" and double-click it. 1. Click "Add User or Group", and add your account. 1. Log out and then log back in. #### Example Let's consider we have a project with username `sample` and project `awesome` that is hosted with GitHub Pages at `https://sample.github.io/awesome`. By using the `shields.io` endpoint we can create a Markdown badge like so: ```md [![coverage](https://shields.io/endpoint?url=https://sample.github.io/awesome/coverage.json)](https://sample.github.io/awesome/index.html) ``` If we want to avoid using `shields.io` as well, we can use the generated badges as follows (note the different URL for the image): ```md [![coverage](https://sample.github.io/awesome/badges/flat.svg)](https://sample.github.io/awesome/index.html) ``` ## Auto-formatting This project uses pre-commit. Please run `pre-commit install` to install the git pre-commit hooks on your clone. Instructions on how to install pre-commit can be found [here](https://pre-commit.com/#install). Every time you try to commit, pre-commit will run checks on your files to make sure they follow our style standards and they aren't affected by some simple issues. If the checks fail, pre-commit won't let you commit. ## Build & Test Build with: ```sh cargo build ``` To run unit tests: ```sh cargo test --lib ``` To run integration tests, it is suggested to use the Docker image defined in tests/Dockerfile. Simply build the image to run them: ```sh docker build -t marcocas/grcov -f tests/Dockerfile .
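# Building the image also runs the integration tests as part of the Docker build.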
``` Otherwise, if you don't want to use Docker, the only prerequisite is to install GCC 7, setting the `GCC_CXX` environment variable to `g++-7` and the `GCOV` environment variable to `gcov-7`. Then run the tests with: ```sh cargo test ``` ## Minimum requirements - GCC 4.9 or higher is required (if parsing coverage artifacts generated by GCC). - Rust 1.52 ## License Published under the MPL 2.0 license. grcov-0.8.22/src/cobertura.rs000064400000000000000000000656261046102023000141770ustar 00000000000000use crate::defs::*; use quick_xml::{ events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event}, Writer, }; use rustc_hash::FxHashMap; use std::time::{SystemTime, UNIX_EPOCH}; use std::{ fmt::Display, io::{BufWriter, Cursor, Write}, }; use std::{fmt::Formatter, path::Path}; use symbolic_common::Name; use symbolic_demangle::{Demangle, DemangleOptions}; use crate::output::get_target_output_writable; macro_rules! demangle { ($name: expr, $demangle: expr, $options: expr) => {{ if $demangle { Name::from($name) .demangle($options) .unwrap_or_else(|| $name.clone()) } else { $name.clone() } }}; } // http://cobertura.sourceforge.net/xml/coverage-04.dtd struct Coverage { sources: Vec, packages: Vec, } #[derive(Default)] struct CoverageStats { lines_covered: f64, lines_valid: f64, branches_covered: f64, branches_valid: f64, complexity: f64, } impl std::ops::Add for CoverageStats { type Output = Self; fn add(self, rhs: Self) -> Self::Output { Self { lines_covered: self.lines_covered + rhs.lines_covered, lines_valid: self.lines_valid + rhs.lines_valid, branches_covered: self.branches_covered + rhs.branches_covered, branches_valid: self.branches_valid + rhs.branches_valid, complexity: self.complexity + rhs.complexity, } } } impl CoverageStats { fn from_lines(lines: FxHashMap) -> Self { let lines_covered = lines .iter() .fold(0.0, |c, (_, l)| if l.covered() { c + 1.0 } else { c }); let lines_valid = lines.len() as f64; let branches: Vec> = lines .into_iter() .filter_map(|(_, l)| match l { Line::Branch { conditions, .. } => Some(conditions), Line::Plain { .. 
} => None, }) .collect(); let (branches_covered, branches_valid) = branches .iter() .fold((0.0, 0.0), |(covered, valid), conditions| { ( covered + conditions.iter().fold(0.0, |hits, c| c.coverage + hits), valid + conditions.len() as f64, ) }); Self { lines_valid, lines_covered, branches_valid, branches_covered, // for now always 0 complexity: 0.0, } } fn line_rate(&self) -> f64 { if self.lines_valid > 0.0 { self.lines_covered / self.lines_valid } else { 0.0 } } fn branch_rate(&self) -> f64 { if self.branches_valid > 0.0 { self.branches_covered / self.branches_valid } else { 0.0 } } } trait Stats { fn get_lines(&self) -> FxHashMap; fn get_stats(&self) -> CoverageStats { CoverageStats::from_lines(self.get_lines()) } } impl Stats for Coverage { fn get_lines(&self) -> FxHashMap { unimplemented!("does not make sense to ask Coverage for lines") } fn get_stats(&self) -> CoverageStats { self.packages .iter() .map(|p| p.get_stats()) .fold(CoverageStats::default(), |acc, stats| acc + stats) } } struct Package { name: String, classes: Vec, } impl Stats for Package { fn get_lines(&self) -> FxHashMap { self.classes.get_lines() } } struct Class { name: String, file_name: String, lines: Vec, methods: Vec, } impl Stats for Class { fn get_lines(&self) -> FxHashMap { let mut lines = self.lines.get_lines(); lines.extend(self.methods.get_lines()); lines } } struct Method { name: String, signature: String, lines: Vec, } impl Stats for Method { fn get_lines(&self) -> FxHashMap { self.lines.get_lines() } } impl Stats for Vec { fn get_lines(&self) -> FxHashMap { let mut lines = FxHashMap::default(); for item in self { lines.extend(item.get_lines()); } lines } } #[derive(Debug, Clone)] enum Line { Plain { number: u32, hits: u64, }, Branch { number: u32, hits: u64, conditions: Vec, }, } impl Line { fn number(&self) -> u32 { match self { Line::Plain { number, .. } | Line::Branch { number, .. } => *number, } } fn covered(&self) -> bool { matches!(self, Line::Plain { hits, .. } | Line::Branch { hits, .. 
} if *hits > 0) } } impl Stats for Line { fn get_lines(&self) -> FxHashMap { let mut lines = FxHashMap::default(); lines.insert(self.number(), self.clone()); lines } } #[derive(Debug, Clone)] struct Condition { number: usize, cond_type: ConditionType, coverage: f64, } // Condition types #[derive(Debug, Clone)] enum ConditionType { Jump, } impl Display for ConditionType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::Jump => write!(f, "jump"), } } } fn get_coverage( results: &[ResultTuple], sources: Vec, demangle: bool, demangle_options: DemangleOptions, ) -> Coverage { let packages: Vec = results .iter() .map(|(_, rel_path, result)| { let all_lines: Vec = result.lines.keys().cloned().collect(); let end: u32 = result.lines.keys().last().unwrap_or(&0) + 1; let mut start_indexes: Vec = Vec::new(); for function in result.functions.values() { start_indexes.push(function.start); } start_indexes.sort_unstable(); let line_from_number = |number| { let hits = result.lines.get(&number).cloned().unwrap_or_default(); if let Some(branches) = result.branches.get(&number) { let conditions = branches .iter() .enumerate() .map(|(i, b)| Condition { cond_type: ConditionType::Jump, coverage: if *b { 1.0 } else { 0.0 }, number: i, }) .collect::>(); Line::Branch { number, hits, conditions, } } else { Line::Plain { number, hits } } }; let methods: Vec = result .functions .iter() .map(|(name, function)| { let mut func_end = end; for start in &start_indexes { if *start > function.start { func_end = *start; break; } } let mut lines_in_function: Vec = Vec::new(); for line in all_lines .iter() .filter(|&&x| x >= function.start && x < func_end) { lines_in_function.push(*line); } let lines: Vec = lines_in_function .into_iter() .map(line_from_number) .collect(); Method { name: demangle!(name, demangle, demangle_options), signature: String::new(), lines, } }) .collect(); let lines: Vec = all_lines.into_iter().map(line_from_number).collect(); let class = Class { name: rel_path .file_stem() .map(|x| x.to_str().unwrap()) .unwrap_or_default() .to_string(), file_name: rel_path.to_str().unwrap_or_default().to_string(), lines, methods, }; Package { name: rel_path.to_str().unwrap_or_default().to_string(), classes: vec![class], } }) .collect(); Coverage { sources, packages } } pub fn output_cobertura( source_dir: Option<&Path>, results: &[ResultTuple], output_file: Option<&Path>, demangle: bool, pretty: bool, ) { let demangle_options = DemangleOptions::name_only(); let sources = vec![source_dir .unwrap_or_else(|| Path::new(".")) .display() .to_string()]; let coverage = get_coverage(results, sources, demangle, demangle_options); let mut writer = if pretty { Writer::new_with_indent(Cursor::new(vec![]), b' ', 4) } else { Writer::new(Cursor::new(vec![])) }; writer .write_event(Event::Decl(BytesDecl::new("1.0", None, None))) .unwrap(); writer .write_event(Event::DocType(BytesText::from_escaped( " coverage SYSTEM 'http://cobertura.sourceforge.net/xml/coverage-04.dtd'", ))) .unwrap(); let cov_tag = "coverage"; let mut cov = BytesStart::from_content(cov_tag, cov_tag.len()); let stats = coverage.get_stats(); cov.push_attribute(("lines-covered", stats.lines_covered.to_string().as_ref())); cov.push_attribute(("lines-valid", stats.lines_valid.to_string().as_ref())); cov.push_attribute(("line-rate", stats.line_rate().to_string().as_ref())); cov.push_attribute(( "branches-covered", stats.branches_covered.to_string().as_ref(), )); cov.push_attribute(("branches-valid", stats.branches_valid.to_string().as_ref())); 
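// branch-rate mirrors line-rate: branches_covered / branches_valid, or 0.0 when no branches were recorded (see CoverageStats::branch_rate above).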
cov.push_attribute(("branch-rate", stats.branch_rate().to_string().as_ref())); cov.push_attribute(("complexity", "0")); cov.push_attribute(("version", "1.9")); let secs = match SystemTime::now().duration_since(UNIX_EPOCH) { Ok(s) => s.as_secs().to_string(), Err(_) => String::from("0"), }; cov.push_attribute(("timestamp", secs.as_ref())); writer.write_event(Event::Start(cov)).unwrap(); // export header let sources_tag = "sources"; let source_tag = "source"; writer .write_event(Event::Start(BytesStart::from_content( sources_tag, sources_tag.len(), ))) .unwrap(); for path in &coverage.sources { writer .write_event(Event::Start(BytesStart::from_content( source_tag, source_tag.len(), ))) .unwrap(); writer .write_event(Event::Text(BytesText::new(path))) .unwrap(); writer .write_event(Event::End(BytesEnd::new(source_tag))) .unwrap(); } writer .write_event(Event::End(BytesEnd::new(sources_tag))) .unwrap(); // export packages let packages_tag = "packages"; let pack_tag = "package"; writer .write_event(Event::Start(BytesStart::from_content( packages_tag, packages_tag.len(), ))) .unwrap(); // Export the package for package in &coverage.packages { let mut pack = BytesStart::from_content(pack_tag, pack_tag.len()); pack.push_attribute(("name", package.name.as_ref())); let stats = package.get_stats(); pack.push_attribute(("line-rate", stats.line_rate().to_string().as_ref())); pack.push_attribute(("branch-rate", stats.branch_rate().to_string().as_ref())); pack.push_attribute(("complexity", stats.complexity.to_string().as_ref())); writer.write_event(Event::Start(pack)).unwrap(); // export_classes let classes_tag = "classes"; let class_tag = "class"; let methods_tag = "methods"; let method_tag = "method"; writer .write_event(Event::Start(BytesStart::from_content( classes_tag, classes_tag.len(), ))) .unwrap(); for class in &package.classes { let mut c = BytesStart::from_content(class_tag, class_tag.len()); c.push_attribute(("name", class.name.as_ref())); c.push_attribute(("filename", class.file_name.as_ref())); let stats = class.get_stats(); c.push_attribute(("line-rate", stats.line_rate().to_string().as_ref())); c.push_attribute(("branch-rate", stats.branch_rate().to_string().as_ref())); c.push_attribute(("complexity", stats.complexity.to_string().as_ref())); writer.write_event(Event::Start(c)).unwrap(); writer .write_event(Event::Start(BytesStart::from_content( methods_tag, methods_tag.len(), ))) .unwrap(); for method in &class.methods { let mut m = BytesStart::from_content(method_tag, method_tag.len()); m.push_attribute(("name", method.name.as_ref())); m.push_attribute(("signature", method.signature.as_ref())); let stats = method.get_stats(); m.push_attribute(("line-rate", stats.line_rate().to_string().as_ref())); m.push_attribute(("branch-rate", stats.branch_rate().to_string().as_ref())); m.push_attribute(("complexity", stats.complexity.to_string().as_ref())); writer.write_event(Event::Start(m)).unwrap(); write_lines(&mut writer, &method.lines); writer .write_event(Event::End(BytesEnd::new(method_tag))) .unwrap(); } writer .write_event(Event::End(BytesEnd::new(methods_tag))) .unwrap(); write_lines(&mut writer, &class.lines); } writer .write_event(Event::End(BytesEnd::new(class_tag))) .unwrap(); writer .write_event(Event::End(BytesEnd::new(classes_tag))) .unwrap(); writer .write_event(Event::End(BytesEnd::new(pack_tag))) .unwrap(); } writer .write_event(Event::End(BytesEnd::new(packages_tag))) .unwrap(); writer .write_event(Event::End(BytesEnd::new(cov_tag))) .unwrap(); let result = 
writer.into_inner().into_inner(); let mut file = BufWriter::new(get_target_output_writable(output_file)); file.write_all(&result).unwrap(); } fn write_lines(writer: &mut Writer>>, lines: &[Line]) { let lines_tag = "lines"; let line_tag = "line"; writer .write_event(Event::Start(BytesStart::from_content( lines_tag, lines_tag.len(), ))) .unwrap(); for line in lines { let mut l = BytesStart::from_content(line_tag, line_tag.len()); match line { Line::Plain { ref number, ref hits, } => { l.push_attribute(("number", number.to_string().as_ref())); l.push_attribute(("hits", hits.to_string().as_ref())); writer.write_event(Event::Empty(l)).unwrap(); } Line::Branch { ref number, ref hits, conditions, } => { l.push_attribute(("number", number.to_string().as_ref())); l.push_attribute(("hits", hits.to_string().as_ref())); l.push_attribute(("branch", "true")); writer.write_event(Event::Start(l)).unwrap(); let conditions_tag = "conditions"; let condition_tag = "condition"; writer .write_event(Event::Start(BytesStart::from_content( conditions_tag, conditions_tag.len(), ))) .unwrap(); for condition in conditions { let mut c = BytesStart::from_content(condition_tag, condition_tag.len()); c.push_attribute(("number", condition.number.to_string().as_ref())); c.push_attribute(("type", condition.cond_type.to_string().as_ref())); c.push_attribute(("coverage", condition.coverage.to_string().as_ref())); writer.write_event(Event::Empty(c)).unwrap(); } writer .write_event(Event::End(BytesEnd::new(conditions_tag))) .unwrap(); writer .write_event(Event::End(BytesEnd::new(line_tag))) .unwrap(); } } } writer .write_event(Event::End(BytesEnd::new(lines_tag))) .unwrap(); } #[cfg(test)] mod tests { use super::*; use crate::{CovResult, Function}; use std::io::Read; use std::{collections::BTreeMap, path::PathBuf}; use std::{fs::File, path::Path}; enum Result { Main, Test, } fn coverage_result(which: Result) -> CovResult { match which { Result::Main => CovResult { /* main.rs fn main() { let inp = "a"; if "a" == inp { println!("a"); } else if "b" == inp { println!("b"); } println!("what?"); } */ lines: [ (1, 1), (2, 1), (3, 2), (4, 1), (5, 0), (6, 0), (8, 1), (9, 1), ] .iter() .cloned() .collect(), branches: { let mut map = BTreeMap::new(); map.insert(3, vec![true, false]); map.insert(5, vec![false, false]); map }, functions: { let mut map = FxHashMap::default(); map.insert( "_ZN8cov_test4main17h7eb435a3fb3e6f20E".to_string(), Function { start: 1, executed: true, }, ); map }, }, Result::Test => CovResult { /* main.rs fn main() { } #[test] fn test_fn() { let s = "s"; if s == "s" { println!("test"); } println!("test"); } */ lines: [ (1, 2), (3, 0), (6, 2), (7, 1), (8, 2), (9, 1), (11, 1), (12, 2), ] .iter() .cloned() .collect(), branches: { let mut map = BTreeMap::new(); map.insert(8, vec![true, false]); map }, functions: { let mut map = FxHashMap::default(); map.insert( "_ZN8cov_test7test_fn17hbf19ec7bfabe8524E".to_string(), Function { start: 6, executed: true, }, ); map.insert( "_ZN8cov_test4main17h7eb435a3fb3e6f20E".to_string(), Function { start: 1, executed: false, }, ); map.insert( "_ZN8cov_test4main17h29b45b3d7d8851d2E".to_string(), Function { start: 1, executed: true, }, ); map.insert( "_ZN8cov_test7test_fn28_$u7b$$u7b$closure$u7d$$u7d$17hab7a162ac9b573fcE" .to_string(), Function { start: 6, executed: true, }, ); map.insert( "_ZN8cov_test4main17h679717cd8503f8adE".to_string(), Function { start: 1, executed: false, }, ); map }, }, } } fn read_file(path: &Path) -> String { let mut f = File::open(path).unwrap_or_else(|_| 
panic!("{:?} file not found", path.file_name())); let mut s = String::new(); f.read_to_string(&mut s).unwrap(); s } #[test] fn test_cobertura() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_cobertura.xml"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("src/main.rs"), PathBuf::from("src/main.rs"), coverage_result(Result::Main), )]; for pretty in [false, true] { output_cobertura(None, &results, Some(&file_path), true, pretty); let results = read_file(&file_path); assert!(results.contains(r#"."#)); assert!(results.contains(r#"package name="src/main.rs""#)); assert!(results.contains(r#"class name="main" filename="src/main.rs""#)); assert!(results.contains(r#"method name="cov_test::main""#)); assert!(results.contains(r#"line number="1" hits="1"/>"#)); assert!(results.contains(r#"line number="3" hits="2" branch="true""#)); assert!(results.contains(r#""#)); assert!(results.contains(r#"lines-covered="6""#)); assert!(results.contains(r#"lines-valid="8""#)); assert!(results.contains(r#"line-rate="0.75""#)); assert!(results.contains(r#"branches-covered="1""#)); assert!(results.contains(r#"branches-valid="4""#)); assert!(results.contains(r#"branch-rate="0.25""#)); } } #[test] fn test_cobertura_double_lines() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_cobertura.xml"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("src/main.rs"), PathBuf::from("src/main.rs"), coverage_result(Result::Test), )]; output_cobertura(None, &results, Some(file_path.as_ref()), true, true); let results = read_file(&file_path); assert!(results.contains(r#"."#)); assert!(results.contains(r#"package name="src/main.rs""#)); assert!(results.contains(r#"class name="main" filename="src/main.rs""#)); assert!(results.contains(r#"method name="cov_test::main""#)); assert!(results.contains(r#"method name="cov_test::test_fn""#)); assert!(results.contains(r#"lines-covered="7""#)); assert!(results.contains(r#"lines-valid="8""#)); assert!(results.contains(r#"line-rate="0.875""#)); assert!(results.contains(r#"branches-covered="1""#)); assert!(results.contains(r#"branches-valid="2""#)); assert!(results.contains(r#"branch-rate="0.5""#)); } #[test] fn test_cobertura_multiple_files() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_cobertura.xml"; let file_path = tmp_dir.path().join(file_name); let results = vec![ ( PathBuf::from("src/main.rs"), PathBuf::from("src/main.rs"), coverage_result(Result::Main), ), ( PathBuf::from("src/test.rs"), PathBuf::from("src/test.rs"), coverage_result(Result::Test), ), ]; output_cobertura(None, &results, Some(file_path.as_ref()), true, true); let results = read_file(&file_path); assert!(results.contains(r#"."#)); assert!(results.contains(r#"package name="src/main.rs""#)); assert!(results.contains(r#"class name="main" filename="src/main.rs""#)); assert!(results.contains(r#"package name="src/test.rs""#)); assert!(results.contains(r#"class name="test" filename="src/test.rs""#)); assert!(results.contains(r#"lines-covered="13""#)); assert!(results.contains(r#"lines-valid="16""#)); assert!(results.contains(r#"line-rate="0.8125""#)); assert!(results.contains(r#"branches-covered="2""#)); assert!(results.contains(r#"branches-valid="6""#)); assert!(results.contains(r#"branch-rate="0.3333333333333333""#)); } #[test] fn test_cobertura_source_root_none() { let tmp_dir = 
tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_cobertura.xml"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("src/main.rs"), PathBuf::from("src/main.rs"), CovResult::default(), )]; output_cobertura(None, &results, Some(&file_path), true, true); let results = read_file(&file_path); assert!(results.contains(r#"."#)); assert!(results.contains(r#"package name="src/main.rs""#)); } #[test] fn test_cobertura_source_root_some() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_cobertura.xml"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("main.rs"), PathBuf::from("main.rs"), CovResult::default(), )]; output_cobertura( Some(Path::new("src")), &results, Some(&file_path), true, true, ); let results = read_file(&file_path); assert!(results.contains(r#"src"#)); assert!(results.contains(r#"package name="main.rs""#)); } } grcov-0.8.22/src/covdir.rs000064400000000000000000000102331046102023000134570ustar 00000000000000use serde_json::{json, map::Map}; use std::collections::BTreeMap; pub use crate::defs::*; impl CDStats { pub fn new(total: usize, covered: usize, precision: usize) -> Self { let missed = total - covered; Self { total, covered, missed, percent: Self::get_percent(covered, total, precision), } } pub fn add(&mut self, other: &Self) { // Add stats to self without recomputing the percentage because it's time consuming. // So once all the stats are merged into one for a directory // then need to call set_percent() self.total += other.total; self.covered += other.covered; self.missed += other.missed; } pub fn set_percent(&mut self, precision: usize) { self.percent = Self::get_percent(self.covered, self.total, precision); } pub fn get_percent(x: usize, y: usize, precision: usize) -> f64 { if y != 0 { // This function calculates the coverage percentage with rounded decimal points up to `precision`. // However the `serdes_json` will determine the final format of `coveragePercent` in the report. // If `precision` is 0, then `coveragePercent` output will still have 1 (null) decimal place, i.e. 98.321... -> 98.0. // If `coveragePercent` has multiple trailing zeros, they will be truncated to 1 decimal place i.e 98.0000... -> 98.0. // These limitation are considered good enough behavior for covdir report, for an improved output // a custom serdes_json serializer for `f64` would have to be written. 
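// That is: percent = round((x / y) * 10^(precision + 2)) / 10^precision. For example, x = 7, y = 8, precision = 2 gives round(0.875 * 10000) / 100 = 87.5.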
f64::round(x as f64 / (y as f64) * f64::powi(10.0, precision as i32 + 2)) / f64::powi(10.0, precision as i32) } else { 0.0 } } } impl CDFileStats { pub fn new(name: String, coverage: BTreeMap, precision: usize) -> Self { let (total, covered, lines) = Self::get_coverage(coverage); Self { name, stats: CDStats::new(total, covered, precision), coverage: lines, } } fn get_coverage(coverage: BTreeMap) -> (usize, usize, Vec) { let mut covered = 0; let last_line = *coverage.keys().last().unwrap_or(&0) as usize; let total = coverage.len(); let mut lines: Vec = vec![-1; last_line]; for (line_num, line_count) in coverage.iter() { if let Some(line) = lines.get_mut((*line_num - 1) as usize) { *line = *line_count as i64; covered += (*line_count > 0) as usize; } } (total, covered, lines) } pub fn to_json(&self) -> serde_json::Value { json!({ "name": self.name, "linesTotal": self.stats.total, "linesCovered": self.stats.covered, "linesMissed": self.stats.missed, "coveragePercent": self.stats.percent, "coverage": self.coverage, }) } } impl CDDirStats { pub fn new(name: String) -> Self { Self { name, files: Vec::new(), dirs: Vec::new(), stats: Default::default(), } } pub fn set_stats(&mut self, precision: usize) { for file in self.files.iter() { self.stats.add(&file.stats); } for dir in self.dirs.iter() { let mut dir = dir.borrow_mut(); dir.set_stats(precision); self.stats.add(&dir.stats); } self.stats.set_percent(precision); } pub fn into_json(self) -> serde_json::Value { let mut children = Map::new(); for file in self.files { children.insert(file.name.clone(), file.to_json()); } for dir in self.dirs { let dir = dir.take(); children.insert(dir.name.clone(), dir.into_json()); } json!({ "name": self.name, "linesTotal": self.stats.total, "linesCovered": self.stats.covered, "linesMissed": self.stats.missed, "coveragePercent": self.stats.percent, "children": children, }) } } grcov-0.8.22/src/defs.rs000064400000000000000000000065201046102023000131160ustar 00000000000000use crossbeam_channel::{Receiver, Sender}; use rustc_hash::FxHashMap; use serde::ser::{Serialize, Serializer}; use std::cell::RefCell; use std::collections::BTreeMap; use std::fmt::{Display, Formatter}; use std::path::PathBuf; use std::rc::Rc; use std::sync::Mutex; #[derive(Debug, Clone, PartialEq, Eq)] pub struct Function { pub start: u32, pub executed: bool, } #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct CovResult { pub lines: BTreeMap, pub branches: BTreeMap>, pub functions: FunctionMap, } #[derive(Debug, PartialEq, Eq, Copy, Clone)] pub enum ItemFormat { Gcno, Profraw, Info, JacocoXml, } #[derive(Debug)] pub struct GcnoBuffers { pub stem: String, pub gcno_buf: Vec, pub gcda_buf: Vec>, } #[derive(Debug)] pub enum ItemType { Path((String, PathBuf)), Paths(Vec), Content(Vec), Buffers(GcnoBuffers), } #[derive(Debug)] pub struct WorkItem { pub format: ItemFormat, pub item: ItemType, pub name: String, } pub type FunctionMap = FxHashMap; pub type JobReceiver = Receiver>; pub type JobSender = Sender>; pub type CovResultMap = FxHashMap; pub type SyncCovResultMap = Mutex; pub type ResultTuple = (PathBuf, PathBuf, CovResult); #[derive(Debug, Default)] pub struct CDStats { pub total: usize, pub covered: usize, pub missed: usize, pub percent: f64, } #[derive(Debug)] pub struct CDFileStats { pub name: String, pub stats: CDStats, pub coverage: Vec, } #[derive(Debug, Default)] pub struct CDDirStats { pub name: String, pub files: Vec, pub dirs: Vec>>, pub stats: CDStats, } #[derive(Debug)] pub struct HtmlItem { pub abs_path: PathBuf, pub rel_path: 
PathBuf, pub result: CovResult, } #[derive(Clone, Debug, Default, serde::Serialize)] pub struct HtmlStats { pub total_lines: usize, pub covered_lines: usize, pub total_funs: usize, pub covered_funs: usize, pub total_branches: usize, pub covered_branches: usize, } #[derive(Clone, Debug, serde::Serialize)] pub struct HtmlFileStats { pub stats: HtmlStats, } #[derive(Clone, Debug, serde::Serialize)] pub struct HtmlDirStats { pub files: BTreeMap, pub stats: HtmlStats, } #[derive(Debug, Default, serde::Serialize)] pub struct HtmlGlobalStats { pub dirs: BTreeMap, pub stats: HtmlStats, } pub type HtmlJobReceiver = Receiver>; pub type HtmlJobSender = Sender>; pub enum StringOrRef<'a> { S(String), R(&'a String), } impl Display for StringOrRef<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { StringOrRef::S(s) => write!(f, "{}", s), StringOrRef::R(s) => write!(f, "{}", s), } } } impl Serialize for StringOrRef<'_> { fn serialize(&self, serializer: S) -> Result where S: Serializer, { match self { StringOrRef::S(s) => serializer.serialize_str(s), StringOrRef::R(s) => serializer.serialize_str(s), } } } pub struct JacocoReport { pub lines: BTreeMap, pub branches: BTreeMap>, } grcov-0.8.22/src/file_filter.rs000064400000000000000000000076351046102023000144710ustar 00000000000000use regex::Regex; use std::path::Path; pub enum FilterType { Line(u32), Branch(u32), Both(u32), } #[derive(Default)] pub struct FileFilter { excl_line: Option, excl_start: Option, excl_stop: Option, excl_br_line: Option, excl_br_start: Option, excl_br_stop: Option, } impl FileFilter { pub fn new( excl_line: Option, excl_start: Option, excl_stop: Option, excl_br_line: Option, excl_br_start: Option, excl_br_stop: Option, ) -> Self { Self { excl_line, excl_start, excl_stop, excl_br_line, excl_br_start, excl_br_stop, } } pub fn create(&self, file: &Path) -> Vec { if self.excl_line.is_none() && self.excl_start.is_none() && self.excl_br_line.is_none() && self.excl_br_start.is_none() { return Vec::new(); } let file = std::fs::read_to_string(file); let file = if let Ok(file) = file { file } else { return Vec::new(); }; let mut ignore_br = false; let mut ignore = false; file.split('\n') .enumerate() .filter_map(move |(number, line)| { // Line numbers are 1-based. let number = (number + 1) as u32; // The file is split on \n, which may result in a trailing \r // on Windows. Remove it. let line = line.strip_suffix('\r').unwrap_or(line); // End a branch ignore region. Region endings are exclusive. if ignore_br && self.excl_br_stop.as_ref().is_some_and(|f| f.is_match(line)) { ignore_br = false } // End a line ignore region. Region endings are exclusive. if ignore && self.excl_stop.as_ref().is_some_and(|f| f.is_match(line)) { ignore = false } // Start a branch ignore region. Region starts are inclusive. if !ignore_br && self .excl_br_start .as_ref() .is_some_and(|f| f.is_match(line)) { ignore_br = true; } // Start a line ignore region. Region starts are inclusive. if !ignore && self.excl_start.as_ref().is_some_and(|f| f.is_match(line)) { ignore = true; } if ignore_br { // Consuming code has to eliminate each of these // individually, so it has to know when both are ignored vs. // either. if ignore { Some(FilterType::Both(number)) } else { Some(FilterType::Branch(number)) } } else if ignore { Some(FilterType::Line(number)) } else if self.excl_br_line.as_ref().is_some_and(|f| f.is_match(line)) { // Single line exclusion. 
If single line exclusions occur // inside a region they are meaningless (would be applied // anway), so they are lower priority. if self.excl_line.as_ref().is_some_and(|f| f.is_match(line)) { Some(FilterType::Both(number)) } else { Some(FilterType::Branch(number)) } } else if self.excl_line.as_ref().is_some_and(|f| f.is_match(line)) { Some(FilterType::Line(number)) } else { None } }) .collect() } } grcov-0.8.22/src/filter.rs000064400000000000000000000105451046102023000134640ustar 00000000000000use crate::defs::*; pub fn is_covered(result: &CovResult) -> bool { // For C/C++ source files, we can consider a file as being uncovered // when all its source lines are uncovered. let any_line_covered = result .lines .values() .any(|&execution_count| execution_count != 0); if !any_line_covered { return false; } // For JavaScript files, we can't do the same, as the top-level is always // executed, even if it just contains declarations. So, we need to check if // all its functions, except the top-level, are uncovered. let any_function_covered = result .functions .iter() .any(|(name, function)| function.executed && name != "top-level"); result.functions.len() <= 1 || any_function_covered } #[cfg(test)] mod tests { use super::*; use rustc_hash::FxHashMap; #[test] fn test_covered() { let mut functions: FunctionMap = FxHashMap::default(); functions.insert( "f1".to_string(), Function { start: 1, executed: true, }, ); functions.insert( "f2".to_string(), Function { start: 2, executed: false, }, ); let result = CovResult { lines: [(1, 21), (2, 7), (7, 0)].iter().cloned().collect(), branches: [].iter().cloned().collect(), functions, }; assert!(is_covered(&result)); } #[test] fn test_covered_no_functions() { let result = CovResult { lines: [(1, 21), (2, 7), (7, 0)].iter().cloned().collect(), branches: [].iter().cloned().collect(), functions: FxHashMap::default(), }; assert!(is_covered(&result)); } #[test] fn test_uncovered_no_lines_executed() { let mut functions: FunctionMap = FxHashMap::default(); functions.insert( "f1".to_string(), Function { start: 1, executed: true, }, ); functions.insert( "f2".to_string(), Function { start: 2, executed: false, }, ); let result = CovResult { lines: [(1, 0), (2, 0), (7, 0)].iter().cloned().collect(), branches: [].iter().cloned().collect(), functions: FxHashMap::default(), }; assert!(!is_covered(&result)); } #[test] fn test_covered_functions_executed() { let mut functions: FunctionMap = FxHashMap::default(); functions.insert( "top-level".to_string(), Function { start: 1, executed: true, }, ); functions.insert( "f".to_string(), Function { start: 2, executed: true, }, ); let result = CovResult { lines: [(1, 21), (2, 7), (7, 0)].iter().cloned().collect(), branches: [].iter().cloned().collect(), functions, }; assert!(is_covered(&result)); } #[test] fn test_covered_toplevel_executed() { let mut functions: FunctionMap = FxHashMap::default(); functions.insert( "top-level".to_string(), Function { start: 1, executed: true, }, ); let result = CovResult { lines: [(1, 21), (2, 7), (7, 0)].iter().cloned().collect(), branches: [].iter().cloned().collect(), functions, }; assert!(is_covered(&result)); } #[test] fn test_uncovered_functions_not_executed() { let mut functions: FunctionMap = FxHashMap::default(); functions.insert( "top-level".to_string(), Function { start: 1, executed: true, }, ); functions.insert( "f".to_string(), Function { start: 7, executed: false, }, ); let result = CovResult { lines: [(1, 21), (2, 7), (7, 0)].iter().cloned().collect(), branches: 
[].iter().cloned().collect(), functions, }; assert!(!is_covered(&result)); } } grcov-0.8.22/src/gcov.rs000064400000000000000000000067041046102023000131370ustar 00000000000000use lazy_static::lazy_static; use semver::Version; use std::env; use std::fmt; use std::path::Path; use std::process::Command; #[derive(Debug)] pub enum GcovToolError { ProcessFailure, Failure((String, String, String)), } impl fmt::Display for GcovToolError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { GcovToolError::ProcessFailure => write!(f, "Failed to execute gcov process"), GcovToolError::Failure((ref path, ref stdout, ref stderr)) => { writeln!(f, "gcov execution failed on {}", path)?; writeln!(f, "gcov stdout: {}", stdout)?; writeln!(f, "gcov stderr: {}", stderr) } } } } fn get_gcov() -> String { if let Ok(s) = env::var("GCOV") { s } else { "gcov".to_string() } } pub fn run_gcov( gcno_path: &Path, branch_enabled: bool, working_dir: &Path, ) -> Result<(), GcovToolError> { let mut command = Command::new(get_gcov()); let command = if branch_enabled { command.arg("-b").arg("-c") } else { &mut command }; let status = command .arg(gcno_path) .arg("-i") // Generate intermediate gcov format, faster to parse. .current_dir(working_dir); let output = if let Ok(output) = status.output() { output } else { return Err(GcovToolError::ProcessFailure); }; if !output.status.success() { return Err(GcovToolError::Failure(( gcno_path.to_str().unwrap().to_string(), String::from_utf8_lossy(&output.stdout).to_string(), String::from_utf8_lossy(&output.stderr).to_string(), ))); } Ok(()) } pub fn get_gcov_version() -> &'static Version { lazy_static! { static ref V: Version = { let output = Command::new(get_gcov()) .arg("--version") .output() .expect("Failed to execute `gcov`. `gcov` is required (it is part of GCC)."); assert!(output.status.success(), "`gcov` failed to execute."); let output = String::from_utf8(output.stdout).unwrap(); parse_version(&output) }; } &V } pub fn get_gcov_output_ext() -> &'static str { lazy_static! 
{ static ref E: &'static str = { let min_ver = Version::new(9, 1, 0); if get_gcov_version() >= &min_ver { ".gcov.json.gz" } else { ".gcov" } }; } &E } fn parse_version(gcov_output: &str) -> Version { let version = gcov_output .split([' ', '\n']) .filter_map(|value| Version::parse(value.trim()).ok()) .next_back(); assert!(version.is_some(), "no version found for `gcov`."); version.unwrap() } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_version() { assert_eq!( parse_version("gcov (Ubuntu 4.3.0-12ubuntu2) 4.3.0 20170406"), Version::new(4, 3, 0) ); assert_eq!( parse_version("gcov (Ubuntu 4.9.0-12ubuntu2) 4.9.0 20170406"), Version::new(4, 9, 0) ); assert_eq!( parse_version("gcov (Ubuntu 6.3.0-12ubuntu2) 6.3.0 20170406"), Version::new(6, 3, 0) ); assert_eq!(parse_version("gcov (GCC) 12.2.0"), Version::new(12, 2, 0)); assert_eq!(parse_version("gcov (GCC) 12.2.0\r"), Version::new(12, 2, 0)); } } grcov-0.8.22/src/html.rs000064400000000000000000000443331046102023000131450ustar 00000000000000use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use serde_json::value::{from_value, to_value, Value}; use std::borrow::Cow; use std::collections::HashMap; use std::collections::{btree_map, BTreeMap}; use std::fs::{self, File}; use std::io::{BufReader, Read, Write}; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; use tera::try_get_value; use crate::defs::*; impl HtmlStats { #[inline(always)] pub fn add(&mut self, stats: &Self) { self.total_lines += stats.total_lines; self.covered_lines += stats.covered_lines; self.total_funs += stats.total_funs; self.covered_funs += stats.covered_funs; self.total_branches += stats.total_branches; self.covered_branches += stats.covered_branches; } } #[derive(Clone, Debug)] pub struct Config { hi_limit: f64, med_limit: f64, fn_hi_limit: f64, fn_med_limit: f64, branch_hi_limit: f64, branch_med_limit: f64, date: DateTime, } impl Config { fn new(cfg: &ConfigFile) -> Config { Config { hi_limit: cfg.hi_limit.unwrap_or(90.), med_limit: cfg.med_limit.unwrap_or(75.), fn_hi_limit: cfg.fn_hi_limit.unwrap_or(90.), fn_med_limit: cfg.fn_med_limit.unwrap_or(75.), branch_hi_limit: cfg.branch_hi_limit.unwrap_or(90.), branch_med_limit: cfg.branch_med_limit.unwrap_or(75.), date: Utc::now(), } } } #[derive(Deserialize, Debug, Default)] pub struct ConfigFile { hi_limit: Option, med_limit: Option, fn_hi_limit: Option, fn_med_limit: Option, branch_hi_limit: Option, branch_med_limit: Option, templates: Option>, } impl ConfigFile { fn load(config: Option<&Path>) -> ConfigFile { if let Some(path) = config { let file = File::open(path).unwrap(); let reader = BufReader::new(file); serde_json::from_reader(reader).unwrap() } else { Default::default() } } } static BULMA_VERSION: &str = "0.9.1"; fn load_template(path: &str) -> String { fs::read_to_string(path).unwrap() } fn get_templates(user_templates: &Option>) -> HashMap { let mut result: HashMap = HashMap::from([ ("macros.html", include_str!("templates/macros.html")), ("base.html", include_str!("templates/base.html")), ("index.html", include_str!("templates/index.html")), ("file.html", include_str!("templates/file.html")), ( BadgeStyle::Flat.template_name(), include_str!("templates/badges/flat.svg"), ), ( BadgeStyle::FlatSquare.template_name(), include_str!("templates/badges/flat_square.svg"), ), ( BadgeStyle::ForTheBadge.template_name(), include_str!("templates/badges/for_the_badge.svg"), ), ( BadgeStyle::Plastic.template_name(), include_str!("templates/badges/plastic.svg"), ), ( 
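The detected version decides which intermediate files the consumer later looks for and which parser it picks. A sketch using the helpers above (`parse_version` is private and shown for illustration; the second assertion assumes a gcov of at least 9.1.0 is installed):

```rust
use semver::Version;

// parse_version() keeps the last whitespace-separated token that parses as
// semver, so distro decorations like "(Ubuntu 4.9.0-12ubuntu2)" are skipped.
assert_eq!(parse_version("gcov (GCC) 12.2.0"), Version::new(12, 2, 0));

// gcov >= 9.1.0 writes gzip-compressed JSON intermediates, older ones plain text.
assert_eq!(get_gcov_output_ext(), ".gcov.json.gz");
```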
BadgeStyle::Social.template_name(), include_str!("templates/badges/social.svg"), ), ]) .iter() .map(|(k, v)| (k.to_string(), v.to_string())) .collect(); if let Some(user_templates) = user_templates { let user_templates: HashMap = user_templates .iter() .map(|(k, v)| (k.to_owned(), load_template(v))) .collect(); result.extend(user_templates); } result } pub fn get_config(output_config_file: Option<&Path>) -> (Tera, Config) { let user_conf = ConfigFile::load(output_config_file); let conf = Config::new(&user_conf); let mut tera = Tera::default(); tera.register_filter("severity", conf.clone()); tera.register_function("percent", percent); tera.add_raw_templates(get_templates(&user_conf.templates)) .unwrap(); (tera, conf) } impl tera::Filter for Config { fn filter(&self, value: &Value, args: &HashMap) -> tera::Result { let rate = try_get_value!("severity", "value", f64, value); let kind = match args.get("kind") { Some(val) => try_get_value!("severity", "kind", String, val), None => "lines".to_string(), }; fn severity(hi: f64, medium: f64, rate: f64) -> Value { to_value(if hi <= rate && rate <= 100. { "success" } else if medium <= rate && rate < hi { "warning" } else { "danger" }) .unwrap() } match kind.as_ref() { "lines" => Ok(severity(self.hi_limit, self.med_limit, rate)), "branches" => Ok(severity(self.branch_hi_limit, self.branch_med_limit, rate)), "functions" => Ok(severity(self.fn_hi_limit, self.fn_med_limit, rate)), _ => Err(tera::Error::msg("Unsupported kind")), } } } fn create_parent(path: &Path) { let dest_parent = path.parent().unwrap(); if !dest_parent.exists() && fs::create_dir_all(dest_parent).is_err() { panic!("Cannot create parent directory: {:?}", dest_parent); } } fn add_html_ext(path: &Path) -> PathBuf { if let Some(ext) = path.extension() { let mut ext = ext.to_str().unwrap().to_owned(); ext.push_str(".html"); path.with_extension(ext) } else { path.with_extension(".html") } } fn get_stats(result: &CovResult) -> HtmlStats { let total_lines = result.lines.len(); let covered_lines = result.lines.values().filter(|x| **x > 0).count(); let total_funs = result.functions.len(); let covered_funs = result.functions.values().filter(|f| f.executed).count(); let total_branches = result.branches.values().map(|v| v.len()).sum(); let covered_branches = result .branches .values() .map(|v| v.iter().filter(|x| **x).count()) .sum(); HtmlStats { total_lines, covered_lines, total_funs, covered_funs, total_branches, covered_branches, } } #[inline(always)] fn get_percentage_of_covered_lines(covered_lines: usize, total_lines: usize) -> f64 { if total_lines != 0 { covered_lines as f64 / total_lines as f64 * 100.0 } else { // If the file is empty (no lines) then the coverage // must be 100% (0% means "bad" which is not the case). 
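`Config` doubles as the Tera filter behind `severity`, mapping a percentage to a Bulma color class; the thresholds come from the optional JSON config file (`hi_limit`, `med_limit`, and the `fn_*`/`branch_*` variants). A sketch with the defaults (hi = 90, med = 75):

```rust
use std::collections::HashMap;

use serde_json::json;
use tera::Filter;

let (_tera, conf) = get_config(None);
let mut args = HashMap::new();
args.insert("kind".to_string(), json!("lines"));

assert_eq!(conf.filter(&json!(95.0), &args).unwrap(), json!("success")); // rate >= 90
assert_eq!(conf.filter(&json!(80.0), &args).unwrap(), json!("warning")); // 75 <= rate < 90
assert_eq!(conf.filter(&json!(40.0), &args).unwrap(), json!("danger"));
```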
100.0 } } fn percent(args: &HashMap) -> tera::Result { if let (Some(n), Some(d)) = (args.get("num"), args.get("den")) { if let (Ok(num), Ok(den)) = ( from_value::(n.clone()), from_value::(d.clone()), ) { Ok(to_value(get_percentage_of_covered_lines(num, den)).unwrap()) } else { Err(tera::Error::msg("Invalid arguments")) } } else { Err(tera::Error::msg("Not enough arguments")) } } fn get_base(rel_path: &Path) -> String { let count = rel_path.components().count() - 1 /* -1 for the file itself */; "../".repeat(count) } fn get_dirs_result(global: Arc>, rel_path: &Path, stats: &HtmlStats) { let parent = rel_path.parent().unwrap().to_str().unwrap().to_string(); let file_name = rel_path.file_name().unwrap().to_str().unwrap().to_string(); let fs = HtmlFileStats { stats: stats.clone(), }; let mut global = global.lock().unwrap(); global.stats.add(stats); let entry = global.dirs.entry(parent); match entry { btree_map::Entry::Occupied(ds) => { let ds = ds.into_mut(); ds.stats.add(stats); ds.files.insert(file_name, fs); } btree_map::Entry::Vacant(v) => { let mut files = BTreeMap::new(); files.insert(file_name, fs); v.insert(HtmlDirStats { files, stats: stats.clone(), }); } }; } use tera::{Context, Tera}; fn make_context() -> Context { let mut ctx = Context::new(); let ver = std::env::var("BULMA_VERSION").map_or(BULMA_VERSION.into(), |v| v); ctx.insert("bulma_version", &ver); ctx } pub fn gen_index( tera: &Tera, global: &HtmlGlobalStats, conf: &Config, output: &Path, branch_enabled: bool, precision: usize, ) { let output_file = output.join("index.html"); create_parent(&output_file); let mut output_stream = match File::create(&output_file) { Err(_) => { eprintln!("Cannot create file {:?}", output_file); return; } Ok(f) => f, }; let mut ctx = make_context(); let empty: &[&str] = &[]; ctx.insert("date", &conf.date); ctx.insert("current", "top_level"); ctx.insert("parents", empty); ctx.insert("stats", &global.stats); ctx.insert("precision", &precision); ctx.insert("items", &global.dirs); ctx.insert("kind", "Directory"); ctx.insert("branch_enabled", &branch_enabled); let out = tera.render("index.html", &ctx).unwrap(); if output_stream.write_all(out.as_bytes()).is_err() { eprintln!("Cannot write the file {:?}", output_file); return; } for (dir_name, dir_stats) in global.dirs.iter() { gen_dir_index( tera, dir_name, dir_stats, conf, output, branch_enabled, precision, ); } } pub fn gen_dir_index( tera: &Tera, dir_name: &str, dir_stats: &HtmlDirStats, conf: &Config, output: &Path, branch_enabled: bool, precision: usize, ) { let index = Path::new(dir_name).join("index.html"); let layers = index.components().count() - 1; let prefix = "../".repeat(layers) + "index.html"; let output_file = output.join(index); create_parent(&output_file); let mut output = match File::create(&output_file) { Err(_) => { eprintln!("Cannot create file {:?}", output_file); return; } Ok(f) => f, }; let mut ctx = make_context(); ctx.insert("date", &conf.date); ctx.insert("bulma_version", BULMA_VERSION); ctx.insert("current", dir_name); ctx.insert("parents", &[(prefix, "top_level")]); ctx.insert("stats", &dir_stats.stats); ctx.insert("items", &dir_stats.files); ctx.insert("kind", "File"); ctx.insert("branch_enabled", &branch_enabled); ctx.insert("precision", &precision); let out = tera.render("index.html", &ctx).unwrap(); if output.write_all(out.as_bytes()).is_err() { eprintln!("Cannot write the file {:?}", output_file); } } fn gen_html( tera: &Tera, path: &Path, result: &CovResult, conf: &Config, output: &Path, rel_path: &Path, global: Arc>, 
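`get_base` is what lets every generated page link back to the report root: one `../` per directory level below it. A small illustration of the two private path helpers:

```rust
use std::path::Path;

// One "../" for each component except the file itself.
assert_eq!(get_base(Path::new("src/html/file.rs")), "../../");
assert_eq!(get_base(Path::new("main.rs")), "");

// Generated pages keep the source extension and append ".html".
assert_eq!(add_html_ext(Path::new("src/main.rs")), Path::new("src/main.rs.html"));
```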
branch_enabled: bool, precision: usize, ) { if !rel_path.is_relative() { return; } let mut f = match File::open(path) { Err(_) => { //eprintln!("Warning: cannot open file {:?}", path); return; } Ok(f) => f, }; let stats = get_stats(result); get_dirs_result(global, rel_path, &stats); let output_file = output.join(add_html_ext(rel_path)); create_parent(&output_file); let mut output = match File::create(&output_file) { Err(_) => { eprintln!("Cannot create file {:?}", output_file); return; } Ok(f) => f, }; let base_url = get_base(rel_path); let filename = rel_path.file_name().unwrap().to_str().unwrap(); let parent = rel_path.parent().unwrap().to_str().unwrap().to_string(); let mut index_url = base_url; index_url.push_str("index.html"); let mut ctx = make_context(); ctx.insert("date", &conf.date); ctx.insert("bulma_version", BULMA_VERSION); ctx.insert("current", filename); ctx.insert( "parents", &[ (index_url.as_str(), "top_level"), ("./index.html", parent.as_str()), ], ); ctx.insert("stats", &stats); ctx.insert("branch_enabled", &branch_enabled); ctx.insert("precision", &precision); let mut file_buf = Vec::new(); if let Err(e) = f.read_to_end(&mut file_buf) { eprintln!("Failed to read {}: {}", path.display(), e); return; } let file_utf8 = String::from_utf8_lossy(&file_buf); if matches!(&file_utf8, Cow::Owned(_)) { // from_utf8_lossy needs to reallocate only when invalid UTF-8, warn. eprintln!( "Warning: invalid utf-8 characters in source file {}. They will be replaced by U+FFFD", path.display() ); } let items = file_utf8 .lines() .enumerate() .map(move |(i, l)| { let index = i + 1; let count = result .lines .get(&(index as u32)) .map(|&v| v as i64) .unwrap_or(-1); (index, count, l) }) .collect::>(); ctx.insert("items", &items); let out = tera.render("file.html", &ctx).unwrap(); if output.write_all(out.as_bytes()).is_err() { eprintln!("Cannot write the file {:?}", output_file); } } pub fn consumer_html( tera: &Tera, receiver: HtmlJobReceiver, global: Arc>, output: &Path, conf: Config, branch_enabled: bool, precision: usize, ) { while let Ok(job) = receiver.recv() { if job.is_none() { break; } let job = job.unwrap(); gen_html( tera, &job.abs_path, &job.result, &conf, output, &job.rel_path, global.clone(), branch_enabled, precision, ); } } /// Different available styles to render badges with [`gen_badge`]. #[derive(Clone, Copy)] pub enum BadgeStyle { Flat, FlatSquare, ForTheBadge, Plastic, Social, } impl BadgeStyle { /// Name of the template as registered with Tera. fn template_name(self) -> &'static str { match self { Self::Flat => "badge_flat.svg", Self::FlatSquare => "badge_flat_square.svg", Self::ForTheBadge => "badge_for_the_badge.svg", Self::Plastic => "badge_plastic.svg", Self::Social => "badge_social.svg", } } /// Output path where the generator writes the file to. fn path(self) -> &'static Path { Path::new(match self { Self::Flat => "badges/flat.svg", Self::FlatSquare => "badges/flat_square.svg", Self::ForTheBadge => "badges/for_the_badge.svg", Self::Plastic => "badges/plastic.svg", Self::Social => "badges/social.svg", }) } /// Create an iterator over all possible values of this enum. pub fn iter() -> impl Iterator { [ Self::Flat, Self::FlatSquare, Self::ForTheBadge, Self::Plastic, Self::Social, ] .iter() .copied() } } /// Generate coverage badges, typically for use in a README.md if the HTML output is hosted on a /// website like GitHub Pages. 
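Each badge style defined below pairs a Tera template name with a fixed output path under `badges/`, and `gen_coverage_json` (further down) writes a shields.io-endpoint payload next to them. A sketch of both; the JSON values are made up:

```rust
use serde_json::json;

// BadgeStyle::iter() walks all five styles:
for style in BadgeStyle::iter() {
    println!("{} -> {}", style.template_name(), style.path().display());
}
// badge_flat.svg          -> badges/flat.svg
// badge_flat_square.svg   -> badges/flat_square.svg
// badge_for_the_badge.svg -> badges/for_the_badge.svg
// badge_plastic.svg       -> badges/plastic.svg
// badge_social.svg        -> badges/social.svg

// Shape of the coverage.json emitted by gen_coverage_json:
let sample = json!({
    "schemaVersion": 1,
    "label": "coverage",
    "message": "87.50%", // formatted with the requested precision
    "color": "yellow",   // med_limit <= 87.5 < hi_limit with the defaults
});
println!("{sample}");
```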
pub fn gen_badge(tera: &Tera, stats: &HtmlStats, conf: &Config, output: &Path, style: BadgeStyle) {
    let output_file = output.join(style.path());
    create_parent(&output_file);
    let mut output_stream = match File::create(&output_file) {
        Err(_) => {
            eprintln!("Cannot create file {:?}", output_file);
            return;
        }
        Ok(f) => f,
    };
    let mut ctx = make_context();
    ctx.insert(
        "current",
        &(get_percentage_of_covered_lines(stats.covered_lines, stats.total_lines) as usize),
    );
    ctx.insert("hi_limit", &conf.hi_limit);
    ctx.insert("med_limit", &conf.med_limit);
    let out = tera.render(style.template_name(), &ctx).unwrap();
    if output_stream.write_all(out.as_bytes()).is_err() {
        eprintln!("Cannot write the file {:?}", output_file);
    }
}

/// Generate a coverage.json file that can be used with shields.io/endpoint to dynamically create
/// badges from the contained information.
///
/// For example, when hosting the coverage output on GitHub Pages, the file would be available at
/// `https://<user>.github.io/<project>/coverage.json` and could be used with shields.io by
/// using the following URL to generate a coverage badge:
///
/// ```text
/// https://shields.io/endpoint?url=https://<user>.github.io/<project>/coverage.json
/// ```
///
/// `<user>` and `<project>` should be replaced with a real username and project name
/// respectively, for the URL to work.
pub fn gen_coverage_json(stats: &HtmlStats, conf: &Config, output: &Path, precision: usize) {
    #[derive(Serialize)]
    #[serde(rename_all = "camelCase")]
    struct CoverageData {
        schema_version: u32,
        label: &'static str,
        message: String,
        color: &'static str,
    }

    let output_file = output.join("coverage.json");
    create_parent(&output_file);
    let mut output_stream = match File::create(&output_file) {
        Err(_) => {
            eprintln!("Cannot create file {:?}", output_file);
            return;
        }
        Ok(f) => f,
    };

    let coverage = get_percentage_of_covered_lines(stats.covered_lines, stats.total_lines);
    let res = serde_json::to_writer(
        &mut output_stream,
        &CoverageData {
            schema_version: 1,
            label: "coverage",
            message: format!("{:.precision$}%", coverage),
            color: if coverage >= conf.hi_limit {
                "green"
            } else if coverage >= conf.med_limit {
                "yellow"
            } else {
                "red"
            },
        },
    );

    if res.is_err() {
        eprintln!("Cannot write the file {:?}", output_file);
    }
}

#[cfg(test)]
mod tests {
    use super::get_percentage_of_covered_lines;

    #[test]
    fn test_get_percentage_of_covered_lines() {
        assert_eq!(get_percentage_of_covered_lines(5, 5), 100.0);
        assert_eq!(get_percentage_of_covered_lines(1, 2), 50.0);
        assert_eq!(get_percentage_of_covered_lines(200, 500), 40.0);
        assert_eq!(get_percentage_of_covered_lines(0, 0), 100.0);
        assert_eq!(get_percentage_of_covered_lines(5, 0), 100.0);
    }
}

grcov-0.8.22/src/lib.rs000064400000000000000000000363531046102023000127500ustar 00000000000000
#![recursion_limit = "1024"]
#![allow(clippy::too_many_arguments)]

mod defs;
pub use crate::defs::*;

mod producer;
pub use crate::producer::*;

mod gcov;
pub use crate::gcov::*;

mod llvm_tools;
pub use crate::llvm_tools::*;

mod parser;
pub use crate::parser::*;

mod filter;
pub use crate::filter::*;

mod symlink;

mod path_rewriting;
pub use crate::path_rewriting::*;

mod output;
pub use crate::output::*;

mod cobertura;
pub use crate::cobertura::*;

mod reader;
pub use crate::reader::*;

mod covdir;
pub mod html;

mod file_filter;
pub use crate::file_filter::*;

use log::{error, warn};
use std::fs;
use std::io::{BufReader, Cursor};
use std::{
    collections::{btree_map, hash_map},
    path::Path,
};
use walkdir::WalkDir;

// Merge results, without caring about duplicate lines (they will be removed at the end).
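Concretely: line counts are added with saturation at `u64::MAX` (the returned `bool` reports that a count was clamped, so callers can warn once), branch-taken vectors are OR-ed element-wise and extended to the longer length, and a function counts as executed if it executed on either side. A sketch, building the maps the same way the tests below do:

```rust
use rustc_hash::FxHashMap;

let mut acc = CovResult {
    lines: [(1, u64::MAX), (2, 1)].iter().cloned().collect(),
    branches: [(1, vec![true, false])].iter().cloned().collect(),
    functions: FxHashMap::default(),
};
let other = CovResult {
    lines: [(1, 1), (2, 2)].iter().cloned().collect(),
    branches: [(1, vec![false, true, true])].iter().cloned().collect(),
    functions: FxHashMap::default(),
};

let overflowed = merge_results(&mut acc, other);
assert!(overflowed); // line 1 was clamped instead of wrapping
assert_eq!(acc.lines[&1], u64::MAX);
assert_eq!(acc.lines[&2], 3);
assert_eq!(acc.branches[&1], vec![true, true, true]); // OR-ed, then extended
```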
pub fn merge_results(result: &mut CovResult, result2: CovResult) -> bool { let mut warn_overflow = false; for (&line_no, &execution_count) in &result2.lines { match result.lines.entry(line_no) { btree_map::Entry::Occupied(c) => { let v = c.get().checked_add(execution_count).unwrap_or_else(|| { warn_overflow = true; u64::MAX }); *c.into_mut() = v; } btree_map::Entry::Vacant(v) => { v.insert(execution_count); } }; } for (line_no, taken) in result2.branches { match result.branches.entry(line_no) { btree_map::Entry::Occupied(c) => { let v = c.into_mut(); for (x, y) in taken.iter().zip(v.iter_mut()) { *y |= x; } let l = v.len(); if taken.len() > l { v.extend(&taken[l..]); } } btree_map::Entry::Vacant(v) => { v.insert(taken); } }; } for (name, function) in result2.functions { match result.functions.entry(name) { hash_map::Entry::Occupied(f) => f.into_mut().executed |= function.executed, hash_map::Entry::Vacant(v) => { v.insert(function); } }; } warn_overflow } fn add_results( results: Vec<(String, CovResult)>, result_map: &SyncCovResultMap, source_dir: Option<&Path>, ) { let mut map = result_map.lock().unwrap(); let mut warn_overflow = false; for result in results.into_iter() { let path = match source_dir { Some(source_dir) => { // the goal here is to be able to merge results for paths like foo/./bar and foo/bar if let Ok(p) = canonicalize_path(source_dir.join(&result.0)) { String::from(p.to_str().unwrap()) } else { result.0 } } None => result.0, }; let entry = map.entry(path); match entry { hash_map::Entry::Occupied(obj) => { warn_overflow |= merge_results(obj.into_mut(), result.1); } hash_map::Entry::Vacant(v) => { v.insert(result.1); } }; } if warn_overflow { warn!("Execution count overflow detected."); } } fn rename_single_files(results: &mut [(String, CovResult)], stem: &str) { // sometimes the gcno just contains foo.c // so in such case (with option --guess-directory-when-missing) // we guess the filename in using the buffer stem if let Some(parent) = Path::new(stem).parent() { for (file, _) in results.iter_mut() { if has_no_parent(file) { *file = parent.join(file.as_str()).to_str().unwrap().to_string(); } } } } // Some versions of GCC, because of a bug, generate multiple gcov files for each // gcno, so we have to support this case too for the time being. #[derive(PartialEq, Eq)] enum GcovType { Unknown, SingleFile, MultipleFiles, } macro_rules! 
try_parse { ($v:expr, $f:expr) => { match $v { Ok(val) => val, Err(err) => { error!("Error parsing file {}: {}", $f, err); continue; } } }; } pub fn consumer( working_dir: &Path, source_dir: Option<&Path>, result_map: &SyncCovResultMap, receiver: JobReceiver, branch_enabled: bool, guess_directory: bool, binary_path: Option<&Path>, ) { let mut gcov_type = GcovType::Unknown; while let Ok(work_item) = receiver.recv() { if work_item.is_none() { break; } let work_item = work_item.unwrap(); let new_results = match work_item.format { ItemFormat::Gcno => { match work_item.item { ItemType::Path((stem, gcno_path)) => { // GCC if let Err(e) = run_gcov(&gcno_path, branch_enabled, working_dir) { error!("Error when running gcov: {}", e); continue; }; let gcov_ext = get_gcov_output_ext(); let gcov_path = gcno_path.file_name().unwrap().to_str().unwrap().to_string() + gcov_ext; let gcov_path = working_dir.join(gcov_path); if gcov_type == GcovType::Unknown { gcov_type = if gcov_path.exists() { GcovType::SingleFile } else { GcovType::MultipleFiles }; } let mut new_results = if gcov_type == GcovType::SingleFile { let new_results = try_parse!( if gcov_ext.ends_with("gz") { parse_gcov_gz(&gcov_path) } else if gcov_ext.ends_with("gcov") { parse_gcov(&gcov_path) } else { panic!("Invalid gcov extension: {}", gcov_ext); }, work_item.name ); fs::remove_file(gcov_path).unwrap(); new_results } else { let mut new_results: Vec<(String, CovResult)> = Vec::new(); for entry in WalkDir::new(working_dir).min_depth(1) { let gcov_path = entry.unwrap(); let gcov_path = gcov_path.path(); new_results.append(&mut try_parse!( if gcov_path.extension().unwrap() == "gz" { parse_gcov_gz(gcov_path) } else { parse_gcov(gcov_path) }, work_item.name )); fs::remove_file(gcov_path).unwrap(); } new_results }; if guess_directory { rename_single_files(&mut new_results, &stem); } new_results } ItemType::Buffers(buffers) => { // LLVM match Gcno::compute( &buffers.stem, buffers.gcno_buf, buffers.gcda_buf, branch_enabled, ) { Ok(mut r) => { if guess_directory { rename_single_files(&mut r, &buffers.stem); } r } Err(e) => { // Just print the error, don't panic and continue error!("Error in computing counters: {}", e); Vec::new() } } } ItemType::Content(_) => { error!("Invalid content type"); continue; } ItemType::Paths(_) => { error!("Invalid content type"); continue; } } } ItemFormat::Profraw => { if binary_path.is_none() { error!("The path to the compiled binary must be given as an argument when source-based coverage is used"); continue; } if let ItemType::Paths(profraw_paths) = work_item.item { match llvm_tools::profraws_to_lcov( profraw_paths.as_slice(), binary_path.as_ref().unwrap(), working_dir, ) { Ok(lcovs) => { let mut new_results: Vec<(String, CovResult)> = Vec::new(); for lcov in lcovs { new_results.append(&mut try_parse!( parse_lcov(lcov, branch_enabled), work_item.name )); } new_results } Err(e) => { error!("Error while executing llvm tools: {}", e); continue; } } } else { error!("Invalid content type"); continue; } } ItemFormat::Info | ItemFormat::JacocoXml => { if let ItemType::Content(content) = work_item.item { if work_item.format == ItemFormat::Info { try_parse!(parse_lcov(content, branch_enabled), work_item.name) } else { let buffer = BufReader::new(Cursor::new(content)); try_parse!(parse_jacoco_xml_report(buffer), work_item.name) } } else { error!("Invalid content type"); continue; } } }; add_results(new_results, result_map, source_dir); } } #[cfg(test)] mod tests { use super::*; use rustc_hash::FxHashMap; use std::fs::File; use 
std::io::Read; use std::sync::{Arc, Mutex}; #[test] fn test_merge_results() { let mut functions1: FunctionMap = FxHashMap::default(); functions1.insert( "f1".to_string(), Function { start: 1, executed: false, }, ); functions1.insert( "f2".to_string(), Function { start: 2, executed: false, }, ); let mut result = CovResult { lines: [(1, 21), (2, 7), (7, 0)].iter().cloned().collect(), branches: [ (1, vec![false, false]), (2, vec![false, true]), (4, vec![true]), ] .iter() .cloned() .collect(), functions: functions1, }; let mut functions2: FunctionMap = FxHashMap::default(); functions2.insert( "f1".to_string(), Function { start: 1, executed: false, }, ); functions2.insert( "f2".to_string(), Function { start: 2, executed: true, }, ); let result2 = CovResult { lines: [(1, 21), (3, 42), (4, 7), (2, 0), (8, 0)] .iter() .cloned() .collect(), branches: [ (1, vec![false, false]), (2, vec![false, true]), (3, vec![true]), ] .iter() .cloned() .collect(), functions: functions2, }; merge_results(&mut result, result2); assert_eq!( result.lines, [(1, 42), (2, 7), (3, 42), (4, 7), (7, 0), (8, 0)] .iter() .cloned() .collect() ); assert_eq!( result.branches, [ (1, vec![false, false]), (2, vec![false, true]), (3, vec![true]), (4, vec![true]), ] .iter() .cloned() .collect() ); assert!(result.functions.contains_key("f1")); assert!(result.functions.contains_key("f2")); let mut func = result.functions.get("f1").unwrap(); assert_eq!(func.start, 1); assert!(!func.executed); func = result.functions.get("f2").unwrap(); assert_eq!(func.start, 2); assert!(func.executed); } #[test] fn test_merge_relative_path() { let mut f = File::open("./test/relative_path/relative_path.info") .expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let results = parse_lcov(buf, false).unwrap(); let result_map: Arc = Arc::new(Mutex::new( FxHashMap::with_capacity_and_hasher(1, Default::default()), )); add_results( results, &result_map, Some(Path::new("./test/relative_path")), ); let result_map = Arc::try_unwrap(result_map).unwrap().into_inner().unwrap(); assert!(result_map.len() == 1); let cpp_file = canonicalize_path(Path::new("./test/relative_path/foo/bar/oof.cpp")).unwrap(); let cpp_file = cpp_file.to_str().unwrap(); let cov_result = result_map.get(cpp_file).unwrap(); assert_eq!( cov_result.lines, [(1, 63), (2, 63), (3, 84), (4, 42)] .iter() .cloned() .collect() ); assert!(cov_result.functions.contains_key("myfun")); } #[test] fn test_ignore_relative_path() { let mut f = File::open("./test/relative_path/relative_path.info") .expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let results = parse_lcov(buf, false).unwrap(); let result_map: Arc = Arc::new(Mutex::new( FxHashMap::with_capacity_and_hasher(3, Default::default()), )); add_results(results, &result_map, None); let result_map = Arc::try_unwrap(result_map).unwrap().into_inner().unwrap(); assert!(result_map.len() == 3); } } grcov-0.8.22/src/llvm_tools.rs000064400000000000000000000253571046102023000144000ustar 00000000000000use once_cell::sync::OnceCell; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use std::env; use std::env::consts::EXE_SUFFIX; use std::error::Error; use std::ffi::OsStr; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use log::warn; use walkdir::WalkDir; pub static LLVM_PATH: OnceCell = OnceCell::new(); pub fn is_binary(path: impl AsRef) -> bool { if let Ok(oty) = infer::get_from_path(path) { if let Some("dll" | 
"exe" | "elf" | "mach") = oty.map(|x| x.extension()) { return true; } } false } pub fn run_with_stdin( cmd: impl AsRef, stdin: impl AsRef, args: &[&OsStr], ) -> Result, String> { let mut command = Command::new(cmd.as_ref()); let err_fn = |e| format!("Failed to execute {:?}\n{}", cmd.as_ref(), e); command .args(args) .stdin(Stdio::piped()) .stdout(Stdio::piped()); let mut child = command.spawn().map_err(err_fn)?; child .stdin .as_mut() .unwrap() .write_all(stdin.as_ref().as_bytes()) .map_err(err_fn)?; let output = child.wait_with_output().map_err(err_fn)?; if !output.status.success() { return Err(format!( "Failure while running {:?}\n{}", command, String::from_utf8_lossy(&output.stderr) )); } Ok(output.stdout) } pub fn run(cmd: impl AsRef, args: &[&OsStr]) -> Result, String> { let mut command = Command::new(cmd); command.args(args); let output = command .output() .map_err(|e| format!("Failed to execute {:?}\n{}", command, e))?; if !output.status.success() { return Err(format!( "Failure while running {:?}\n{}", command, String::from_utf8_lossy(&output.stderr) )); } Ok(output.stdout) } pub fn profraws_to_lcov( profraw_paths: &[PathBuf], binary_path: &Path, working_dir: &Path, ) -> Result>, String> { let profdata_path = working_dir.join("grcov.profdata"); let args = vec![ "merge".as_ref(), "-f".as_ref(), "-".as_ref(), "-sparse".as_ref(), "-o".as_ref(), profdata_path.as_ref(), ]; let stdin_paths: String = profraw_paths.iter().fold("".into(), |mut a, x| { a.push_str(x.to_string_lossy().as_ref()); a.push('\n'); a }); get_profdata_path().and_then(|p| run_with_stdin(p, &stdin_paths, &args))?; let metadata = fs::metadata(binary_path) .unwrap_or_else(|e| panic!("Failed to open directory '{:?}': {:?}.", binary_path, e)); let binaries = if metadata.is_file() { vec![binary_path.to_owned()] } else { let mut paths = vec![]; for entry in WalkDir::new(binary_path).follow_links(true) { let entry = entry .unwrap_or_else(|e| panic!("Failed to open directory '{:?}': {}", binary_path, e)); if is_binary(entry.path()) && entry.metadata().unwrap().len() > 0 { paths.push(entry.into_path()); } } paths }; let cov_tool_path = get_cov_path()?; let results = binaries .into_par_iter() .filter_map(|binary| { let args = [ "export".as_ref(), binary.as_ref(), "--instr-profile".as_ref(), profdata_path.as_ref(), "--format".as_ref(), "lcov".as_ref(), ]; match run(&cov_tool_path, &args) { Ok(result) => Some(result), Err(err_str) => { warn!( "Suppressing error returned by llvm-cov tool for binary {:?}\n{}", binary, err_str ); None } } }) .collect::>(); Ok(results) } // The sysroot and rustlib functions are coming from https://github.com/rust-embedded/cargo-binutils/blob/a417523fa990c258509696507d1ce05f85dedbc4/src/rustc.rs. 
fn sysroot() -> Result> { let rustc = env::var_os("RUSTC").unwrap_or_else(|| "rustc".into()); let output = Command::new(rustc).arg("--print").arg("sysroot").output()?; // Note: We must trim() to remove the `\n` from the end of stdout Ok(String::from_utf8(output.stdout)?.trim().to_owned()) } // See: https://github.com/rust-lang/rust/blob/564758c4c329e89722454dd2fbb35f1ac0b8b47c/src/bootstrap/dist.rs#L2334-L2341 fn rustlib() -> Result> { let sysroot = sysroot()?; let mut pathbuf = PathBuf::from(sysroot); pathbuf.push("lib"); pathbuf.push("rustlib"); pathbuf.push(rustc_version::version_meta()?.host); // TODO: Prevent calling rustc_version::version_meta() multiple times pathbuf.push("bin"); Ok(pathbuf) } fn llvm_tool_path(name: &str) -> Result> { let mut path = rustlib()?; path.push(format!("llvm-{}{}", name, EXE_SUFFIX)); Ok(path) } fn get_profdata_path() -> Result { let path = if let Some(mut path) = LLVM_PATH.get().cloned() { path.push(format!("llvm-profdata{}", EXE_SUFFIX)); path } else { llvm_tool_path("profdata").map_err(|x| x.to_string())? }; if !path.exists() { Err(String::from("We couldn't find llvm-profdata. Try installing the llvm-tools component with `rustup component add llvm-tools-preview` or specifying the --llvm-path option.")) } else { Ok(path) } } fn get_cov_path() -> Result { let path = if let Some(mut path) = LLVM_PATH.get().cloned() { path.push(format!("llvm-cov{}", EXE_SUFFIX)); path } else { llvm_tool_path("cov").map_err(|x| x.to_string())? }; if !path.exists() { Err(String::from("We couldn't find llvm-cov. Try installing the llvm-tools component with `rustup component add llvm-tools-preview` or specifying the --llvm-path option.")) } else { Ok(path) } } #[cfg(test)] mod tests { use super::*; use std::fs; #[test] fn test_profraws_to_lcov() { let output = Command::new("rustc").arg("--version").output().unwrap(); if !String::from_utf8_lossy(&output.stdout).contains("nightly") { return; } let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); fs::copy( PathBuf::from("tests/rust/Cargo.toml"), tmp_path.join("Cargo.toml"), ) .expect("Failed to copy file"); fs::create_dir(tmp_path.join("src")).expect("Failed to create dir"); fs::copy( PathBuf::from("tests/rust/src/main.rs"), tmp_path.join("src/main.rs"), ) .expect("Failed to copy file"); let status = Command::new("cargo") .arg("run") .env("RUSTFLAGS", "-Cinstrument-coverage") .env("LLVM_PROFILE_FILE", tmp_path.join("default.profraw")) .current_dir(&tmp_path) .status() .expect("Failed to build"); assert!(status.success()); let lcovs = profraws_to_lcov( &[tmp_path.join("default.profraw")], &PathBuf::from("src"), &tmp_path, ); assert!(lcovs.is_ok()); let lcovs = lcovs.unwrap(); assert_eq!(lcovs.len(), 0); #[cfg(unix)] let binary_path = format!( "{}/debug/rust-code-coverage-sample", std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_string()) ); #[cfg(windows)] let binary_path = format!( "{}/debug/rust-code-coverage-sample.exe", std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_string()) ); let lcovs = profraws_to_lcov( &[tmp_path.join("default.profraw")], &tmp_path.join(binary_path), &tmp_path, ); assert!(lcovs.is_ok()); let lcovs = lcovs.unwrap(); assert_eq!(lcovs.len(), 1); let output_lcov = String::from_utf8_lossy(&lcovs[0]); println!("{}", output_lcov); assert!(output_lcov .lines() .any(|line| line.contains("SF") && line.contains("src") && line.contains("main.rs"))); if rustc_version::version_meta().unwrap().channel != 
rustc_version::Channel::Nightly { assert!(output_lcov.lines().any(|line| line.contains("FN:3") && line.contains("rust_code_coverage_sample") && line.contains("Ciao"))); } assert!(output_lcov.lines().any(|line| line.contains("FN:8") && line.contains("rust_code_coverage_sample") && line.contains("main"))); if rustc_version::version_meta().unwrap().channel != rustc_version::Channel::Nightly { assert!(output_lcov.lines().any(|line| line.contains("FNDA:0") && line.contains("rust_code_coverage_sample") && line.contains("Ciao"))); } else { assert!(output_lcov.lines().any(|line| line.contains("FNDA:1") && line.contains("rust_code_coverage_sample") && line.contains("main"))); } assert!(output_lcov.lines().any(|line| line.contains("FNDA:1") && line.contains("rust_code_coverage_sample") && line.contains("main"))); if rustc_version::version_meta().unwrap().channel != rustc_version::Channel::Nightly { assert!(output_lcov.lines().any(|line| line == "FNF:2")); } assert!(output_lcov.lines().any(|line| line == "FNH:1")); if rustc_version::version_meta().unwrap().channel != rustc_version::Channel::Nightly { assert!(output_lcov.lines().any(|line| line == "DA:3,0")); } assert!(output_lcov.lines().any(|line| line == "DA:8,1")); assert!(output_lcov.lines().any(|line| line == "DA:9,1")); assert!(output_lcov.lines().any(|line| line == "DA:10,1")); assert!(output_lcov.lines().any(|line| line == "DA:11,1")); assert!(output_lcov.lines().any(|line| line == "DA:12,1")); assert!(output_lcov.lines().any(|line| line == "BRF:0")); assert!(output_lcov.lines().any(|line| line == "BRH:0")); if rustc_version::version_meta().unwrap().channel == rustc_version::Channel::Nightly { assert!(output_lcov.lines().any(|line| line == "LF:5")); assert!(output_lcov.lines().any(|line| line == "LH:5")); } else { assert!(output_lcov.lines().any(|line| line == "LF:6")); assert!(output_lcov.lines().any(|line| line == "LH:5")); } assert!(output_lcov.lines().any(|line| line == "end_of_record")); } } grcov-0.8.22/src/main.rs000064400000000000000000000501401046102023000131160ustar 00000000000000#[cfg(all(unix, feature = "tc"))] #[global_allocator] static GLOBAL: tcmalloc::TCMalloc = tcmalloc::TCMalloc; use clap::{builder::PossibleValue, ArgGroup, Parser, ValueEnum}; use crossbeam_channel::bounded; use log::error; use regex::Regex; use rustc_hash::FxHashMap; use serde_json::Value; use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode, WriteLogger}; use std::fs::{self, File}; use std::ops::Deref; use std::panic; use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::{Arc, Mutex}; use std::{process, thread}; use grcov::*; #[derive(Clone, PartialEq)] enum OutputType { Ade, Lcov, Coveralls, CoverallsPlus, Files, Covdir, Html, Cobertura, CoberturaPretty, Markdown, } impl FromStr for OutputType { type Err = String; fn from_str(s: &str) -> Result { Ok(match s { "ade" => Self::Ade, "lcov" => Self::Lcov, "coveralls" => Self::Coveralls, "coveralls+" => Self::CoverallsPlus, "files" => Self::Files, "covdir" => Self::Covdir, "html" => Self::Html, "cobertura" => Self::Cobertura, "cobertura-pretty" => Self::CoberturaPretty, "markdown" => Self::Markdown, _ => return Err(format!("{} is not a supported output type", s)), }) } } impl OutputType { fn to_file_name(&self, output_path: Option<&Path>) -> Option { output_path.map(|path| { if path.is_dir() { match self { OutputType::Ade => path.join("activedata"), OutputType::Lcov => path.join("lcov"), OutputType::Coveralls => path.join("coveralls"), OutputType::CoverallsPlus => 
path.join("coveralls+"), OutputType::Files => path.join("files"), OutputType::Covdir => path.join("covdir"), OutputType::Html => path.join("html"), OutputType::Cobertura | OutputType::CoberturaPretty => { path.join("cobertura.xml") } OutputType::Markdown => path.join("markdown.md"), } } else { path.to_path_buf() } }) } } #[derive(clap::ValueEnum, Clone)] enum Filter { Covered, Uncovered, } impl FromStr for Filter { type Err = String; fn from_str(s: &str) -> Result { Ok(match s { "covered" => Self::Covered, "uncovered" => Self::Uncovered, _ => return Err(format!("{} is not a supported filter", s)), }) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] struct LevelFilterArg(LevelFilter); impl ValueEnum for LevelFilterArg { fn value_variants<'a>() -> &'a [Self] { &[ Self(LevelFilter::Off), Self(LevelFilter::Error), Self(LevelFilter::Warn), Self(LevelFilter::Info), Self(LevelFilter::Debug), Self(LevelFilter::Trace), ] } fn to_possible_value(&self) -> Option { match self.0 { LevelFilter::Off => Some(PossibleValue::new("OFF")), LevelFilter::Error => Some(PossibleValue::new("ERROR")), LevelFilter::Warn => Some(PossibleValue::new("WARN")), LevelFilter::Info => Some(PossibleValue::new("INFO")), LevelFilter::Debug => Some(PossibleValue::new("DEBUG")), LevelFilter::Trace => Some(PossibleValue::new("TRACE")), } } } #[derive(Parser)] #[command( author, version, max_term_width = 100, about = "Parse, collect and aggregate code coverage data for multiple source files", help_template = "\ {before-help}{name} {author-with-newline}{about-with-newline} {usage-heading} {usage} {all-args}{after-help} ", // This group requires that at least one of --token and --service-job-id // be present. --service-job-id requires --service-name, so this // effectively means we accept the following combinations: // - --token // - --token --service-job-id --service-name // - --service-job-id --service-name group = ArgGroup::new("coveralls-auth") .args(&["token", "service_job_id"]) .multiple(true), )] struct Opt { /// Sets the input paths to use. #[arg(required = true)] paths: Vec, /// Sets the path to the compiled binary to be used. #[arg(short, long, value_name = "PATH")] binary_path: Option, /// Sets the path to the LLVM bin directory. #[arg(long, value_name = "PATH")] llvm_path: Option, /// Sets a custom output type. #[arg( short = 't', long, long_help = "\ Comma separated list of custom output types:\n\ - *html* for a HTML coverage report;\n\ - *coveralls* for the Coveralls specific format;\n\ - *lcov* for the lcov INFO format;\n\ - *covdir* for the covdir recursive JSON format;\n\ - *coveralls+* for the Coveralls specific format with function information;\n\ - *ade* for the ActiveData-ETL specific format;\n\ - *files* to only return a list of files.\n\ - *markdown* for human easy read.\n\ - *cobertura* for output in cobertura format.\n\ - *cobertura-pretty* to pretty-print in cobertura format.\n\ ", value_name = "OUTPUT TYPE", requires_ifs = [ ("coveralls", "coveralls-auth"), ("coveralls+", "coveralls-auth"), ], value_delimiter = ',', alias = "output-type", default_value = "lcov", )] output_types: Vec, /// Specifies the output path. This is a file for a single output type and must be a folder /// for multiple output types. #[arg(short, long, value_name = "PATH", alias = "output-file")] output_path: Option, /// Specifies the output config file. #[arg(long, value_name = "PATH", alias = "output-config-file")] output_config_file: Option, /// Specifies the root directory of the source files. 
#[arg(short, long, value_name = "DIRECTORY")] source_dir: Option, /// Specifies a prefix to remove from the paths (e.g. if grcov is run on a different machine /// than the one that generated the code coverage information). #[arg(short, long, value_name = "PATH")] prefix_dir: Option, /// Ignore source files that can't be found on the disk. #[arg(long)] ignore_not_existing: bool, /// Ignore files/directories specified as globs. #[arg(long = "ignore", value_name = "PATH", num_args = 1)] ignore_dir: Vec, /// Keep only files/directories specified as globs. #[arg(long = "keep-only", value_name = "PATH", num_args = 1)] keep_dir: Vec, #[arg(long, value_name = "PATH")] path_mapping: Option, /// Enables parsing branch coverage information. #[arg(long)] branch: bool, /// Filters out covered/uncovered files. Use 'covered' to only return covered files, 'uncovered' /// to only return uncovered files. #[arg(long, value_enum)] filter: Option, /// Comma separated list of output types to sort files lexicographically for. #[arg( long, value_name = "OUTPUT TYPES", value_delimiter = ',', alias = "sort-output-types", default_value = "markdown" )] sort_output_types: Vec, /// Speeds-up parsing, when the code coverage information is exclusively coming from a llvm /// build. #[arg(long)] llvm: bool, /// Sets the repository token from Coveralls, required for the 'coveralls' and 'coveralls+' /// formats. #[arg(long, value_name = "TOKEN")] token: Option, /// Sets the hash of the commit used to generate the code coverage data. #[arg(long, value_name = "COMMIT HASH")] commit_sha: Option, /// Sets the service name. #[arg(long, value_name = "SERVICE NAME")] service_name: Option, /// Sets the service number. #[arg(long, value_name = "SERVICE NUMBER")] service_number: Option, /// Sets the service job id. #[arg( long, value_name = "SERVICE JOB ID", visible_alias = "service-job-number", requires = "service_name" )] service_job_id: Option, /// Sets the service pull request number. #[arg(long, value_name = "SERVICE PULL REQUEST")] service_pull_request: Option, /// Sets the service flag name for coveralls parallel/carryover mode #[arg(long, value_name = "SERVICE FLAG NAME")] service_flag_name: Option, /// Sets the build type to be parallel for 'coveralls' and 'coveralls+' formats. #[arg(long)] parallel: bool, #[arg(long, value_name = "NUMBER")] threads: Option, /// Sets coverage decimal point precision on output reports. #[arg(long, value_name = "NUMBER", default_value = "2")] precision: usize, #[arg(long = "guess-directory-when-missing")] guess_directory: bool, /// Set the branch for coveralls report. Defaults to 'master'. #[arg(long, value_name = "VCS BRANCH", default_value = "master")] vcs_branch: String, /// Set the file where to log (or stderr or stdout). Defaults to 'stderr'. #[arg(long, value_name = "LOG", default_value = "stderr")] log: PathBuf, /// Set the log level. #[arg(long, value_name = "LEVEL", default_value = "ERROR", value_enum)] log_level: LevelFilterArg, /// Lines in covered files containing this marker will be excluded. #[arg(long, value_name = "regex")] excl_line: Option, /// Marks the beginning of an excluded section. The current line is part of this section. #[arg(long, value_name = "regex")] excl_start: Option, /// Marks the end of an excluded section. The current line is part of this section. #[arg(long, value_name = "regex")] excl_stop: Option, /// Lines in covered files containing this marker will be excluded from branch coverage. 
#[arg(long, value_name = "regex")] excl_br_line: Option, /// Marks the beginning of a section excluded from branch coverage. The current line is part of /// this section. #[arg(long, value_name = "regex")] excl_br_start: Option, /// Marks the end of a section excluded from branch coverage. The current line is part of this /// section. #[arg(long, value_name = "regex")] excl_br_stop: Option, /// No symbol demangling. #[arg(long)] no_demangle: bool, } fn main() { let opt = Opt::parse(); if let Some(path) = opt.llvm_path { LLVM_PATH.set(path).unwrap(); } let filter_option = opt.filter.map(|filter| match filter { Filter::Covered => true, Filter::Uncovered => false, }); let stdout = Path::new("stdout"); let stderr = Path::new("stderr"); if opt.log == stdout { let _ = TermLogger::init( opt.log_level.0, Config::default(), TerminalMode::Stdout, ColorChoice::Auto, ); } else if opt.log == stderr { let _ = TermLogger::init( opt.log_level.0, Config::default(), TerminalMode::Stderr, ColorChoice::Auto, ); } else if let Ok(file) = File::create(&opt.log) { let _ = WriteLogger::init(opt.log_level.0, Config::default(), file); } else { let _ = TermLogger::init( opt.log_level.0, Config::default(), TerminalMode::Stderr, ColorChoice::Auto, ); error!( "Unable to create log file: {}. Switch to stderr", opt.log.display() ); } let file_filter = FileFilter::new( opt.excl_line, opt.excl_start, opt.excl_stop, opt.excl_br_line, opt.excl_br_start, opt.excl_br_stop, ); let demangle = !opt.no_demangle; panic::set_hook(Box::new(|panic_info| { let (filename, line) = panic_info .location() .map(|loc| (loc.file(), loc.line())) .unwrap_or(("", 0)); let cause = panic_info .payload() .downcast_ref::() .map(String::deref); let cause = cause.unwrap_or_else(|| { panic_info .payload() .downcast_ref::<&str>() .copied() .unwrap_or("") }); error!("A panic occurred at {}:{}: {}", filename, line, cause); })); let num_threads: usize = opt.threads.unwrap_or_else(|| 1.max(num_cpus::get() - 1)); let source_root = opt .source_dir .filter(|source_dir| source_dir != Path::new("")) .map(|source_dir| canonicalize_path(source_dir).expect("Source directory does not exist.")); let prefix_dir = opt.prefix_dir.or_else(|| source_root.clone()); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); assert!(tmp_path.exists()); let result_map: Arc = Arc::new(Mutex::new( FxHashMap::with_capacity_and_hasher(20_000, Default::default()), )); let (sender, receiver) = bounded(2 * num_threads); let path_mapping: Arc>> = Arc::new(Mutex::new(None)); let producer = { let sender: JobSender = sender.clone(); let tmp_path = tmp_path.clone(); let path_mapping_file = opt.path_mapping; let path_mapping = Arc::clone(&path_mapping); let paths = opt.paths; let is_llvm = opt.llvm; thread::Builder::new() .name(String::from("Producer")) .spawn(move || { let producer_path_mapping_buf = producer( &tmp_path, &paths, &sender, filter_option.is_some() && filter_option.unwrap(), is_llvm, ); let mut path_mapping = path_mapping.lock().unwrap(); *path_mapping = if let Some(path) = path_mapping_file { let file = File::open(path).unwrap(); Some(serde_json::from_reader(file).unwrap()) } else { producer_path_mapping_buf.map(|producer_path_mapping_buf| { serde_json::from_slice(&producer_path_mapping_buf).unwrap() }) }; }) .unwrap() }; let mut parsers = Vec::new(); for i in 0..num_threads { let receiver = receiver.clone(); let result_map = Arc::clone(&result_map); let working_dir = tmp_path.join(format!("{}", i)); let 
source_root = source_root.clone(); let binary_path = opt.binary_path.clone(); let branch_enabled = opt.branch; let guess_directory = opt.guess_directory; let t = thread::Builder::new() .name(format!("Consumer {}", i)) .spawn(move || { fs::create_dir(&working_dir).expect("Failed to create working directory"); consumer( &working_dir, source_root.as_deref(), &result_map, receiver, branch_enabled, guess_directory, binary_path.as_deref(), ); }) .unwrap(); parsers.push(t); } if producer.join().is_err() { process::exit(1); } // Poison the receiver, now that the producer is finished. for _ in 0..num_threads { sender.send(None).unwrap(); } for parser in parsers { if parser.join().is_err() { process::exit(1); } } let result_map_mutex = Arc::try_unwrap(result_map).unwrap(); let result_map = result_map_mutex.into_inner().unwrap(); let path_mapping_mutex = Arc::try_unwrap(path_mapping).unwrap(); let path_mapping = path_mapping_mutex.into_inner().unwrap(); let iterator = rewrite_paths( result_map, path_mapping, source_root.as_deref(), prefix_dir.as_deref(), opt.ignore_not_existing, &opt.ignore_dir, &opt.keep_dir, filter_option, file_filter, ); let mut sorted_iterator: Option> = None; let service_number = opt.service_number.unwrap_or_default(); let service_pull_request = opt.service_pull_request.unwrap_or_default(); let commit_sha = opt.commit_sha.unwrap_or_default(); let output_types = opt.output_types; let output_path = match output_types.len() { 0 => unreachable!("Output types has a default value"), 1 => opt.output_path.as_deref(), _ => match opt.output_path.as_deref() { Some(output_path) => { if output_path.is_dir() { Some(output_path) } else { panic!("output_path must be a directory when using multiple outputs"); } } _ => None, }, }; for output_type in &output_types { let output_path = output_type.to_file_name(output_path); let results = if opt.sort_output_types.contains(output_type) { // compute and cache the sorted results if not already used sorted_iterator = sorted_iterator.or_else(|| { let mut results = iterator.clone(); results.sort_by_key(|result| result.0.display().to_string()); Some(results) }); sorted_iterator.as_ref().unwrap() } else { &iterator }; match output_type { OutputType::Ade => output_activedata_etl(results, output_path.as_deref(), demangle), OutputType::Lcov => output_lcov(results, output_path.as_deref(), demangle), OutputType::Coveralls => output_coveralls( results, opt.token.as_deref(), opt.service_name.as_deref(), &service_number, opt.service_job_id.as_deref(), &service_pull_request, opt.service_flag_name.as_deref(), &commit_sha, false, output_path.as_deref(), &opt.vcs_branch, opt.parallel, demangle, ), OutputType::CoverallsPlus => output_coveralls( results, opt.token.as_deref(), opt.service_name.as_deref(), &service_number, opt.service_job_id.as_deref(), &service_pull_request, opt.service_flag_name.as_deref(), &commit_sha, true, output_path.as_deref(), &opt.vcs_branch, opt.parallel, demangle, ), OutputType::Files => output_files(results, output_path.as_deref()), OutputType::Covdir => output_covdir(results, output_path.as_deref(), opt.precision), OutputType::Html => output_html( results, output_path.as_deref(), num_threads, opt.branch, opt.output_config_file.as_deref(), opt.precision, ), OutputType::Cobertura => output_cobertura( source_root.as_deref(), results, output_path.as_deref(), demangle, false, ), OutputType::CoberturaPretty => output_cobertura( source_root.as_deref(), results, output_path.as_deref(), demangle, true, ), OutputType::Markdown => output_markdown(results, 
output_path.as_deref(), opt.precision), }; } } #[cfg(test)] mod tests { use super::*; use clap::CommandFactory; #[test] fn clap_debug_assert() { Opt::command().debug_assert(); } } grcov-0.8.22/src/output.rs000064400000000000000000001017161046102023000135400ustar 00000000000000use crossbeam_channel::unbounded; use md5::{Digest, Md5}; use rustc_hash::FxHashMap; use serde_json::{self, json, Value}; use std::cell::RefCell; use std::collections::{hash_map, BTreeMap, BTreeSet}; use std::ffi::OsStr; use std::fs::File; use std::io::{self, BufWriter, Read, Write}; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::{Arc, Mutex}; use std::{ process::{self, Command, Stdio}, thread, }; use symbolic_common::Name; use symbolic_demangle::{Demangle, DemangleOptions}; use tabled::settings::Style; use tabled::{Table, Tabled}; use uuid::Uuid; use crate::defs::*; use crate::html; macro_rules! demangle { ($name: expr, $demangle: expr, $options: expr) => {{ if $demangle { if let Some(name) = Name::from($name).demangle($options) { StringOrRef::S(name) } else { StringOrRef::R($name) } } else { StringOrRef::R($name) } }}; } pub fn get_target_output_writable(output_file: Option<&Path>) -> Box { let write_target: Box = match output_file { Some(output) => { if output.is_dir() { panic!( "The output file {} is a directory, but must be a regular file.", output.display() ) } Box::new(File::create(output).unwrap_or_else(|_| { let parent = output.parent(); if let Some(parent_path) = parent { if !parent_path.exists() { panic!( "Cannot create file {} to dump coverage data, as the parent directory {} doesn't exist.", output.display(), parent_path.display() ) } } panic!( "Cannot create the file {} to dump coverage data.", output.display() ) })) } None => { let stdout = io::stdout(); Box::new(stdout) } }; write_target } pub fn output_activedata_etl(results: &[ResultTuple], output_file: Option<&Path>, demangle: bool) { let demangle_options = DemangleOptions::name_only(); let mut writer = BufWriter::new(get_target_output_writable(output_file)); for (_, rel_path, result) in results { let covered: Vec = result .lines .iter() .filter(|&(_, v)| *v > 0) .map(|(k, _)| k) .cloned() .collect(); let uncovered: Vec = result .lines .iter() .filter(|&(_, v)| *v == 0) .map(|(k, _)| k) .cloned() .collect(); let mut orphan_covered: BTreeSet = covered.iter().cloned().collect(); let mut orphan_uncovered: BTreeSet = uncovered.iter().cloned().collect(); let end: u32 = result.lines.keys().last().unwrap_or(&0) + 1; let mut start_indexes: Vec = Vec::new(); for function in result.functions.values() { start_indexes.push(function.start); } start_indexes.sort_unstable(); for (name, function) in &result.functions { // println!("{} {} {}", name, function.executed, function.start); let mut func_end = end; for start in &start_indexes { if *start > function.start { func_end = *start; break; } } let mut lines_covered: Vec = Vec::new(); for line in covered .iter() .filter(|&&x| x >= function.start && x < func_end) { lines_covered.push(*line); orphan_covered.remove(line); } let mut lines_uncovered: Vec = Vec::new(); for line in uncovered .iter() .filter(|&&x| x >= function.start && x < func_end) { lines_uncovered.push(*line); orphan_uncovered.remove(line); } writeln!( writer, "{}", json!({ "language": "c/c++", "file": { "name": rel_path, }, "method": { "name": demangle!(name, demangle, demangle_options), "covered": lines_covered, "uncovered": lines_uncovered, "total_covered": lines_covered.len(), "total_uncovered": lines_uncovered.len(), 
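The `demangle!` macro above is a thin fallback wrapper over the `symbolic` crates: try to demangle, keep the raw symbol on failure. The same calls, spelled out (the mangled name is illustrative):

```rust
use symbolic_common::Name;
use symbolic_demangle::{Demangle, DemangleOptions};

let options = DemangleOptions::name_only();
let raw = "_ZN7example4mainE"; // hypothetical mangled symbol

// demangle() yields Option<String>; None keeps the raw name in the report.
match Name::from(raw).demangle(options) {
    Some(pretty) => println!("{pretty}"),
    None => println!("{raw}"),
}
```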
"percentage_covered": lines_covered.len() as f32 / (lines_covered.len() + lines_uncovered.len()) as f32, } }) ).unwrap(); } let orphan_covered: Vec = orphan_covered.into_iter().collect(); let orphan_uncovered: Vec = orphan_uncovered.into_iter().collect(); // The orphan lines will represent the file as a whole. writeln!( writer, "{}", json!({ "language": "c/c++", "is_file": true, "file": { "name": rel_path, "covered": covered, "uncovered": uncovered, "total_covered": covered.len(), "total_uncovered": uncovered.len(), "percentage_covered": covered.len() as f32 / (covered.len() + uncovered.len()) as f32, }, "method": { "covered": orphan_covered, "uncovered": orphan_uncovered, "total_covered": orphan_covered.len(), "total_uncovered": orphan_uncovered.len(), "percentage_covered": orphan_covered.len() as f32 / (orphan_covered.len() + orphan_uncovered.len()) as f32, } }) ).unwrap(); } } pub fn output_covdir(results: &[ResultTuple], output_file: Option<&Path>, precision: usize) { let mut writer = BufWriter::new(get_target_output_writable(output_file)); let mut relative: FxHashMap>> = FxHashMap::default(); let global = Rc::new(RefCell::new(CDDirStats::new("".to_string()))); relative.insert(PathBuf::from(""), global.clone()); for (abs_path, rel_path, result) in results { let path = if rel_path.is_relative() { rel_path } else { abs_path }; let parent = path.parent().unwrap(); let mut ancestors = Vec::new(); for ancestor in parent.ancestors() { ancestors.push(ancestor); if relative.contains_key(ancestor) { break; } } let mut prev_stats = global.clone(); while let Some(ancestor) = ancestors.pop() { prev_stats = match relative.entry(ancestor.to_path_buf()) { hash_map::Entry::Occupied(s) => s.get().clone(), hash_map::Entry::Vacant(p) => { let mut prev_stats = prev_stats.borrow_mut(); let path_tail = if ancestor == PathBuf::from("/") { "/".to_string() } else { ancestor.file_name().unwrap().to_str().unwrap().to_string() }; prev_stats .dirs .push(Rc::new(RefCell::new(CDDirStats::new(path_tail)))); let last = prev_stats.dirs.last_mut().unwrap(); p.insert(last.clone()); last.clone() } }; } prev_stats.borrow_mut().files.push(CDFileStats::new( path.file_name().unwrap().to_str().unwrap().to_string(), result.lines.clone(), precision, )); } let mut global = global.take(); global.set_stats(precision); serde_json::to_writer(&mut writer, &global.into_json()).unwrap(); } pub fn output_lcov(results: &[ResultTuple], output_file: Option<&Path>, demangle: bool) { let demangle_options = DemangleOptions::name_only(); let mut writer = BufWriter::new(get_target_output_writable(output_file)); writer.write_all(b"TN:\n").unwrap(); for (_, rel_path, result) in results { // println!("{} {:?}", rel_path, result.lines); writeln!(writer, "SF:{}", rel_path.display()).unwrap(); for (name, function) in &result.functions { writeln!( writer, "FN:{},{}", function.start, demangle!(name, demangle, demangle_options) ) .unwrap(); } for (name, function) in &result.functions { writeln!( writer, "FNDA:{},{}", i32::from(function.executed), demangle!(name, demangle, demangle_options) ) .unwrap(); } if !result.functions.is_empty() { writeln!(writer, "FNF:{}", result.functions.len()).unwrap(); writeln!( writer, "FNH:{}", result.functions.values().filter(|x| x.executed).count() ) .unwrap(); } // branch coverage information let mut branch_count = 0; let mut branch_hit = 0; for (line, taken) in &result.branches { branch_count += taken.len(); for (n, b_t) in taken.iter().enumerate() { writeln!( writer, "BRDA:{},0,{},{}", line, n, if *b_t { "1" } else { 
"-" } ) .unwrap(); if *b_t { branch_hit += 1; } } } writeln!(writer, "BRF:{}", branch_count).unwrap(); writeln!(writer, "BRH:{}", branch_hit).unwrap(); for (line, execution_count) in &result.lines { writeln!(writer, "DA:{},{}", line, execution_count).unwrap(); } writeln!(writer, "LF:{}", result.lines.len()).unwrap(); writeln!( writer, "LH:{}", result.lines.values().filter(|&v| *v > 0).count() ) .unwrap(); writer.write_all(b"end_of_record\n").unwrap(); } } fn get_digest(path: PathBuf) -> String { if let Ok(mut f) = File::open(path) { let mut buffer = Vec::new(); f.read_to_end(&mut buffer).unwrap(); let mut hasher = Md5::new(); hasher.update(buffer.as_slice()); format!("{:x}", hasher.finalize()) } else { Uuid::new_v4().to_string() } } /// Runs git with given array of arguments (as strings), and returns whatever git printed to /// stdout. On error, returns empty string. Standard input and error are redirected from/to null. fn get_git_output(args: I) -> String where I: IntoIterator, S: AsRef, { Command::new("git") .args(args) .stdin(Stdio::null()) .stderr(Stdio::null()) .stdout(Stdio::piped()) .spawn() .and_then(|child| child.wait_with_output()) .ok() // Discard the error type -- we won't handle it anyway .and_then(|output| String::from_utf8(output.stdout).ok()) .unwrap_or_default() } /// Returns a JSON object describing the given commit. Coveralls uses that to display commit info. /// /// \a vcs_branch is what user passed on the command line via `--vcs-branch`. This is included in /// the output, but doesn't affect the rest of the info (e.g. this function doesn't check if that /// branch actually points to the given commit). fn get_coveralls_git_info(commit_sha: &str, vcs_branch: &str) -> Value { let status = Command::new("git") .arg("status") .stdin(Stdio::null()) .stdout(Stdio::null()) .stderr(Stdio::null()) .status() .map(|exit_status| exit_status.success()); if let Ok(true) = status { // We have a valid Git repo -- the rest of the function will handle this case } else { return json!({ "head": { "id": commit_sha, }, "branch": vcs_branch, }); } // Runs `git log` with a given format, to extract some piece of commit info. On failure, // returns empty string. 
let gitlog = |format| -> String { get_git_output([ "log", "--max-count=1", &format!("--pretty=format:{}", format), commit_sha, ]) }; let author_name = gitlog("%aN"); let author_email = gitlog("%ae"); let committer_name = gitlog("%cN"); let committer_email = gitlog("%ce"); let message = gitlog("%s"); let remotes: Value = { let output = get_git_output(["remote", "--verbose"]); let mut remotes = Vec::::new(); for line in output.lines() { if line.ends_with(" (fetch)") { let mut fields = line.split_whitespace(); if let (Some(name), Some(url)) = (fields.next(), fields.next()) { remotes.push(json!({"name": name, "url": url})) }; } } json!(remotes) }; json!({ "head": { "id": commit_sha, "author_name": author_name, "author_email": author_email, "committer_name": committer_name, "committer_email": committer_email, "message": message, }, "branch": vcs_branch, "remotes": remotes, }) } pub fn output_coveralls( results: &[ResultTuple], repo_token: Option<&str>, service_name: Option<&str>, service_number: &str, service_job_id: Option<&str>, service_pull_request: &str, service_flag_name: Option<&str>, commit_sha: &str, with_function_info: bool, output_file: Option<&Path>, vcs_branch: &str, parallel: bool, demangle: bool, ) { let demangle_options = DemangleOptions::name_only(); let mut source_files = Vec::new(); for (abs_path, rel_path, result) in results { let end: u32 = result.lines.keys().last().unwrap_or(&0) + 1; let mut coverage = Vec::new(); for line in 1..end { let entry = result.lines.get(&line); if let Some(c) = entry { coverage.push(Value::from(*c)); } else { coverage.push(Value::Null); } } let mut branches = Vec::new(); for (line, taken) in &result.branches { for (n, b_t) in taken.iter().enumerate() { branches.push(*line); branches.push(0); branches.push(n as u32); branches.push(u32::from(*b_t)); } } if !with_function_info { source_files.push(json!({ "name": rel_path, "source_digest": get_digest(abs_path.clone()), "coverage": coverage, "branches": branches, })); } else { let mut functions = Vec::new(); for (name, function) in &result.functions { functions.push(json!({ "name": demangle!(name, demangle, demangle_options), "start": function.start, "exec": function.executed, })); } source_files.push(json!({ "name": rel_path, "source_digest": get_digest(abs_path.clone()), "coverage": coverage, "branches": branches, "functions": functions, })); } } let git = get_coveralls_git_info(commit_sha, vcs_branch); let mut result = json!({ "git": git, "source_files": source_files, "service_number": service_number, "service_pull_request": service_pull_request, "parallel": parallel, }); if let (Some(repo_token), Some(obj)) = (repo_token, result.as_object_mut()) { obj.insert("repo_token".to_string(), json!(repo_token)); } if let (Some(service_name), Some(obj)) = (service_name, result.as_object_mut()) { obj.insert("service_name".to_string(), json!(service_name)); } if let (Some(service_flag_name), Some(obj)) = (service_flag_name, result.as_object_mut()) { obj.insert("flag_name".to_string(), json!(service_flag_name)); } if let (Some(service_job_id), Some(obj)) = (service_job_id, result.as_object_mut()) { obj.insert("service_job_id".to_string(), json!(service_job_id)); } let mut writer = BufWriter::new(get_target_output_writable(output_file)); serde_json::to_writer(&mut writer, &result).unwrap(); } pub fn output_files(results: &[ResultTuple], output_file: Option<&Path>) { let mut writer = BufWriter::new(get_target_output_writable(output_file)); for (_, rel_path, _) in results { writeln!(writer, "{}", 
rel_path.display()).unwrap(); } } pub fn output_html( results: &[ResultTuple], output_dir: Option<&Path>, num_threads: usize, branch_enabled: bool, output_config_file: Option<&Path>, precision: usize, ) { let output = if let Some(output_dir) = output_dir { PathBuf::from(output_dir) } else { PathBuf::from("./html") }; if output.exists() { if !output.is_dir() { eprintln!("{} is not a directory", output.to_str().unwrap()); return; } } else if std::fs::create_dir_all(&output).is_err() { eprintln!("Cannot create directory {}", output.to_str().unwrap()); return; } let (sender, receiver) = unbounded(); let stats = Arc::new(Mutex::new(HtmlGlobalStats::default())); let mut threads = Vec::with_capacity(num_threads); let (tera, config) = html::get_config(output_config_file); for i in 0..num_threads { let receiver = receiver.clone(); let output = output.clone(); let config = config.clone(); let stats = stats.clone(); let tera = tera.clone(); let t = thread::Builder::new() .name(format!("Consumer HTML {}", i)) .spawn(move || { html::consumer_html( &tera, receiver, stats, &output, config, branch_enabled, precision, ); }) .unwrap(); threads.push(t); } for (abs_path, rel_path, result) in results { sender .send(Some(HtmlItem { abs_path: abs_path.to_path_buf(), rel_path: rel_path.to_path_buf(), result: result.clone(), })) .unwrap(); } for _ in 0..num_threads { sender.send(None).unwrap(); } for t in threads { if t.join().is_err() { process::exit(1); } } let global = Arc::try_unwrap(stats).unwrap().into_inner().unwrap(); html::gen_index(&tera, &global, &config, &output, branch_enabled, precision); for style in html::BadgeStyle::iter() { html::gen_badge(&tera, &global.stats, &config, &output, style); } html::gen_coverage_json(&global.stats, &config, &output, precision); } pub fn output_markdown(results: &[ResultTuple], output_file: Option<&Path>, precision: usize) { #[derive(Tabled)] struct LineSummary { file: String, coverage: String, covered: String, missed_lines: String, } fn format_pair(start: u32, end: u32) -> String { if start == end { start.to_string() } else { format!("{}-{}", start, end) } } fn format_lines(lines: &BTreeMap) -> (usize, String) { let mut total_missed = 0; let mut missed = Vec::new(); let mut start: u32 = 0; let mut end: u32 = 0; for (&line, &hits) in lines { if hits == 0 { total_missed += 1; if start == 0 { start = line; } end = line; } else if start != 0 { missed.push(format_pair(start, end)); start = 0; } } if start != 0 { missed.push(format_pair(start, end)); } (total_missed, missed.join(", ")) } let mut summary = Vec::new(); let mut total_lines: usize = 0; let mut total_covered: usize = 0; for (_, rel_path, result) in results { let (missed, missed_lines) = format_lines(&result.lines); let covered: usize = result.lines.len() - missed; summary.push(LineSummary { file: rel_path.display().to_string(), coverage: format!( "{:.precision$}%", (covered as f32 * 100.0 / result.lines.len() as f32), ), covered: format!("{} / {}", covered, result.lines.len()), missed_lines, }); total_lines += result.lines.len(); total_covered += covered; } let mut writer = BufWriter::new(get_target_output_writable(output_file)); writeln!(writer, "{}", Table::new(summary).with(Style::markdown())).unwrap(); writeln!(writer).unwrap(); writeln!( writer, "Total coverage: {:.precision$}%", (total_covered as f32 * 100.0 / total_lines as f32), ) .unwrap() } #[cfg(test)] mod tests { use super::*; use std::{collections::BTreeMap, path::Path}; fn read_file(path: &Path) -> String { let mut f = 
File::open(path).unwrap_or_else(|_| panic!("{:?} file not found", path.file_name())); let mut s = String::new(); f.read_to_string(&mut s).unwrap(); s } #[test] fn test_lcov_brf_brh() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_lcov_brf_brh.info"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 11)].iter().cloned().collect(), branches: { let mut map = BTreeMap::new(); // 3 hit branches over 10 map.insert(1, vec![true, false, false, true, false, false]); map.insert(2, vec![false, false, false, true]); map }, functions: FxHashMap::default(), }, )]; output_lcov(&results, Some(&file_path), false); let results = read_file(&file_path); assert!(results.contains("BRF:10\n")); assert!(results.contains("BRH:3\n")); } #[test] fn test_lcov_demangle() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_lcov_demangle"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: BTreeMap::new(), branches: BTreeMap::new(), functions: { let mut map = FxHashMap::default(); map.insert( "_RINvNtC3std3mem8align_ofNtNtC3std3mem12DiscriminantE".to_string(), Function { start: 1, executed: true, }, ); map.insert( "_ZN9wikipedia7article6formatEv".to_string(), Function { start: 2, executed: true, }, ); map.insert( "hello_world".to_string(), Function { start: 3, executed: true, }, ); map }, }, )]; output_lcov(&results, Some(&file_path), true); let results = read_file(&file_path); assert!(results.contains("FN:1,std::mem::align_of::\n")); assert!(results.contains("FN:2,wikipedia::article::format\n")); assert!(results.contains("FN:3,hello_world\n")); } #[test] fn test_covdir() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_covdir.json"; let file_path = tmp_dir.path().join(file_name); let results = vec![ ( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 11)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, ), ( PathBuf::from("foo/bar/b.cpp"), PathBuf::from("foo/bar/b.cpp"), CovResult { lines: [(1, 0), (2, 10), (4, 0)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, ), ( PathBuf::from("foo/c.cpp"), PathBuf::from("foo/c.cpp"), CovResult { lines: [(1, 10), (4, 1)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, ), ( PathBuf::from("/foo/d.cpp"), PathBuf::from("/foo/d.cpp"), CovResult { lines: [(1, 10), (2, 0)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, ), ]; output_covdir(&results, Some(&file_path), 2); let results: Value = serde_json::from_str(&read_file(&file_path)).unwrap(); let expected_path = PathBuf::from("./test/").join(file_name); let expected: Value = serde_json::from_str(&read_file(&expected_path)).unwrap(); assert_eq!(results, expected); } #[test] fn test_coveralls_service_job_id() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_coveralls_service_job_id.json"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 11)].iter().cloned().collect(), branches: 
BTreeMap::new(), functions: FxHashMap::default(), }, )]; let expected_service_job_id: &str = "100500"; let with_function_info: bool = true; let parallel: bool = true; output_coveralls( &results, None, None, "unused", Some(expected_service_job_id), "unused", Some("unused"), "unused", with_function_info, Some(&file_path), "unused", parallel, false, ); let results: Value = serde_json::from_str(&read_file(&file_path)).unwrap(); assert_eq!(results["service_job_id"], expected_service_job_id); } #[test] fn test_coveralls_service_flag_name() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_coveralls_service_job_id.json"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 11)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, )]; let expected_service_job_id: &str = "100500"; let expected_flag_name: &str = "expected flag name"; let with_function_info: bool = true; let parallel: bool = true; output_coveralls( &results, None, None, "unused", Some(expected_service_job_id), "unused", Some(expected_flag_name), "unused", with_function_info, Some(&file_path), "unused", parallel, false, ); let results: Value = serde_json::from_str(&read_file(&file_path)).unwrap(); assert_eq!(results["flag_name"], expected_flag_name); } #[test] fn test_coveralls_token_field_is_absent_if_arg_is_none() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_coveralls_token.json"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 11)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, )]; let token = None; let with_function_info: bool = true; let parallel: bool = true; output_coveralls( &results, token, None, "unused", None, "unused", Some("unused"), "unused", with_function_info, Some(&file_path), "unused", parallel, false, ); let results: Value = serde_json::from_str(&read_file(&file_path)).unwrap(); assert_eq!(results.get("repo_token"), None); } #[test] fn test_coveralls_service_fields_are_absent_if_args_are_none() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_coveralls_service_fields.json"; let file_path = tmp_dir.path().join(file_name); let results = vec![( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 11)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, )]; let service_name = None; let service_job_id = None; let with_function_info: bool = true; let parallel: bool = true; output_coveralls( &results, None, service_name, "unused", service_job_id, "unused", None, "unused", with_function_info, Some(&file_path), "unused", parallel, false, ); let results: Value = serde_json::from_str(&read_file(&file_path)).unwrap(); assert_eq!(results.get("service_name"), None); assert_eq!(results.get("service_job_id"), None); assert_eq!(results.get("flag_name"), None) } #[test] fn test_markdown() { let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let file_name = "test_markdown"; let file_path = tmp_dir.path().join(file_name); let results = vec![ ( PathBuf::from("foo/bar/a.cpp"), PathBuf::from("foo/bar/a.cpp"), CovResult { lines: [(1, 10), (2, 
11)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, ), ( PathBuf::from("foo/bar/b.cpp"), PathBuf::from("foo/bar/b.cpp"), CovResult { lines: [(1, 0), (2, 10), (4, 10), (5, 0), (7, 0)] .iter() .cloned() .collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), }, ), ]; output_markdown(&results, Some(&file_path), 2); let results = &read_file(&file_path); let expected = "| file | coverage | covered | missed_lines | |---------------|----------|---------|--------------| | foo/bar/a.cpp | 100.00% | 2 / 2 | | | foo/bar/b.cpp | 40.00% | 2 / 5 | 1, 5-7 | Total coverage: 57.14% "; assert_eq!(results, expected); } } grcov-0.8.22/src/parser.rs000064400000000000000000002036601046102023000134750ustar 00000000000000use flate2::read::GzDecoder; use serde::{Deserialize, Deserializer}; use std::cmp::Ordering; use std::collections::{btree_map, hash_map, BTreeMap}; use std::fmt; use std::fs::File; use std::io::{self, BufRead, BufReader, Read}; use std::num::ParseIntError; use std::path::Path; use std::str; use std::sync::Arc; use log::error; use quick_xml::encoding::Decoder; use quick_xml::encoding::EncodingError; use quick_xml::events::attributes::AttrError; use quick_xml::events::{BytesStart, Event}; use quick_xml::Reader; use rustc_hash::FxHashMap; use crate::defs::*; #[derive(Debug)] pub enum ParserError { Io(io::Error), Parse(String), InvalidRecord(String), InvalidData(String), } impl From for ParserError { fn from(err: io::Error) -> ParserError { ParserError::Io(err) } } impl From for ParserError { fn from(err: quick_xml::Error) -> ParserError { match err { quick_xml::Error::Io(e) => ParserError::Io(Arc::try_unwrap(e).unwrap()), _ => ParserError::Parse(format!("{:?}", err)), } } } impl From for ParserError { fn from(err: EncodingError) -> ParserError { ParserError::Parse(format!("{:?}", err)) } } impl From for ParserError { fn from(err: AttrError) -> ParserError { ParserError::Parse(format!("{:?}", err)) } } impl From for ParserError { fn from(err: ParseIntError) -> ParserError { ParserError::Parse(err.to_string()) } } impl fmt::Display for ParserError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { ParserError::Io(ref err) => write!(f, "IO error: {}", err), ParserError::Parse(ref s) => write!(f, "Record containing invalid integer: '{}'", s), ParserError::InvalidRecord(ref s) => write!(f, "Invalid record: '{}'", s), ParserError::InvalidData(ref s) => write!(f, "Invalid data: '{}'", s), } } } macro_rules! try_parse { ($v:expr, $l:expr) => { match $v.parse() { Ok(val) => val, Err(_err) => return Err(ParserError::Parse($l.to_string())), } }; } macro_rules! try_next { ($v:expr, $l:expr) => { if let Some(val) = $v.next() { val } else { return Err(ParserError::InvalidRecord($l.to_string())); } }; } macro_rules! 
try_parse_next { ($v:expr, $l:expr) => { try_parse!(try_next!($v, $l), $l) }; } fn remove_newline(l: &mut Vec) { loop { let last = { let last = l.last(); if last.is_none() { break; } *last.unwrap() }; if last != b'\n' && last != b'\r' { break; } l.pop(); } } pub fn add_branch(branches: &mut BTreeMap>, line_no: u32, no: u32, taken: bool) { match branches.entry(line_no) { btree_map::Entry::Occupied(c) => { let v = c.into_mut(); let l = v.len(); let no = no as usize; match no.cmp(&l) { Ordering::Equal => v.push(taken), Ordering::Greater => { v.extend(vec![false; no - l]); v.push(taken); } Ordering::Less => v[no] |= taken, } } btree_map::Entry::Vacant(v) => { v.insert(vec![taken; 1]); } }; } pub fn parse_lcov( buffer: Vec, branch_enabled: bool, ) -> Result, ParserError> { let mut cur_file = None; let mut cur_lines = BTreeMap::new(); let mut cur_branches = BTreeMap::new(); let mut cur_functions = FxHashMap::default(); // We only log the duplicated FN error once per parse_lcov call. let mut duplicated_error_logged = false; let mut results = Vec::new(); let iter = &mut buffer.iter().peekable(); const SF: u32 = (b'S' as u32) * (1 << 8) + (b'F' as u32); const DA: u32 = (b'D' as u32) * (1 << 8) + (b'A' as u32); const FN: u32 = (b'F' as u32) * (1 << 8) + (b'N' as u32); const FNDA: u32 = (b'F' as u32) * (1 << 24) + (b'N' as u32) * (1 << 16) + (b'D' as u32) * (1 << 8) + (b'A' as u32); const BRDA: u32 = (b'B' as u32) * (1 << 24) + (b'R' as u32) * (1 << 16) + (b'D' as u32) * (1 << 8) + (b'A' as u32); let mut line = 0; while let Some(c) = iter.next() { line += 1; match *c { b'e' => { // we've a end_of_record results.push(( cur_file.unwrap(), CovResult { lines: cur_lines, branches: cur_branches, functions: cur_functions, }, )); cur_file = None; cur_lines = BTreeMap::new(); cur_branches = BTreeMap::new(); cur_functions = FxHashMap::default(); iter.take_while(|&&c| c != b'\n').last(); } b'\n' => { continue; } _ => { if *c != b'S' && *c != b'D' && *c != b'F' && *c != b'B' { iter.take_while(|&&c| c != b'\n').last(); continue; } let key = iter .take_while(|&&c| c.is_ascii_uppercase()) .try_fold(*c as u32, |r, &x| { r.checked_mul(1 << 8)?.checked_add(u32::from(x)) }); if key.is_none() { return Err(ParserError::InvalidRecord(format!( "Invalid key at line {}", line ))); } match key.unwrap() { SF => { // SF:string cur_file = Some( iter.take_while(|&&c| c != b'\n' && c != b'\r') .map(|&c| c as char) .collect(), ); } DA => { // DA:uint,int if let Some(c) = iter.peek() { if !c.is_ascii_digit() { return Err(ParserError::InvalidRecord(format!( "DA at line {}", line ))); } } let line_no = iter .take_while(|&&c| c.is_ascii_digit()) .fold(0, |r, &x| r * 10 + u32::from(x - b'0')); if iter.peek().is_none() { return Err(ParserError::InvalidRecord(format!("DA at line {}", line))); } let execution_count = if let Some(c) = iter.next() { if *c == b'-' { iter.take_while(|&&c| c != b'\n').last(); 0 } else { iter.take_while(|&&c| c.is_ascii_digit()) .fold(u64::from(*c - b'0'), |r, &x| { r * 10 + u64::from(x - b'0') }) } } else { 0 }; *cur_lines.entry(line_no).or_insert(0) += execution_count; } FN => { // FN:int,string if let Some(c) = iter.peek() { if !c.is_ascii_digit() { return Err(ParserError::InvalidRecord(format!( "FN at line {}", line ))); } } let start = iter .take_while(|&&c| c.is_ascii_digit()) .fold(0, |r, &x| r * 10 + u32::from(x - b'0')); if iter.peek().is_none() { return Err(ParserError::InvalidRecord(format!("FN at line {}", line))); } let f_name: String = iter .take_while(|&&c| c != b'\n' && c != b'\r') .map(|&c| c as 
char) .collect(); if !duplicated_error_logged && cur_functions.contains_key(&f_name) { error!( "FN '{}' duplicated for '{}' in a lcov file", f_name, cur_file.as_ref().unwrap() ); duplicated_error_logged = true; } cur_functions.insert( f_name, Function { start, executed: false, }, ); } FNDA => { // FNDA:int,string if let Some(c) = iter.peek() { if !c.is_ascii_digit() { return Err(ParserError::InvalidRecord(format!( "FNDA at line {}", line ))); } } let executed = iter .take_while(|&&c| c.is_ascii_digit()) .fold(0, |r, &x| r * 10 + u64::from(x - b'0')); if iter.peek().is_none() { return Err(ParserError::InvalidRecord(format!( "FNDA at line {}", line ))); } let f_name: String = iter .take_while(|&&c| c != b'\n' && c != b'\r') .map(|&c| c as char) .collect(); if let Some(f) = cur_functions.get_mut(&f_name) { f.executed |= executed != 0; } else { return Err(ParserError::Parse(format!( "FN record missing for function {}", f_name ))); } } BRDA => { // BRDA:int,int,int,int or - if branch_enabled { if let Some(c) = iter.peek() { if !c.is_ascii_digit() { return Err(ParserError::InvalidRecord(format!( "BRDA at line {}", line ))); } } let line_no = iter .take_while(|&&c| c.is_ascii_digit()) .fold(0, |r, &x| r * 10 + u32::from(x - b'0')); if iter.peek().is_none() { return Err(ParserError::InvalidRecord(format!( "BRDA at line {}", line ))); } let _block_number = iter .take_while(|&&c| c.is_ascii_digit()) .fold(0, |r, &x| r * 10 + u64::from(x - b'0')); if iter.peek().is_none() { return Err(ParserError::InvalidRecord(format!( "BRDA at line {}", line ))); } let branch_number = iter .take_while(|&&c| c.is_ascii_digit()) .fold(0, |r, &x| r * 10 + u32::from(x - b'0')); if iter.peek().is_none() { return Err(ParserError::InvalidRecord(format!( "BRDA at line {}", line ))); } let taken = iter .take_while(|&&c| c != b'\n' && c != b'\r') .any(|&x| x != b'-'); add_branch(&mut cur_branches, line_no, branch_number, taken); } else { iter.take_while(|&&c| c != b'\n').last(); } } _ => { iter.take_while(|&&c| c != b'\n').last(); } } } } } Ok(results) } #[derive(Debug, Deserialize)] #[allow(dead_code)] struct GcovJson { format_version: String, gcc_version: String, // the cwd during gcno generation current_working_directory: Option, // the file used to generated this json data_file: String, files: Vec, } #[derive(Debug, Deserialize)] struct GcovFile { file: String, functions: Vec, lines: Vec, } #[derive(Debug, Deserialize)] #[allow(dead_code)] struct GcovLine { line_number: u32, function_name: Option, #[serde(deserialize_with = "deserialize_counter")] count: u64, unexecuted_block: bool, branches: Vec, } #[derive(Debug, Deserialize)] #[allow(dead_code)] struct GcovBr { #[serde(deserialize_with = "deserialize_counter")] count: u64, throw: bool, fallthrough: bool, } #[derive(Debug, Deserialize)] #[allow(dead_code)] struct GcovFunction { name: String, demangled_name: String, start_line: u32, start_column: u32, end_line: u32, end_column: u32, blocks: u32, blocks_executed: u32, #[serde(deserialize_with = "deserialize_counter")] execution_count: u64, } // JSON sometimes surprises us with floats where we expected integers, use // a custom deserializer to ensure all the counters are converted to u64. 
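// For example, a counter emitted as a float ("count": 3.0e2) should
// deserialize to 300u64. A minimal sketch of a round-trip check
// (hypothetical, not one of this crate's tests):
//
//     #[derive(serde::Deserialize)]
//     struct C {
//         #[serde(deserialize_with = "deserialize_counter")]
//         count: u64,
//     }
//     let c: C = serde_json::from_str(r#"{"count": 3.0e2}"#).unwrap();
//     assert_eq!(c.count, 300);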
pub fn deserialize_counter<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
    D: Deserializer<'de>,
{
    let n: serde_json::Number = Deserialize::deserialize(deserializer)?;
    if n.is_f64() {
        let value: f64 = n.as_f64().unwrap();
        if (value >= 0.0) && (value <= u64::MAX as f64) {
            return Ok(value as u64);
        }
    }
    match n.as_u64() {
        Some(value) => Ok(value),
        None => Err(serde::de::Error::custom(format!(
            "Unable to parse u64 from {}",
            n
        ))),
    }
}

pub fn parse_gcov_gz(gcov_path: &Path) -> Result<Vec<(String, CovResult)>, ParserError> {
    let f = File::open(gcov_path)
        .unwrap_or_else(|_| panic!("Failed to open gcov file {}", gcov_path.display()));
    let file = BufReader::new(&f);
    let gz = GzDecoder::new(file);
    let mut gcov: GcovJson = serde_json::from_reader(gz).unwrap();
    let mut results = Vec::new();
    if gcov.format_version != "1" {
        error!(
            "Format version {} is not expected, please file a bug on https://github.com/mozilla/grcov",
            gcov.format_version
        );
    }
    for mut file in gcov.files.drain(..) {
        let mut lines = BTreeMap::new();
        let mut branches = BTreeMap::new();
        for mut line in file.lines.drain(..) {
            lines.insert(line.line_number, line.count);
            if !line.branches.is_empty() {
                branches.insert(
                    line.line_number,
                    line.branches.drain(..).map(|b| b.count > 0).collect(),
                );
            }
        }
        if lines.is_empty() {
            continue;
        }
        let mut functions = FxHashMap::default();
        for fun in file.functions.drain(..) {
            functions.insert(
                fun.demangled_name,
                Function {
                    start: fun.start_line,
                    executed: fun.execution_count > 0,
                },
            );
        }
        results.push((
            file.file,
            CovResult {
                lines,
                branches,
                functions,
            },
        ));
    }
    Ok(results)
}

pub fn parse_gcov(gcov_path: &Path) -> Result<Vec<(String, CovResult)>, ParserError> {
    let mut cur_file = None;
    let mut cur_lines = BTreeMap::new();
    let mut cur_branches = BTreeMap::new();
    let mut cur_functions = FxHashMap::default();
    let mut results = Vec::new();

    let f = File::open(gcov_path)
        .unwrap_or_else(|_| panic!("Failed to open gcov file {}", gcov_path.display()));
    let mut file = BufReader::new(&f);
    let mut l = vec![];

    loop {
        l.clear();

        let num_bytes = file.read_until(b'\n', &mut l)?;
        if num_bytes == 0 {
            break;
        }
        remove_newline(&mut l);

        let l = unsafe { str::from_utf8_unchecked(&l) };

        let mut key_value = l.splitn(2, ':');
        let key = try_next!(key_value, l);
        let value = try_next!(key_value, l);

        match key {
            "file" => {
                if let Some(cur_file) = cur_file.filter(|_: &String| !cur_lines.is_empty()) {
                    // println!("{} {} {:?}", gcov_path.display(), cur_file, cur_lines);
                    results.push((
                        cur_file,
                        CovResult {
                            lines: cur_lines,
                            branches: cur_branches,
                            functions: cur_functions,
                        },
                    ));
                }

                cur_file = Some(value.to_owned());
                cur_lines = BTreeMap::new();
                cur_branches = BTreeMap::new();
                cur_functions = FxHashMap::default();
            }
            "function" => {
                let mut f_splits = value.splitn(3, ',');
                let start = try_parse_next!(f_splits, l);
                let executed = try_next!(f_splits, l) != "0";
                let f_name = try_next!(f_splits, l);
                cur_functions.insert(f_name.to_owned(), Function { start, executed });
            }
            "lcount" => {
                let mut values = value.splitn(2, ',');
                let line_no = try_parse_next!(values, l);
                let execution_count = try_next!(values, l);
                if execution_count == "0" || execution_count.starts_with('-') {
                    cur_lines.insert(line_no, 0);
                } else {
                    cur_lines.insert(line_no, try_parse!(execution_count, l));
                }
            }
            "branch" => {
                let mut values = value.splitn(2, ',');
                let line_no = try_parse_next!(values, l);
                let taken = try_next!(values, l) == "taken";
                match cur_branches.entry(line_no) {
                    btree_map::Entry::Occupied(c) => {
                        let v = c.into_mut();
                        v.push(taken);
                    }
                    btree_map::Entry::Vacant(p) => {
                        p.insert(vec![taken; 1]);
                    }
                }
            }
            _ => {}
        }
    }

    if
!cur_lines.is_empty() { results.push(( cur_file.unwrap(), CovResult { lines: cur_lines, branches: cur_branches, functions: cur_functions, }, )); } Ok(results) } fn get_xml_attribute( reader: &Reader, event: &BytesStart<'_>, name: &str, ) -> Result { for a in event.attributes() { let a = a?; if a.key.into_inner() == name.as_bytes() { return Ok(a.decode_and_unescape_value(reader.decoder())?.into_owned()); } } Err(ParserError::InvalidRecord(format!( "Attribute {} not found", name ))) } fn parse_jacoco_report_sourcefile( parser: &mut Reader, buf: &mut Vec, ) -> Result { let mut lines: BTreeMap = BTreeMap::new(); let mut branches: BTreeMap> = BTreeMap::new(); loop { match parser.read_event_into(buf) { Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"line" => { let (mut ci, mut cb, mut mb, mut nr) = (None, None, None, None); for a in e.attributes() { let a = a?; match a.key.into_inner() { b"ci" => ci = Some(Decoder {}.decode(&a.value)?.parse::()?), b"cb" => cb = Some(Decoder {}.decode(&a.value)?.parse::()?), b"mb" => mb = Some(Decoder {}.decode(&a.value)?.parse::()?), b"nr" => nr = Some(Decoder {}.decode(&a.value)?.parse::()?), _ => (), } } fn try_att(opt: Option, name: &str) -> Result { opt.ok_or_else(|| { ParserError::InvalidRecord(format!("Attribute {} not found", name)) }) } let ci = try_att(ci, "ci")?; let cb = try_att(cb, "cb")?; let mb = try_att(mb, "mb")?; let nr = try_att(nr, "nr")?; if mb > 0 || cb > 0 { // This line is a branch. let mut v = vec![true; cb as usize]; v.extend(vec![false; mb as usize]); branches.insert(nr, v); } else { // This line is a statement. // JaCoCo does not feature execution counts, so we set the // count to 0 or 1. let hit = u64::from(ci > 0); lines.insert(nr, hit); } } Ok(Event::End(ref e)) if e.local_name().into_inner() == b"sourcefile" => { break; } Err(e) => return Err(ParserError::Parse(e.to_string())), _ => {} } buf.clear(); } Ok(JacocoReport { lines, branches }) } fn parse_jacoco_report_method( parser: &mut Reader, buf: &mut Vec, start: u32, ) -> Result { let mut executed = false; loop { match parser.read_event_into(buf) { Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"counter" => { if get_xml_attribute(parser, e, "type")? == "METHOD" { executed = get_xml_attribute(parser, e, "covered")?.parse::()? 
> 0; } } Ok(Event::End(ref e)) if e.local_name().into_inner() == b"method" => break, Err(e) => return Err(ParserError::Parse(e.to_string())), _ => {} } buf.clear(); } Ok(Function { start, executed }) } fn parse_jacoco_report_class( parser: &mut Reader, buf: &mut Vec, class_name: &str, ) -> Result { let mut functions: FunctionMap = FxHashMap::default(); loop { match parser.read_event_into(buf) { Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"method" => { let name = get_xml_attribute(parser, e, "name")?; let full_name = format!("{}#{}", class_name, name); let start_line = get_xml_attribute(parser, e, "line")?.parse::()?; let function = parse_jacoco_report_method(parser, buf, start_line)?; functions.insert(full_name, function); } Ok(Event::End(ref e)) if e.local_name().into_inner() == b"class" => break, Err(e) => return Err(ParserError::Parse(e.to_string())), _ => {} } buf.clear(); } Ok(functions) } fn parse_jacoco_report_package( parser: &mut Reader, buf: &mut Vec, package: &str, ) -> Result, ParserError> { let mut results_map: FxHashMap = FxHashMap::default(); loop { match parser.read_event_into(buf) { Ok(Event::Start(ref e)) => { match e.local_name().into_inner() { b"class" => { // Fully qualified class name: "org/example/Person$Age" let fq_class = get_xml_attribute(parser, e, "name")?; // Class name: "Person$Age" let class = fq_class .split('/') .next_back() .expect("Failed to parse class name"); // Class name "Person" let top_class = class .split('$') .next() .expect("Failed to parse top class name"); // Process all and for this class let functions = parse_jacoco_report_class(parser, buf, class)?; match results_map.entry(top_class.to_string()) { hash_map::Entry::Occupied(obj) => { obj.into_mut().functions.extend(functions); } hash_map::Entry::Vacant(v) => { v.insert(CovResult { functions, lines: BTreeMap::new(), branches: BTreeMap::new(), }); } }; } b"sourcefile" => { let sourcefile = get_xml_attribute(parser, e, "name")?; let class = sourcefile.trim_end_matches(".java"); let JacocoReport { lines, branches } = parse_jacoco_report_sourcefile(parser, buf)?; match results_map.entry(class.to_string()) { hash_map::Entry::Occupied(obj) => { let obj = obj.into_mut(); obj.lines = lines; obj.branches = branches; } hash_map::Entry::Vacant(v) => { v.insert(CovResult { functions: FxHashMap::default(), lines, branches, }); } }; } &_ => {} } } Ok(Event::End(ref e)) if e.local_name().into_inner() == b"package" => break, Err(e) => return Err(ParserError::Parse(e.to_string())), _ => {} } } for (class, result) in &results_map { if result.lines.is_empty() && result.branches.is_empty() { return Err(ParserError::InvalidData(format!( "Class {}/{} is not the top class in its file.", package, class ))); } } // Change all keys from the class name to the file name and turn the result into a Vec. // If package is the empty string, we have to trim the leading '/' in order to obtain a // relative path. 
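    // e.g. package "org/example" and class "Person" become
    // "org/example/Person.java", while package "" and class "Main" become
    // "Main.java" rather than "/Main.java".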
Ok(results_map .into_iter() .map(|(class, result)| { ( format!("{}/{}.java", package, class) .trim_start_matches('/') .to_string(), result, ) }) .collect()) } pub fn parse_jacoco_xml_report( xml_reader: BufReader, ) -> Result, ParserError> { let mut parser = Reader::from_reader(xml_reader); let config = parser.config_mut(); config.expand_empty_elements = true; config.trim_text(false); let mut results = Vec::new(); let mut buf = Vec::new(); loop { match parser.read_event_into(&mut buf) { Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"package" => { let package = get_xml_attribute(&parser, e, "name")?; let mut package_results = parse_jacoco_report_package(&mut parser, &mut buf, &package)?; results.append(&mut package_results); } Ok(Event::Eof) => break, Err(e) => return Err(ParserError::Parse(e.to_string())), _ => {} } buf.clear(); } Ok(results) } #[cfg(test)] mod tests { use super::*; #[test] fn test_remove_newline() { let mut l = "Marco".as_bytes().to_vec(); remove_newline(&mut l); assert_eq!(l, "Marco".as_bytes().to_vec()); let mut l = "Marco\n".as_bytes().to_vec(); remove_newline(&mut l); assert_eq!(l, "Marco".as_bytes().to_vec()); let mut l = "Marco\r".as_bytes().to_vec(); remove_newline(&mut l); assert_eq!(l, "Marco".as_bytes().to_vec()); let mut l = "Marco\r\n".as_bytes().to_vec(); remove_newline(&mut l); assert_eq!(l, "Marco".as_bytes().to_vec()); let mut l = "\r\n".as_bytes().to_vec(); remove_newline(&mut l); assert_eq!(l, "".as_bytes().to_vec()); } #[test] fn test_lcov_parser() { let mut f = File::open("./test/prova.info").expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let results = parse_lcov(buf, false).unwrap(); assert_eq!(results.len(), 603); let (ref source_name, ref result) = results[0]; assert_eq!( source_name, "resource://gre/components/MainProcessSingleton.js" ); assert_eq!( result.lines, [ (7, 1), (9, 1), (10, 1), (12, 2), (13, 1), (16, 1), (17, 1), (18, 2), (19, 1), (21, 1), (22, 0), (23, 0), (24, 0), (28, 1), (29, 0), (30, 0), (32, 0), (33, 0), (34, 0), (35, 0), (37, 0), (39, 0), (41, 0), (42, 0), (44, 0), (45, 0), (46, 0), (47, 0), (49, 0), (50, 0), (51, 0), (52, 0), (53, 0), (54, 0), (55, 0), (56, 0), (59, 0), (60, 0), (61, 0), (63, 0), (65, 0), (67, 1), (68, 2), (70, 1), (74, 1), (75, 1), (76, 1), (77, 1), (78, 1), (83, 1), (84, 1), (90, 1) ] .iter() .cloned() .collect() ); assert_eq!(result.branches, [].iter().cloned().collect()); assert!(result.functions.contains_key("MainProcessSingleton")); let func = result.functions.get("MainProcessSingleton").unwrap(); assert_eq!(func.start, 15); assert!(func.executed); assert!(result.functions.contains_key("logConsoleMessage")); let func = result.functions.get("logConsoleMessage").unwrap(); assert_eq!(func.start, 21); assert!(!func.executed); } #[test] fn test_lcov_parser_with_branch_parsing() { // Parse the same file, but with branch parsing enabled. 
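// Branch records in lcov syntax look like the following (an illustrative
// sketch consistent with the assertions below, not lines copied from
// test/prova.info):
//
//     BRDA:68,0,0,1
//     BRDA:68,0,1,1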
let mut f = File::open("./test/prova.info").expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let results = parse_lcov(buf, true).unwrap(); assert_eq!(results.len(), 603); let (ref source_name, ref result) = results[0]; assert_eq!( source_name, "resource://gre/components/MainProcessSingleton.js" ); assert_eq!( result.lines, [ (7, 1), (9, 1), (10, 1), (12, 2), (13, 1), (16, 1), (17, 1), (18, 2), (19, 1), (21, 1), (22, 0), (23, 0), (24, 0), (28, 1), (29, 0), (30, 0), (32, 0), (33, 0), (34, 0), (35, 0), (37, 0), (39, 0), (41, 0), (42, 0), (44, 0), (45, 0), (46, 0), (47, 0), (49, 0), (50, 0), (51, 0), (52, 0), (53, 0), (54, 0), (55, 0), (56, 0), (59, 0), (60, 0), (61, 0), (63, 0), (65, 0), (67, 1), (68, 2), (70, 1), (74, 1), (75, 1), (76, 1), (77, 1), (78, 1), (83, 1), (84, 1), (90, 1) ] .iter() .cloned() .collect() ); assert_eq!( result.branches, [ (34, vec![false, false]), (41, vec![false, false]), (44, vec![false, false]), (60, vec![false, false]), (63, vec![false, false]), (68, vec![true, true]) ] .iter() .cloned() .collect() ); assert!(result.functions.contains_key("MainProcessSingleton")); let func = result.functions.get("MainProcessSingleton").unwrap(); assert_eq!(func.start, 15); assert!(func.executed); assert!(result.functions.contains_key("logConsoleMessage")); let func = result.functions.get("logConsoleMessage").unwrap(); assert_eq!(func.start, 21); assert!(!func.executed); } #[test] fn test_lcov_parser_fn_with_commas() { let mut f = File::open("./test/prova_fn_with_commas.info").expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let results = parse_lcov(buf, true).unwrap(); assert_eq!(results.len(), 1); let (ref source_name, ref result) = results[0]; assert_eq!(source_name, "aFile.js"); assert_eq!( result.lines, [ (7, 1), (9, 1), (10, 1), (12, 2), (13, 1), (16, 1), (17, 1), (18, 2), (19, 1), (21, 1), (22, 0), (23, 0), (24, 0), (28, 1), (29, 0), (30, 0), (32, 0), (33, 0), (34, 0), (35, 0), (37, 0), (39, 0), (41, 0), (42, 0), (44, 0), (45, 0), (46, 0), (47, 0), (49, 0), (50, 0), (51, 0), (52, 0), (53, 0), (54, 0), (55, 0), (56, 0), (59, 0), (60, 0), (61, 0), (63, 0), (65, 0), (67, 1), (68, 2), (70, 1), (74, 1), (75, 1), (76, 1), (77, 1), (78, 1), (83, 1), (84, 1), (90, 1), (95, 1), (96, 1), (97, 1), (98, 1), (99, 1) ] .iter() .cloned() .collect() ); assert!(result.functions.contains_key("MainProcessSingleton")); let func = result.functions.get("MainProcessSingleton").unwrap(); assert_eq!(func.start, 15); assert!(func.executed); assert!(result .functions .contains_key("cubic-bezier(0.0, 0.0, 1.0, 1.0)")); let func = result .functions .get("cubic-bezier(0.0, 0.0, 1.0, 1.0)") .unwrap(); assert_eq!(func.start, 95); assert!(func.executed); } #[test] fn test_lcov_parser_empty_line() { let mut f = File::open("./test/empty_line.info").expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let results = parse_lcov(buf, true).unwrap(); assert_eq!(results.len(), 1); let (ref source_name, ref result) = results[0]; assert_eq!(source_name, "aFile.js"); assert_eq!( result.lines, [ (7, 1), (9, 1), (10, 1), (12, 2), (13, 1), (16, 1), (17, 1), (18, 2), (19, 1), (21, 1), (22, 0), (23, 0), (24, 0), (28, 1), (29, 0), (30, 0), (32, 0), (33, 0), (34, 0), (35, 0), (37, 0), (39, 0), (41, 0), (42, 0), (44, 0), (45, 0), (46, 0), (47, 0), (49, 0), (50, 0), (51, 0), (52, 0), (53, 0), (54, 0), (55, 0), (56, 0), (59, 0), (60, 0), (61, 0), (63, 0), (65, 0), (67, 1), (68, 2), (70, 1), 
(74, 1), (75, 1), (76, 1), (77, 1), (78, 1), (83, 1), (84, 1), (90, 1), (95, 1), (96, 1), (97, 1), (98, 1), (99, 1) ] .iter() .cloned() .collect() ); assert!(result.functions.contains_key("MainProcessSingleton")); let func = result.functions.get("MainProcessSingleton").unwrap(); assert_eq!(func.start, 15); assert!(func.executed); assert!(result .functions .contains_key("cubic-bezier(0.0, 0.0, 1.0, 1.0)")); let func = result .functions .get("cubic-bezier(0.0, 0.0, 1.0, 1.0)") .unwrap(); assert_eq!(func.start, 95); assert!(func.executed); } #[allow(non_snake_case)] #[test] fn test_lcov_parser_invalid_DA_record() { let mut f = File::open("./test/invalid_DA_record.info").expect("Failed to open lcov file"); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); let result = parse_lcov(buf, true); assert!(result.is_err()); } #[allow(non_snake_case)] #[test] fn test_lcov_parser_empty_DA_record() { let buf = "DA:152,4 DA:153,4 DA:154,8 DA:156,12 DA TN:http_3a_2f_2fweb_2dplatform_2etest_3a8000_2freferrer_2dpolicy_2fgen_2fsrcdoc_2dinherit_2emeta_2funset_2fiframe_2dtag_2ehttp_2ehtml_2c_20about_3ablank" .as_bytes().to_vec(); let result = parse_lcov(buf, true); assert!(result.is_err()); let error = result.unwrap_err(); assert_eq!(error.to_string(), "Invalid record: 'DA at line 5'"); } #[test] fn test_parser() { let results = parse_gcov(Path::new("./test/prova.gcov")).unwrap(); assert_eq!(results.len(), 10); let (ref source_name, ref result) = results[0]; assert_eq!(source_name, "/home/marco/Documenti/FD/mozilla-central/build-cov-gcc/dist/include/nsExpirationTracker.h"); assert_eq!( result.lines, [ (393, 0), (397, 0), (399, 0), (401, 0), (402, 0), (403, 0), (405, 0) ] .iter() .cloned() .collect() ); assert!(result.functions.contains_key("_ZN19nsExpirationTrackerIN11nsIDocument16SelectorCacheKeyELj4EE25ExpirationTrackerObserver7ReleaseEv")); let mut func = result.functions.get("_ZN19nsExpirationTrackerIN11nsIDocument16SelectorCacheKeyELj4EE25ExpirationTrackerObserver7ReleaseEv").unwrap(); assert_eq!(func.start, 393); assert!(!func.executed); let (ref source_name, ref result) = results[5]; assert_eq!( source_name, "/home/marco/Documenti/FD/mozilla-central/accessible/atk/Platform.cpp" ); assert_eq!( result.lines, [ (81, 0), (83, 0), (85, 0), (87, 0), (88, 0), (90, 0), (94, 0), (96, 0), (97, 0), (98, 0), (99, 0), (100, 0), (101, 0), (103, 0), (104, 0), (108, 0), (110, 0), (111, 0), (112, 0), (115, 0), (117, 0), (118, 0), (122, 0), (123, 0), (124, 0), (128, 0), (129, 0), (130, 0), (136, 17), (138, 17), (141, 0), (142, 0), (146, 0), (147, 0), (148, 0), (151, 0), (152, 0), (153, 0), (154, 0), (155, 0), (156, 0), (157, 0), (161, 0), (162, 0), (165, 0), (166, 0), (167, 0), (168, 0), (169, 0), (170, 0), (171, 0), (172, 0), (184, 0), (187, 0), (189, 0), (190, 0), (194, 0), (195, 0), (196, 0), (200, 0), (201, 0), (202, 0), (203, 0), (207, 0), (208, 0), (216, 17), (218, 17), (219, 0), (220, 0), (221, 0), (222, 0), (223, 0), (226, 17), (232, 0), (233, 0), (234, 0), (253, 17), (261, 11390), (265, 11390), (268, 373), (274, 373), (277, 373), (278, 373), (281, 373), (288, 373), (289, 373), (293, 373), (294, 373), (295, 373), (298, 373), (303, 5794), (306, 5794), (307, 5558), (309, 236), (311, 236), (312, 236), (313, 0), (316, 236), (317, 236), (318, 0), (321, 236), (322, 236), (323, 236), (324, 236), (327, 236), (328, 236), (329, 236), (330, 236), (331, 472), (332, 472), (333, 236), (338, 236), (339, 236), (340, 236), (343, 0), (344, 0), (345, 0), (346, 0), (347, 0), (352, 236), (353, 236), (354, 236), (355, 236), 
(361, 236), (362, 236), (364, 236), (365, 236), (370, 0), (372, 0), (373, 0), (374, 0), (376, 0) ] .iter() .cloned() .collect() ); assert!(result .functions .contains_key("_ZL13LoadGtkModuleR24GnomeAccessibilityModule")); func = result .functions .get("_ZL13LoadGtkModuleR24GnomeAccessibilityModule") .unwrap(); assert_eq!(func.start, 81); assert!(!func.executed); assert!(result .functions .contains_key("_ZN7mozilla4a11y12PlatformInitEv")); func = result .functions .get("_ZN7mozilla4a11y12PlatformInitEv") .unwrap(); assert_eq!(func.start, 136); assert!(func.executed); assert!(result .functions .contains_key("_ZN7mozilla4a11y16PlatformShutdownEv")); func = result .functions .get("_ZN7mozilla4a11y16PlatformShutdownEv") .unwrap(); assert_eq!(func.start, 216); assert!(func.executed); assert!(result.functions.contains_key("_ZN7mozilla4a11y7PreInitEv")); func = result.functions.get("_ZN7mozilla4a11y7PreInitEv").unwrap(); assert_eq!(func.start, 261); assert!(func.executed); assert!(result .functions .contains_key("_ZN7mozilla4a11y19ShouldA11yBeEnabledEv")); func = result .functions .get("_ZN7mozilla4a11y19ShouldA11yBeEnabledEv") .unwrap(); assert_eq!(func.start, 303); assert!(func.executed); } #[test] fn test_parser_gcov_with_negative_counts() { let results = parse_gcov(Path::new("./test/negative_counts.gcov")).unwrap(); assert_eq!(results.len(), 118); let (ref source_name, ref result) = results[14]; assert_eq!(source_name, "/home/marco/Documenti/FD/mozilla-central/build-cov-gcc/dist/include/mozilla/Assertions.h"); assert_eq!(result.lines, [(40, 0)].iter().cloned().collect()); } #[test] fn test_parser_gcov_with_64bit_counts() { let results = parse_gcov(Path::new("./test/64bit_count.gcov")).unwrap(); assert_eq!(results.len(), 46); let (ref source_name, ref result) = results[8]; assert_eq!( source_name, "/home/marco/Documenti/FD/mozilla-central/build-cov-gcc/dist/include/js/HashTable.h" ); assert_eq!( result.lines, [ (324, 8096), (343, 12174), (344, 6085), (345, 23331), (357, 10720), (361, 313_165_934), (399, 272_539_208), (402, 31_491_125), (403, 35_509_735), (420, 434_104), (709, 313_172_766), (715, 272_542_535), (801, 584_943_263), (822, 0), (825, 0), (826, 0), (828, 0), (829, 0), (831, 0), (834, 2_210_404_897), (835, 196_249_666), (838, 3_764_974), (840, 516_370_744), (841, 1_541_684), (842, 2_253_988_941), (843, 197_245_483), (844, 0), (845, 5_306_658), (846, 821_426_720), (847, 47_096_565), (853, 82_598_134), (854, 247_796_865), (886, 272_542_256), (887, 272_542_256), (904, 599_154_437), (908, 584_933_028), (913, 584_943_263), (916, 543_534_922), (917, 584_933_028), (940, 508_959_481), (945, 1_084_660_344), (960, 545_084_512), (989, 534_593), (990, 128_435), (1019, 427_973_453), (1029, 504_065_334), (1038, 1_910_289_238), (1065, 425_402), (1075, 10_613_316), (1076, 5_306_658), (1090, 392_499_332), (1112, 48_208), (1113, 48_208), (1114, 0), (1115, 0), (1118, 48211), (1119, 8009), (1120, 48211), (1197, 40347), (1202, 585_715_301), (1207, 1_171_430_602), (1210, 585_715_301), (1211, 910_968), (1212, 585_715_301), (1222, 30_644), (1223, 70_165), (1225, 1647), (1237, 4048), (1238, 4048), (1240, 8096), (1244, 6087), (1250, 6087), (1257, 6085), (1264, 6085), (1278, 6085), (1279, 6085), (1280, 0), (1283, 6085), (1284, 66935), (1285, 30425), (1286, 30425), (1289, 6085), (1293, 12171), (1294, 6086), (1297, 6087), (1299, 6087), (1309, 4048), (1310, 4048), (1316, 632_104_110), (1327, 251_893_735), (1329, 251_893_735), (1330, 251_893_735), (1331, 503_787_470), (1337, 528_619_265), (1344, 35_325_952), (1345, 
35_325_952), (1353, 26236), (1354, 13118), (1364, 305_520_839), (1372, 585_099_705), (1381, 585_099_705), (1382, 585_099_705), (1385, 585_099_705), (1391, 1_135_737_600), (1397, 242_807_686), (1400, 242_807_686), (1403, 1_032_741_488), (1404, 1_290_630), (1405, 1_042_115), (1407, 515_080_114), (1408, 184_996_962), (1412, 516_370_744), (1414, 516_370_744), (1415, 516_370_744), (1417, 154_330_912), (1420, 812_664_176), (1433, 47_004_405), (1442, 47_004_405), (1443, 47_004_405), (1446, 94_008_810), (1452, 9_086_049), (1456, 24_497_042), (1459, 12_248_521), (1461, 12_248_521), (1462, 24_497_042), (1471, 30642), (1474, 30642), (1475, 30642), (1476, 30642), (1477, 30642), (1478, 30642), (1484, 64904), (1485, 34260), (1489, 34260), (1490, 34260), (1491, 34260), (1492, 34260), (1495, 34260), (1496, 69_792_911), (1497, 139_524_496), (1498, 94_193_130), (1499, 47_096_565), (1500, 47_096_565), (1506, 61326), (1507, 30663), (1513, 58000), (1516, 35_325_952), (1518, 35_325_952), (1522, 29000), (1527, 29000), (1530, 29000), (1534, 0), (1536, 0), (1537, 0), (1538, 0), (1540, 0), (1547, 10_613_316), (1548, 1_541_684), (1549, 1_541_684), (1552, 3_764_974), (1554, 5_306_658), (1571, 8009), (1573, 8009), (1574, 8009), (1575, 31345), (1576, 5109), (1577, 5109), (1580, 8009), (1581, 1647), (1582, 8009), (1589, 0), (1592, 0), (1593, 0), (1594, 0), (1596, 0), (1597, 0), (1599, 0), (1600, 0), (1601, 0), (1604, 0), (1605, 0), (1606, 0), (1607, 0), (1609, 0), (1610, 0), (1611, 0), (1615, 0), (1616, 0), (1625, 0), (1693, 655_507), (1711, 35_615_006), (1730, 10720), (1732, 10720), (1733, 10720), (1735, 10720), (1736, 10720), (1739, 313_162_046), (1741, 313_162_046), (1743, 313_162_046), (1744, 313_162_046), (1747, 272_542_535), (1749, 272_542_535), (1750, 272_542_535), (1752, 272_542_535), (1753, 272_542_535), (1754, 272_542_256), (1755, 272_542_256), (1759, 35_509_724), (1761, 35_509_724), (1767, 71_019_448), (1772, 35_505_028), (1773, 179_105), (1776, 179_105), (1777, 179_105), (1780, 35_325_923), (1781, 35_326_057), (1785, 35_326_058), (1786, 29011), (1789, 71_010_332), (1790, 35_505_166), (1796, 35_505_166) ] .iter() .cloned() .collect() ); // Assert more stuff. 
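// Note: several counters above (e.g. 2_253_988_941) don't fit in an i32,
// which is why execution counts are stored as u64 throughout the parser:
//
//     assert!(2_253_988_941_u64 > i32::MAX as u64);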
} #[test] fn test_parser_gcov_with_branches() { let results = parse_gcov(Path::new("./test/intermediate_with_branches.gcov")).unwrap(); assert_eq!(results.len(), 1); let (ref source_name, ref result) = results[0]; assert_eq!(source_name, "/home/marco/Documenti/FD/mozilla-central/build-cov-gcc/dist/include/nsExpirationTracker.h"); assert_eq!( result.lines, [ (393, 0), (397, 0), (399, 0), (401, 1), (402, 0), (403, 0), (405, 0) ] .iter() .cloned() .collect() ); assert_eq!( result.branches, [(399, vec![false, false]), (401, vec![true, false])] .iter() .cloned() .collect() ); assert!(result.functions.contains_key("_ZN19nsExpirationTrackerIN11nsIDocument16SelectorCacheKeyELj4EE25ExpirationTrackerObserver7ReleaseEv")); let func = result.functions.get("_ZN19nsExpirationTrackerIN11nsIDocument16SelectorCacheKeyELj4EE25ExpirationTrackerObserver7ReleaseEv").unwrap(); assert_eq!(func.start, 393); assert!(!func.executed); } #[test] fn test_parser_gcov_rust_generics_with_two_parameters() { let results = parse_gcov(Path::new( "./test/rust/generics_with_two_parameters_intermediate.gcov", )) .unwrap(); assert_eq!(results.len(), 1); let (ref source_name, ref result) = results[0]; assert_eq!(source_name, "src/main.rs"); assert_eq!( result.lines, [(4, 3), (5, 3), (6, 1), (9, 2), (10, 1), (11, 1), (12, 2)] .iter() .cloned() .collect() ); assert_eq!(result.branches, [].iter().cloned().collect()); assert!(result .functions .contains_key("_ZN27rust_code_coverage_sample_24mainE")); let func = result .functions .get("_ZN27rust_code_coverage_sample_24mainE") .unwrap(); assert_eq!(func.start, 8); assert!(func.executed); assert!(result.functions.contains_key( "_ZN27rust_code_coverage_sample_244compare_types<[i32; 3],alloc::vec::Vec>E" )); let func = result .functions .get("_ZN27rust_code_coverage_sample_244compare_types<[i32; 3],alloc::vec::Vec>E") .unwrap(); assert_eq!(func.start, 3); assert!(func.executed); } #[test] fn test_parser_gcov_gz() { let results = parse_gcov_gz(Path::new( "./test/mozillavpn_serverconnection.gcno.gcov.json.gz", )) .unwrap(); assert_eq!(results.len(), 37); let (ref source_name, ref result) = results[0]; assert_eq!(source_name, "server/serverconnection.cpp"); assert_eq!( result.lines, [ (32, 0), (33, 0), (35, 0), (36, 0), (37, 0), (38, 0), (40, 0), (41, 0), (42, 0), (43, 0), (44, 0), (45, 0), (46, 0), (48, 0), (49, 0), (50, 0), (51, 0), (52, 0), (55, 0), (56, 0), (57, 0), (58, 0), (59, 0), (61, 0), (62, 0), (63, 0), (66, 0), (67, 0), (68, 0), (71, 0), (74, 0), (75, 0), (78, 0), (79, 0), (82, 0), (83, 0), (85, 0), (86, 0), (87, 0), (88, 0), (90, 0), (91, 0), (94, 0), (95, 0), (96, 0), (97, 0), (101, 0), (102, 0), (103, 0), (104, 0), (107, 0), (112, 0), (113, 0), (114, 0), (118, 0), (119, 0), (120, 0), (124, 0), (125, 0), (126, 0), (129, 0), (130, 0), (131, 0), (135, 0), (136, 0), (137, 0), (138, 0), (139, 0), (142, 0), (143, 0), (144, 0), (148, 0), (149, 0), (150, 0), (151, 0), (157, 0), (158, 0), (159, 0), (164, 0), (169, 0), (171, 0), (172, 0), (175, 0), (176, 0), (178, 0), (179, 0), (181, 0), (183, 0), (184, 0), (185, 0), (186, 0), (188, 0), (189, 0), (190, 0), (193, 0), (194, 0), (195, 0), (196, 0), (199, 0), (200, 0), (202, 0), (203, 0), (205, 0), (206, 0), (207, 0), (210, 0), (216, 0), (217, 0), (220, 0), (221, 0), (223, 0), (225, 0), (226, 0), (227, 0), (230, 0), (231, 0), (234, 0), (237, 0), (238, 0), (239, 0), (241, 0), (242, 0), (243, 0), (245, 0), (247, 0), (248, 0), (249, 0), (251, 0), (252, 0), (254, 0), (255, 0), (256, 0), (257, 0), (258, 0), (260, 0), (261, 0), (262, 0), (263, 
0), (264, 0), (267, 0), (268, 0), (270, 0), (271, 0), (272, 0), (273, 0), (274, 0), (275, 0), (279, 0) ] .iter() .cloned() .collect() ); assert_eq!(result.branches, [].iter().cloned().collect()); assert!(result .functions .contains_key("ServerConnection::readData()")); let func = result .functions .get("ServerConnection::readData()") .unwrap(); assert_eq!(func.start, 188); assert!(!func.executed); } #[test] fn test_parser_jacoco_xml_basic() { let mut lines: BTreeMap = BTreeMap::new(); lines.insert(1, 0); lines.insert(4, 1); lines.insert(6, 1); let mut functions: FunctionMap = FxHashMap::default(); functions.insert( String::from("hello#"), Function { executed: false, start: 1, }, ); functions.insert( String::from("hello#main"), Function { executed: true, start: 3, }, ); let mut branches: BTreeMap> = BTreeMap::new(); branches.insert(3, vec![true, true]); let expected = vec![( String::from("hello.java"), CovResult { lines, branches, functions, }, )]; let f = File::open("./test/jacoco/basic-report.xml").expect("Failed to open xml file"); let file = BufReader::new(&f); let results = parse_jacoco_xml_report(file).unwrap(); assert_eq!(results, expected); } #[test] fn test_parser_jacoco_xml_inner_classes() { let mut lines: BTreeMap = BTreeMap::new(); for i in &[5, 10, 14, 15, 18, 22, 23, 25, 27, 31, 34, 37, 44, 49] { lines.insert(*i, 0); } let mut functions: FunctionMap = FxHashMap::default(); for (name, start, executed) in vec![ ("Person$InnerClassForPerson#getSomethingElse", 31, false), ("Person#getSurname", 10, false), ("Person$InnerClassForPerson#", 25, false), ("Person#setSurname", 14, false), ("Person#getAge", 18, false), ( "Person$InnerClassForPerson$InnerInnerClass#", 34, false, ), ("Person$InnerClassForPerson#getSomething", 27, false), ("Person#", 5, false), ( "Person$InnerClassForPerson$InnerInnerClass#everything", 37, false, ), ("Person#setAge", 22, false), ] { functions.insert(String::from(name), Function { start, executed }); } let branches: BTreeMap> = BTreeMap::new(); let expected = vec![( String::from("org/gradle/Person.java"), CovResult { lines, branches, functions, }, )]; let f = File::open("./test/jacoco/inner-classes.xml").expect("Failed to open xml file"); let file = BufReader::new(&f); let results = parse_jacoco_xml_report(file).unwrap(); assert_eq!(results, expected); } #[test] #[should_panic] fn test_parser_jacoco_xml_non_top_level_classes_panics() { let f = File::open("./test/jacoco/multiple-top-level-classes.xml") .expect("Failed to open xml file"); let file = BufReader::new(&f); let _results = parse_jacoco_xml_report(file).unwrap(); } #[test] #[should_panic] fn test_parser_jacoco_xml_full_report_with_non_top_level_classes_panics() { let f = File::open("./test/jacoco/full-junit4-report-multiple-top-level-classes.xml") .expect("Failed to open xml file"); let file = BufReader::new(&f); let _results = parse_jacoco_xml_report(file).unwrap(); } } grcov-0.8.22/src/path_rewriting.rs000064400000000000000000001561071046102023000152320ustar 00000000000000use globset::{Glob, GlobSet, GlobSetBuilder}; use rayon::prelude::*; use rustc_hash::FxHashMap; use serde_json::Value; use std::collections::hash_map; use std::fs; use std::io; use std::path::{Component, Path, PathBuf}; use walkdir::{DirEntry, WalkDir}; use crate::defs::*; use crate::filter::*; fn to_lowercase_first(s: &str) -> String { let mut c = s.chars(); c.next().unwrap().to_lowercase().collect::() + c.as_str() } fn to_uppercase_first(s: &str) -> String { let mut c = s.chars(); c.next().unwrap().to_uppercase().collect::() + 
pub fn canonicalize_path<P: AsRef<Path>>(path: P) -> io::Result<PathBuf> {
    let path = fs::canonicalize(path)?;

    #[cfg(windows)]
    let path = path
        .to_str()
        .unwrap()
        .strip_prefix(r"\\?\")
        .map(PathBuf::from)
        .unwrap_or(path);

    Ok(path)
}

pub fn has_no_parent(path: &str) -> bool {
    PathBuf::from(path).parent() == Some(&PathBuf::from(""))
}

pub fn normalize_path<P: AsRef<Path>>(path: P) -> Option<PathBuf> {
    // Copied from Cargo sources:
    // https://github.com/rust-lang/cargo/blob/911f0b94e5c10f514b13affbeccd5fd2661a32d9/src/cargo/util/paths.rs#L60
    let mut components = path.as_ref().components().peekable();
    let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
        components.next();
        PathBuf::from(c.as_os_str())
    } else {
        PathBuf::new()
    };

    for component in components {
        match component {
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => {
                ret.push(component.as_os_str());
            }
            Component::CurDir => {}
            Component::ParentDir => {
                if !ret.pop() {
                    eprintln!(
                        "Warning: {:?} cannot be normalized because of \"..\", so skip it.",
                        path.as_ref()
                    );
                    return None;
                }
            }
            Component::Normal(c) => {
                ret.push(c);
            }
        }
    }

    Some(ret)
}

// Search the source file's path in the mapping.
fn apply_mapping(mapping: &Option<Value>, path: &str) -> PathBuf {
    if let Some(mapping) = mapping {
        if let Some(p) = mapping.get(to_lowercase_first(path)) {
            return PathBuf::from(p.as_str().unwrap());
        } else if let Some(p) = mapping.get(to_uppercase_first(path)) {
            return PathBuf::from(p.as_str().unwrap());
        }
    }

    PathBuf::from(path)
}

// If the join of the source dir and the relative path is a file, return it.
// Otherwise, remove the common part between the source dir's end and the
// relative path's start.
fn guess_abs_path(prefix_dir: &Path, path: &Path) -> PathBuf {
    let full_path = prefix_dir.join(path);
    if full_path.is_file() {
        return full_path;
    }

    for ancestor in path.ancestors() {
        if prefix_dir.ends_with(ancestor) && !ancestor.as_os_str().is_empty() {
            return prefix_dir.join(path.strip_prefix(ancestor).unwrap());
        }
    }

    full_path
}

// Remove the prefix from the source file's path.
fn remove_prefix(prefix_dir: Option<&Path>, path: PathBuf) -> PathBuf {
    if let Some(prefix_dir) = prefix_dir {
        if path.starts_with(prefix_dir) {
            return path.strip_prefix(prefix_dir).unwrap().to_path_buf();
        }
    }

    path
}

fn fixup_rel_path(source_dir: Option<&Path>, abs_path: &Path, rel_path: PathBuf) -> PathBuf {
    if let Some(ref source_dir) = source_dir {
        if abs_path.starts_with(source_dir) {
            return abs_path.strip_prefix(source_dir).unwrap().to_path_buf();
        } else if !rel_path.is_relative() {
            return abs_path.to_owned();
        }
    }

    rel_path
}
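// A minimal, self-contained sanity check of how the helpers above behave; the
// sample paths are illustrative and are not part of grcov's test data.
#[cfg(test)]
mod path_helper_examples {
    use super::*;

    #[test]
    fn normalize_and_remove_prefix_examples() {
        // './' and '..' segments are folded away...
        assert_eq!(
            normalize_path("./foo/../bar").unwrap(),
            PathBuf::from("bar")
        );
        // ...but a path that escapes its root cannot be normalized.
        assert!(normalize_path("../escapes").is_none());
        // remove_prefix strips a leading build directory when it is present,
        // and leaves the path untouched otherwise.
        assert_eq!(
            remove_prefix(
                Some(Path::new("/build")),
                PathBuf::from("/build/src/main.rs")
            ),
            PathBuf::from("src/main.rs")
        );
        assert_eq!(
            remove_prefix(Some(Path::new("/build")), PathBuf::from("src/main.rs")),
            PathBuf::from("src/main.rs")
        );
    }
}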
// Get the absolute path for the source file's path, resolving symlinks.
fn get_abs_path(source_dir: Option<&Path>, rel_path: PathBuf) -> Option<(PathBuf, PathBuf)> {
    let mut abs_path = if !rel_path.is_relative() {
        rel_path.to_owned()
    } else if let Some(source_dir) = source_dir {
        if !cfg!(windows) {
            guess_abs_path(source_dir, &rel_path)
        } else {
            guess_abs_path(
                source_dir,
                &PathBuf::from(&rel_path.to_str().unwrap().replace('/', "\\")),
            )
        }
    } else {
        rel_path.to_owned()
    };

    // Canonicalize, if possible.
    if let Ok(p) = canonicalize_path(&abs_path) {
        abs_path = p;
    }

    // Fix up the relative path, in case the absolute path was a symlink.
    let rel_path = fixup_rel_path(source_dir, &abs_path, rel_path);

    // Normalize the paths, removing './', '//' and '..' components.
    let rel_path = normalize_path(rel_path);
    let abs_path = normalize_path(abs_path);
    abs_path.zip(rel_path)
}

fn check_extension(path: &Path, e: &str) -> bool {
    if let Some(ext) = &path.extension() {
        if let Some(ext) = ext.to_str() {
            ext == e
        } else {
            false
        }
    } else {
        false
    }
}

fn map_partial_path(file_to_paths: &FxHashMap<String, Vec<PathBuf>>, path: PathBuf) -> PathBuf {
    let options = file_to_paths.get(path.file_name().unwrap().to_str().unwrap());
    if options.is_none() {
        return path;
    }

    let options = options.unwrap();
    if options.len() == 1 {
        return options[0].clone();
    }

    let mut result: Option<&PathBuf> = None;
    for option in options {
        if option.ends_with(&path) {
            assert!(
                result.is_none(),
                "Only one file in the repository should end with {} ({} and {} both end with that)",
                path.display(),
                result.unwrap().display(),
                option.display()
            );
            result = Some(option)
        }
    }

    if let Some(result) = result {
        result.clone()
    } else {
        path
    }
}

fn is_hidden(entry: &DirEntry) -> bool {
    entry
        .file_name()
        .to_str()
        .map(|s| s.starts_with('.'))
        .unwrap_or(false)
}

fn is_symbolic_link(entry: &DirEntry) -> bool {
    entry.path_is_symlink()
}

fn to_globset(dirs: &[impl AsRef<str>]) -> GlobSet {
    let mut glob_builder = GlobSetBuilder::new();
    for dir in dirs {
        glob_builder.add(Glob::new(dir.as_ref()).unwrap());
    }
    glob_builder.build().unwrap()
}

pub fn rewrite_paths(
    result_map: CovResultMap,
    path_mapping: Option<Value>,
    source_dir: Option<&Path>,
    prefix_dir: Option<&Path>,
    ignore_not_existing: bool,
    to_ignore_dirs: &[impl AsRef<str>],
    to_keep_dirs: &[impl AsRef<str>],
    filter_option: Option<bool>,
    file_filter: crate::FileFilter,
) -> Vec<(PathBuf, PathBuf, CovResult)> {
    let to_ignore_globset = to_globset(to_ignore_dirs);
    let to_keep_globset = to_globset(to_keep_dirs);

    if let Some(p) = &source_dir {
        assert!(p.is_absolute());
    }

    // Traverse the source dir and store all paths, reversed (file name -> paths).
    let mut file_to_paths: FxHashMap<String, Vec<PathBuf>> = FxHashMap::default();
    if let Some(ref source_dir) = source_dir {
        for entry in WalkDir::new(source_dir)
            .into_iter()
            .filter_entry(|e| !is_hidden(e) && !is_symbolic_link(e))
        {
            let entry = entry
                .unwrap_or_else(|_| panic!("Failed to open directory '{}'.", source_dir.display()));

            let full_path = entry.path();
            if !full_path.is_file() {
                continue;
            }

            let path = full_path.strip_prefix(source_dir).unwrap().to_path_buf();
            if to_ignore_globset.is_match(&path) {
                continue;
            }

            let name = entry.file_name().to_str().unwrap().to_string();
            match file_to_paths.entry(name) {
                hash_map::Entry::Occupied(f) => f.into_mut().push(path),
                hash_map::Entry::Vacant(v) => {
                    v.insert(vec![path]);
                }
            };
        }
    }

    let results = result_map
        .into_par_iter()
        .filter_map(move |(path, mut result)| {
            let path = path.replace('\\', "/");

            // Get the path from the mapping.
            let rel_path = apply_mapping(&path_mapping, &path);

            // Remove the prefix from the path.
            let rel_path = remove_prefix(prefix_dir, rel_path);

            // Try mapping a partial path to a full path.
            let rel_path = if check_extension(&rel_path, "java") {
                map_partial_path(&file_to_paths, rel_path)
            } else {
                rel_path
            };

            // Get the absolute path to the source file.
            let (abs_path, rel_path) = get_abs_path(source_dir, rel_path)?;

            if to_ignore_globset.is_match(&rel_path) {
                return None;
            }

            if !to_keep_globset.is_empty() && !to_keep_globset.is_match(&rel_path) {
                return None;
            }

            if ignore_not_existing && !abs_path.exists() {
                return None;
            }

            // Always return results with '/'.
let rel_path = PathBuf::from(rel_path.to_str().unwrap().replace('\\', "/")); for filter in file_filter.create(&abs_path) { match filter { crate::FilterType::Both(number) => { result.branches.remove(&number); result.lines.remove(&number); } crate::FilterType::Line(number) => { result.lines.remove(&number); } crate::FilterType::Branch(number) => { result.branches.remove(&number); } } } match filter_option { Some(true) => { if !is_covered(&result) { return None; } } Some(false) => { if is_covered(&result) { return None; } } None => (), }; Some((abs_path, rel_path, result)) }); results.collect() } #[cfg(test)] mod tests { use super::*; use serde_json::json; use std::collections::BTreeMap; #[test] fn test_to_lowercase_first() { assert_eq!(to_lowercase_first("marco"), "marco"); assert_eq!(to_lowercase_first("Marco"), "marco"); } #[test] #[should_panic] fn test_to_lowercase_first_empty() { to_lowercase_first(""); } #[test] fn test_to_uppercase_first() { assert_eq!(to_uppercase_first("marco"), "Marco"); assert_eq!(to_uppercase_first("Marco"), "Marco"); } #[test] #[should_panic] fn test_to_uppercase_first_empty() { to_uppercase_first(""); } macro_rules! empty_result { () => {{ CovResult { lines: BTreeMap::new(), branches: BTreeMap::new(), functions: FxHashMap::default(), } }}; } macro_rules! covered_result { () => {{ CovResult { lines: [(42, 1)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), } }}; } macro_rules! uncovered_result { () => {{ CovResult { lines: [(42, 0)].iter().cloned().collect(), branches: BTreeMap::new(), functions: FxHashMap::default(), } }}; } macro_rules! skipping_result { () => {{ let mut result = empty_result!(); for i in 1..20 { result.lines.insert(i, 1); result.branches.insert(i, vec![true]); } result }}; } #[test] fn test_rewrite_paths_basic() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "/home/worker/src/workspace/main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, None, None, Some(Path::new("/home/worker/src/workspace/")), false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, None, None, Some(Path::new("C:\\Users\\worker\\src\\workspace\\")), false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn 
test_rewrite_paths_remove_prefix_with_slash() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:/Users/worker/src/workspace/main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, None, None, Some(Path::new("C:/Users/worker/src/workspace/")), false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_remove_prefix_with_slash_longer_path() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:/Users/worker/src/workspace/main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, None, None, Some(Path::new("C:/Users/worker/src/")), false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("workspace/main.cpp")); assert_eq!(rel_path.to_str().unwrap(), "workspace/main.cpp"); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_ignore_non_existing_files() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("tests/class/main.cpp".to_string(), empty_result!()); result_map.insert("tests/class/doesntexist.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!( abs_path.is_absolute(), "{} is not absolute", abs_path.display() ); assert!(abs_path.ends_with("tests/class/main.cpp")); assert!(rel_path.ends_with("tests/class/main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_ignore_non_existing_files() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("tests\\class\\main.cpp".to_string(), empty_result!()); result_map.insert("tests\\class\\doesntexist.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert!(rel_path.ends_with("tests\\class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_ignore_a_directory() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir/prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &["mydir/*"], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_ignore_a_directory() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir\\prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, 
None, None, false, &["mydir/*"], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_ignore_multiple_directories() { let mut ignore_dirs = vec!["mydir/*", "mydir2/*"]; for _ in 0..2 { // we run the test twice, one with ignore_dirs and the other with ignore_dirs.reverse() let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir/prova.h".to_string(), empty_result!()); result_map.insert("mydir2/prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &ignore_dirs, &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); ignore_dirs.reverse(); } } #[cfg(windows)] #[test] fn test_rewrite_paths_ignore_multiple_directories() { let mut ignore_dirs = vec!["mydir/*", "mydir2/*"]; for _ in 0..2 { // we run the test twice, one with ignore_dirs and the other with ignore_dirs.reverse() let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir\\prova.h".to_string(), empty_result!()); result_map.insert("mydir2\\prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &ignore_dirs, &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("main.cpp")); assert_eq!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); ignore_dirs.reverse(); } } #[cfg(unix)] #[test] fn test_rewrite_paths_keep_only_a_directory() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir/prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &["mydir/*"], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("mydir/prova.h")); assert_eq!(rel_path, PathBuf::from("mydir/prova.h")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_keep_only_a_directory() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir\\prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &["mydir/*"], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("mydir\\prova.h")); assert_eq!(rel_path, PathBuf::from("mydir\\prova.h")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_keep_only_multiple_directories() { let mut keep_only_dirs = vec!["mydir/*", "mydir2/*"]; for _ in 0..2 { // we run the test twice, one with keep_only_dirs and the other with keep_only_dirs.reverse() let mut result_map: CovResultMap = 
FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir/prova.h".to_string(), empty_result!()); result_map.insert("mydir2/prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &keep_only_dirs, None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_ne!(abs_path, PathBuf::from("main.cpp")); assert_ne!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 2); keep_only_dirs.reverse(); } } #[cfg(windows)] #[test] fn test_rewrite_paths_keep_only_multiple_directories() { let mut keep_only_dirs = vec!["mydir/*", "mydir2/*"]; for _ in 0..2 { // we run the test twice, one with keep_only_dirs and the other with keep_only_dirs.reverse() let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.cpp".to_string(), empty_result!()); result_map.insert("mydir\\prova.h".to_string(), empty_result!()); result_map.insert("mydir2\\prova.h".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &keep_only_dirs, None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_ne!(abs_path, PathBuf::from("main.cpp")); assert_ne!(rel_path, PathBuf::from("main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 2); keep_only_dirs.reverse(); } } #[cfg(unix)] #[test] fn test_rewrite_paths_keep_only_and_ignore() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.rs".to_string(), empty_result!()); result_map.insert("foo/keep.rs".to_string(), empty_result!()); result_map.insert("foo/not_keep.cpp".to_string(), empty_result!()); result_map.insert("foo/bar_ignore.rs".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &["foo/bar_*.rs"], &["foo/*.rs"], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("foo/keep.rs")); assert_eq!(rel_path, PathBuf::from("foo/keep.rs")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_keep_only_and_ignore() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("main.rs".to_string(), empty_result!()); result_map.insert("foo\\keep.rs".to_string(), empty_result!()); result_map.insert("foo\\not_keep.cpp".to_string(), empty_result!()); result_map.insert("foo\\bar_ignore.rs".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, None, None, false, &["foo/bar_*.rs"], &["foo/*.rs"], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("foo\\keep.rs")); assert_eq!(rel_path, PathBuf::from("foo\\keep.rs")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[test] #[should_panic] fn test_rewrite_paths_rewrite_path_using_relative_source_directory() { let result_map: CovResultMap = FxHashMap::default(); rewrite_paths( result_map, None, Some(Path::new("tests")), None, true, &[""; 0], &[""; 0], None, Default::default(), ) .iter() .any(|_| false); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_using_absolute_source_directory() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("java/main.java".to_string(), empty_result!()); 
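// Note: this insert and the next one resolve to the same file on disk.
// "java/main.java" is joined onto the source dir directly, while for
// "test/java/main.java" guess_abs_path removes the "test" component it
// shares with the source dir's tail, so the loop below sees two entries
// with identical abs/rel paths.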
result_map.insert("test/java/main.java".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("test").unwrap()), None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test/java/main.java")); assert_eq!(rel_path, PathBuf::from("java/main.java")); assert_eq!(result, empty_result!()); } assert_eq!(count, 2); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_using_absolute_source_directory() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("java\\main.java".to_string(), empty_result!()); result_map.insert("test\\java\\main.java".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("test").unwrap()), None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test\\java\\main.java")); assert_eq!(rel_path, PathBuf::from("java\\main.java")); assert_eq!(result, empty_result!()); } assert_eq!(count, 2); } #[cfg(unix)] #[test] fn test_rewrite_paths_subfolder_same_as_root() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("test/main.rs".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("test").unwrap()), None, false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test/test/main.rs")); assert_eq!(rel_path, PathBuf::from("test/main.rs")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_subfolder_same_as_root() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("test\\main.rs".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("test").unwrap()), None, false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test\\test\\main.rs")); assert_eq!(rel_path, PathBuf::from("test\\main.rs")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_for_java_and_rust() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("java/main.java".to_string(), empty_result!()); result_map.insert("main.rs".to_string(), empty_result!()); let mut results = rewrite_paths( result_map, None, Some(&canonicalize_path(".").unwrap()), None, true, &[""; 0], &[""; 0], None, Default::default(), ); assert!(results.len() == 1); let (abs_path, rel_path, result) = results.remove(0); assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test/java/main.java")); assert_eq!(rel_path, PathBuf::from("test/java/main.java")); assert_eq!(result, empty_result!()); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_for_java_and_rust() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("java\\main.java".to_string(), empty_result!()); result_map.insert("main.rs".to_string(), empty_result!()); let mut results = rewrite_paths( result_map, None, Some(&canonicalize_path(".").unwrap()), None, true, &[""; 0], &[""; 0], None, 
Default::default(), ); assert!(results.len() == 1); let (abs_path, rel_path, result) = results.remove(0); assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test\\java\\main.java")); assert_eq!(rel_path, PathBuf::from("test\\java\\main.java")); assert_eq!(result, empty_result!()); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_using_absolute_source_directory_and_partial_path() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("java/main.java".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path(".").unwrap()), None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test/java/main.java")); assert_eq!(rel_path, PathBuf::from("test/java/main.java")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_using_absolute_source_directory_and_partial_path() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("java\\main.java".to_string(), empty_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path(".").unwrap()), None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("test\\java\\main.java")); assert_eq!(rel_path, PathBuf::from("test\\java\\main.java")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_and_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "/home/worker/src/workspace/class/main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("tests").unwrap()), Some(Path::new("/home/worker/src/workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests/class/main.cpp")); eprintln!("{:?}", rel_path); assert_eq!(rel_path, PathBuf::from("class/main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_and_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\class\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("tests").unwrap()), Some(Path::new("C:\\Users\\worker\\src\\workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("class/main.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, Some(json!({"class/main.cpp": "rewritten/main.cpp"})), None, None, false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, 
PathBuf::from("rewritten/main.cpp")); assert_eq!(rel_path, PathBuf::from("rewritten/main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("class\\main.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, Some(json!({"class/main.cpp": "rewritten/main.cpp"})), None, None, false, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("rewritten\\main.cpp")); assert_eq!(rel_path, PathBuf::from("rewritten\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping_and_ignore_non_existing() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("rewritten/main.cpp".to_string(), empty_result!()); result_map.insert("tests/class/main.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, Some( json!({"rewritten/main.cpp": "tests/class/main.cpp", "tests/class/main.cpp": "rewritten/main.cpp"}), ), None, None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests/class/main.cpp")); assert_eq!(rel_path, PathBuf::from("tests/class/main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping_and_ignore_non_existing() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("rewritten\\main.cpp".to_string(), empty_result!()); result_map.insert("tests\\class\\main.cpp".to_string(), empty_result!()); let results = rewrite_paths( result_map, Some( json!({"rewritten/main.cpp": "tests/class/main.cpp", "tests/class/main.cpp": "rewritten/main.cpp"}), ), None, None, true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("tests\\class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping_and_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "/home/worker/src/workspace/rewritten/main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some(json!({"/home/worker/src/workspace/rewritten/main.cpp": "tests/class/main.cpp"})), None, Some(Path::new("/home/worker/src/workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests/class/main.cpp")); assert_eq!(rel_path, PathBuf::from("tests/class/main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping_and_remove_prefix() { // Mapping with uppercase disk and prefix with uppercase disk. 
let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\rewritten\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some( json!({"C:/Users/worker/src/workspace/rewritten/main.cpp": "tests/class/main.cpp"}), ), None, Some(Path::new("C:\\Users\\worker\\src\\workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("tests\\class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); // Mapping with lowercase disk and prefix with uppercase disk. let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\rewritten\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some( json!({"c:/Users/worker/src/workspace/rewritten/main.cpp": "tests/class/main.cpp"}), ), None, Some(Path::new("C:\\Users\\worker\\src\\workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("tests\\class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); // Mapping with uppercase disk and prefix with lowercase disk. let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\rewritten\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some( json!({"C:/Users/worker/src/workspace/rewritten/main.cpp": "tests/class/main.cpp"}), ), None, Some(Path::new("c:\\Users\\worker\\src\\workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("tests\\class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); // Mapping with lowercase disk and prefix with lowercase disk. 
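// As in the cases above, the lookup succeeds because apply_mapping retries
// the path with its first character in the opposite case, so every
// combination of drive-letter casing resolves to the same mapping entry.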
let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\rewritten\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some( json!({"c:/Users/worker/src/workspace/rewritten/main.cpp": "tests/class/main.cpp"}), ), None, Some(Path::new("c:\\Users\\worker\\src\\workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("tests\\class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(unix)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping_and_source_directory_and_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "/home/worker/src/workspace/rewritten/main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some(json!({"/home/worker/src/workspace/rewritten/main.cpp": "class/main.cpp"})), Some(&canonicalize_path("tests").unwrap()), Some(Path::new("/home/worker/src/workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests/class/main.cpp")); assert_eq!(rel_path, PathBuf::from("class/main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_rewrite_path_using_mapping_and_source_directory_and_remove_prefix() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert( "C:\\Users\\worker\\src\\workspace\\rewritten\\main.cpp".to_string(), empty_result!(), ); let results = rewrite_paths( result_map, Some(json!({"C:/Users/worker/src/workspace/rewritten/main.cpp": "class/main.cpp"})), Some(&canonicalize_path("tests").unwrap()), Some(Path::new("C:\\Users\\worker\\src\\workspace")), true, &[""; 0], &[""; 0], None, Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert!(abs_path.is_absolute()); assert!(abs_path.ends_with("tests\\class\\main.cpp")); assert_eq!(rel_path, PathBuf::from("class\\main.cpp")); assert_eq!(result, empty_result!()); } assert_eq!(count, 1); } #[test] fn test_rewrite_paths_only_covered() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("covered.cpp".to_string(), covered_result!()); result_map.insert("uncovered.cpp".to_string(), uncovered_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &[""; 0], Some(true), Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("covered.cpp")); assert_eq!(rel_path, PathBuf::from("covered.cpp")); assert_eq!(result, covered_result!()); } assert_eq!(count, 1); } #[test] fn test_rewrite_paths_only_uncovered() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("covered.cpp".to_string(), covered_result!()); result_map.insert("uncovered.cpp".to_string(), uncovered_result!()); let results = rewrite_paths( result_map, None, None, None, false, &[""; 0], &[""; 0], Some(false), Default::default(), ); let mut count = 0; for (abs_path, rel_path, result) in results { count += 1; assert_eq!(abs_path, PathBuf::from("uncovered.cpp")); assert_eq!(rel_path, PathBuf::from("uncovered.cpp")); assert_eq!(result, 
uncovered_result!()); } assert_eq!(count, 1); } #[test] fn test_normalize_path() { assert_eq!( normalize_path("./foo/bar").unwrap(), PathBuf::from("foo/bar") ); assert_eq!( normalize_path("./foo//bar").unwrap(), PathBuf::from("foo/bar") ); assert_eq!( normalize_path("./foo/./bar/./oof/").unwrap(), PathBuf::from("foo/bar/oof") ); assert_eq!( normalize_path("./foo/../bar/./oof/").unwrap(), PathBuf::from("bar/oof") ); assert!(normalize_path("../bar/oof/").is_none()); assert!(normalize_path("bar/foo/../../../oof/").is_none()); } #[test] fn test_has_no_parent() { assert!(has_no_parent("foo.bar")); assert!(has_no_parent("foo")); assert!(!has_no_parent("/foo.bar")); assert!(!has_no_parent("./foo.bar")); assert!(!has_no_parent("../foo.bar")); assert!(!has_no_parent("foo/foo.bar")); assert!(!has_no_parent("bar/foo/foo.bar")); assert!(!has_no_parent("/")); assert!(!has_no_parent("/foo/bar.oof")); } #[cfg(unix)] #[test] fn test_rewrite_paths_filter_lines_and_branches() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("test/java/skip.java".to_string(), skipping_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("test").unwrap()), None, true, &[""; 0], &[""; 0], None, crate::FileFilter::new( Some(regex::Regex::new("excluded line").unwrap()), Some(regex::Regex::new("skip line start").unwrap()), Some(regex::Regex::new("skip line end").unwrap()), Some(regex::Regex::new("excluded branch").unwrap()), Some(regex::Regex::new("skip branch start").unwrap()), Some(regex::Regex::new("skip branch end").unwrap()), ), ); let mut count = 0; for (_, _, result) in results { count += 1; for inc in [1, 2, 3, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16].iter() { assert!(result.lines.contains_key(inc)); } for inc in [4, 6, 7, 17, 18, 19, 20].iter() { assert!(!result.lines.contains_key(inc)); } for inc in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 15, 16, 17].iter() { assert!(result.branches.contains_key(inc)); } for inc in [11, 13, 14, 18, 19, 20].iter() { assert!(!result.branches.contains_key(inc)); } } assert_eq!(count, 1); } #[cfg(windows)] #[test] fn test_rewrite_paths_filter_lines_and_branches() { let mut result_map: CovResultMap = FxHashMap::default(); result_map.insert("test\\java\\skip.java".to_string(), skipping_result!()); let results = rewrite_paths( result_map, None, Some(&canonicalize_path("test").unwrap()), None, true, &[""; 0], &[""; 0], None, crate::FileFilter::new( Some(regex::Regex::new("excluded line").unwrap()), Some(regex::Regex::new("skip line start").unwrap()), Some(regex::Regex::new("skip line end").unwrap()), Some(regex::Regex::new("excluded branch").unwrap()), Some(regex::Regex::new("skip branch start").unwrap()), Some(regex::Regex::new("skip branch end").unwrap()), ), ); let mut count = 0; for (_, _, result) in results { count += 1; for inc in [1, 2, 3, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16].iter() { assert!(result.lines.contains_key(inc)); } for inc in [4, 6, 7, 17, 18, 19, 20].iter() { assert!(!result.lines.contains_key(inc)); } for inc in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 15, 16, 17].iter() { assert!(result.branches.contains_key(inc)); } for inc in [11, 13, 14, 18, 19, 20].iter() { assert!(!result.branches.contains_key(inc)); } } assert_eq!(count, 1); } } grcov-0.8.22/src/producer.rs000064400000000000000000001616261046102023000140310ustar 00000000000000use rustc_hash::FxHashMap; use std::cell::RefCell; use std::env; use std::fs::{self, File}; use std::io::{self, BufReader, Read}; use std::path::{Path, PathBuf}; use walkdir::WalkDir; use 
zip::ZipArchive;

use crate::defs::*;

#[derive(Debug)]
pub enum ArchiveType {
    Zip(RefCell<ZipArchive<BufReader<File>>>),
    Dir(PathBuf),
    Plain(Vec<PathBuf>),
}

#[derive(Debug)]
pub struct Archive {
    pub name: String,
    pub item: RefCell<ArchiveType>,
}

#[derive(Debug, PartialEq, Eq, Hash)]
pub struct GCNOStem {
    pub stem: String,
    pub llvm: bool,
}

#[cfg(not(windows))]
fn clean_path(path: &Path) -> String {
    path.to_str().unwrap().to_string()
}

#[cfg(windows)]
fn clean_path(path: &Path) -> String {
    path.to_str().unwrap().to_string().replace("\\", "/")
}

impl Archive {
    fn insert_vec<'a>(
        &'a self,
        filename: String,
        map: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
    ) {
        let mut map = map.borrow_mut();
        map.entry(filename)
            .or_insert_with(|| Vec::with_capacity(1))
            .push(self);
    }

    fn handle_file<'a>(
        &'a self,
        file: Option<&mut impl Read>,
        path: &Path,
        gcno_stem_archives: &RefCell<FxHashMap<GCNOStem, &'a Archive>>,
        gcda_stem_archives: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        profraws: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        infos: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        xmls: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        linked_files_maps: &RefCell<FxHashMap<String, &'a Archive>>,
        is_llvm: bool,
    ) {
        if let Some(ext) = path.extension() {
            match ext.to_str().unwrap() {
                "gcno" => {
                    let llvm = is_llvm || Archive::check_file(file, &Archive::is_gcno_llvm);
                    let filename = clean_path(&path.with_extension(""));
                    gcno_stem_archives.borrow_mut().insert(
                        GCNOStem {
                            stem: filename,
                            llvm,
                        },
                        self,
                    );
                }
                "gcda" => {
                    let filename = clean_path(&path.with_extension(""));
                    self.insert_vec(filename, gcda_stem_archives);
                }
                "profraw" => {
                    let filename = clean_path(path);
                    self.insert_vec(filename, profraws);
                }
                "info" => {
                    if Archive::check_file(file, &Archive::is_info) {
                        let filename = clean_path(path);
                        self.insert_vec(filename, infos);
                    }
                }
                "xml" => {
                    if Archive::check_file(file, &Archive::is_jacoco) {
                        let filename = clean_path(path);
                        self.insert_vec(filename, xmls);
                    }
                }
                "json" => {
                    let filename = path.file_name().unwrap();
                    if filename == "linked-files-map.json" {
                        let filename = clean_path(path);
                        linked_files_maps.borrow_mut().insert(filename, self);
                    }
                }
                _ => {}
            }
        }
    }

    fn is_gcno_llvm(reader: &mut dyn Read) -> bool {
        let mut bytes: [u8; 8] = [0; 8];
        reader.read_exact(&mut bytes).is_ok()
            && &bytes[..5] == b"oncg*"
            && (&bytes[5..] == b"204" || &bytes[5..] == b"804")
    }

    fn is_jacoco(reader: &mut dyn Read) -> bool {
        let mut bytes: [u8; 256] = [0; 256];
        if reader.read_exact(&mut bytes).is_ok() {
            return match String::from_utf8(bytes.to_vec()) {
                Ok(s) => s.contains("-//JACOCO//DTD"),
                Err(_) => false,
            };
        }
        false
    }

    fn is_info(reader: &mut dyn Read) -> bool {
        let mut bytes: [u8; 3] = [0; 3];
        reader.read_exact(&mut bytes).is_ok()
            && (bytes == [b'T', b'N', b':'] || bytes == [b'S', b'F', b':'])
    }

    fn check_file(file: Option<&mut impl Read>, checker: &dyn Fn(&mut dyn Read) -> bool) -> bool {
        file.is_some_and(|f| checker(f))
    }

    pub fn get_name(&self) -> &String {
        &self.name
    }

    pub fn explore<'a>(
        &'a mut self,
        gcno_stem_archives: &RefCell<FxHashMap<GCNOStem, &'a Archive>>,
        gcda_stem_archives: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        profraws: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        infos: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        xmls: &RefCell<FxHashMap<String, Vec<&'a Archive>>>,
        linked_files_maps: &RefCell<FxHashMap<String, &'a Archive>>,
        is_llvm: bool,
    ) {
        match *self.item.borrow() {
            ArchiveType::Zip(ref zip) => {
                let mut zip = zip.borrow_mut();
                for i in 0..zip.len() {
                    let mut file = zip.by_index(i).unwrap();
                    let path = PathBuf::from(file.name());
                    self.handle_file(
                        Some(&mut file),
                        &path,
                        gcno_stem_archives,
                        gcda_stem_archives,
                        profraws,
                        infos,
                        xmls,
                        linked_files_maps,
                        is_llvm,
                    );
                }
            }
            ArchiveType::Dir(ref dir) => {
                for entry in WalkDir::new(dir) {
                    let entry = entry.unwrap_or_else(|err| {
                        panic!(
                            "Failed to open '{}'.",
                            err.path().unwrap().to_string_lossy()
                        )
                    });
                    let full_path = entry.path();
                    if full_path.is_file() {
                        let mut file = File::open(full_path).ok();
                        let path = full_path.strip_prefix(dir).unwrap();
                        self.handle_file(
                            file.as_mut(),
                            path,
                            gcno_stem_archives,
                            gcda_stem_archives,
                            profraws,
                            infos,
                            xmls,
                            linked_files_maps,
                            is_llvm,
                        );
                    }
                }
            }
            ArchiveType::Plain(ref plain) => {
                // All the paths are absolute.
                for full_path in plain {
                    let mut file = File::open(full_path).ok();
                    self.handle_file(
                        file.as_mut(),
                        full_path,
                        gcno_stem_archives,
                        gcda_stem_archives,
                        profraws,
                        infos,
                        xmls,
                        linked_files_maps,
                        is_llvm,
                    );
                }
            }
        }
    }

    pub fn read(&self, name: &str) -> Option<Vec<u8>> {
        match *self.item.borrow_mut() {
            ArchiveType::Zip(ref mut zip) => {
                let mut zip = zip.borrow_mut();
                let zipfile = zip.by_name(name);
                match zipfile {
                    Ok(mut f) => {
                        let mut buf = Vec::with_capacity(f.size() as usize + 1);
                        f.read_to_end(&mut buf).expect("Failed to read gcda file");
                        Some(buf)
                    }
                    Err(_) => None,
                }
            }
            ArchiveType::Dir(ref dir) => {
                let path = dir.join(name);
                if let Ok(metadata) = fs::metadata(&path) {
                    match File::open(path) {
                        Ok(mut f) => {
                            let mut buf = Vec::with_capacity(metadata.len() as usize + 1);
                            f.read_to_end(&mut buf).expect("Failed to read gcda file");
                            Some(buf)
                        }
                        Err(_) => None,
                    }
                } else {
                    None
                }
            }
            ArchiveType::Plain(_) => {
                if let Ok(metadata) = fs::metadata(name) {
                    match File::open(name) {
                        Ok(mut f) => {
                            let mut buf = Vec::with_capacity(metadata.len() as usize + 1);
                            f.read_to_end(&mut buf)
                                .unwrap_or_else(|_| panic!("Failed to read file: {}.", name));
                            Some(buf)
                        }
                        Err(_) => None,
                    }
                } else {
                    None
                }
            }
        }
    }

    pub fn extract(&self, name: &str, path: &Path) -> bool {
        let dest_parent = path.parent().unwrap();
        if !dest_parent.exists() {
            fs::create_dir_all(dest_parent).expect("Cannot create parent directory");
        }

        match *self.item.borrow_mut() {
            ArchiveType::Zip(ref mut zip) => {
                let mut zip = zip.borrow_mut();
                let zipfile = zip.by_name(name);
                if let Ok(mut f) = zipfile {
                    let mut file = File::create(path).expect("Failed to create file");
                    io::copy(&mut f, &mut file).expect("Failed to copy file from ZIP");
                    true
                } else {
                    false
                }
            }
            ArchiveType::Dir(ref dir) => {
                // Don't use a hard link here because it can fail when src and
                // dst are not on the same device.
                let src_path = dir.join(name);
                crate::symlink::symlink_file(&src_path, path).unwrap_or_else(|_| {
                    panic!("Failed to create a symlink {:?} -> {:?}", src_path, path)
                });
                true
            }
            ArchiveType::Plain(_) => {
                panic!("We shouldn't be here!");
            }
        }
    }
}

fn gcno_gcda_producer(
    tmp_dir: &Path,
    gcno_stem_archives: &FxHashMap<GCNOStem, &Archive>,
    gcda_stem_archives: &FxHashMap<String, Vec<&Archive>>,
    sender: &JobSender,
    ignore_orphan_gcno: bool,
) {
    let send_job = |item, name| {
        sender
            .send(Some(WorkItem {
                format: ItemFormat::Gcno,
                item,
                name,
            }))
            .unwrap()
    };

    for (gcno_stem, gcno_archive) in gcno_stem_archives {
        let stem = &gcno_stem.stem;
        if let Some(gcda_archives) = gcda_stem_archives.get(stem) {
            let gcno_archive = *gcno_archive;
            let gcno = format!("{}.gcno", stem).to_string();
            let physical_gcno_path = tmp_dir.join(format!("{}_{}.gcno", stem, 1));
            if gcno_stem.llvm {
                let mut gcda_buffers: Vec<Vec<u8>> = Vec::with_capacity(gcda_archives.len());
                if let Some(gcno_buffer) = gcno_archive.read(&gcno) {
                    for gcda_archive in gcda_archives {
                        let gcda = format!("{}.gcda", stem).to_string();
                        if let Some(gcda_buf) = gcda_archive.read(&gcda) {
                            gcda_buffers.push(gcda_buf);
                        }
                    }
                    send_job(
                        ItemType::Buffers(GcnoBuffers {
                            stem: stem.clone(),
                            gcno_buf: gcno_buffer,
                            gcda_buf: gcda_buffers,
                        }),
                        "".to_string(),
                    );
                }
            } else {
                gcno_archive.extract(&gcno, &physical_gcno_path);
                for (num, &gcda_archive) in gcda_archives.iter().enumerate() {
                    let gcno_path = tmp_dir.join(format!("{}_{}.gcno", stem, num + 1));
                    let gcda = format!("{}.gcda", stem).to_string();

                    // Create hard links for every gcda archive past the first.
                    if num != 0 {
                        fs::hard_link(&physical_gcno_path, &gcno_path).unwrap_or_else(|_| {
                            panic!("Failed to create hardlink {:?}", gcno_path)
                        });
                    }

                    let gcda_path = tmp_dir.join(format!("{}_{}.gcda", stem, num + 1));
                    if gcda_archive.extract(&gcda, &gcda_path) || (num == 0 && !ignore_orphan_gcno)
                    {
                        send_job(
                            ItemType::Path((stem.clone(), gcno_path)),
                            gcda_archive.get_name().to_string(),
                        );
                    }
                }
            }
        } else if !ignore_orphan_gcno {
            let gcno_archive = *gcno_archive;
            let gcno = format!("{}.gcno", stem).to_string();
            if gcno_stem.llvm {
                if let Some(gcno_buf) = gcno_archive.read(&gcno) {
                    send_job(
                        ItemType::Buffers(GcnoBuffers {
                            stem: stem.clone(),
                            gcno_buf,
                            gcda_buf: Vec::new(),
                        }),
                        gcno_archive.get_name().to_string(),
                    );
                }
            } else {
                let physical_gcno_path = tmp_dir.join(format!("{}_{}.gcno", stem, 1));
                if gcno_archive.extract(&gcno, &physical_gcno_path) {
                    send_job(
                        ItemType::Path((stem.clone(), physical_gcno_path)),
                        gcno_archive.get_name().to_string(),
                    );
                }
            }
        }
    }
}
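// A small, self-contained check of the format-detection helpers above; the
// byte strings are illustrative and are not taken from grcov's test files.
#[cfg(test)]
mod detection_examples {
    use super::*;

    #[test]
    fn is_info_detection_examples() {
        // Archive::is_info keys on the leading "TN:" or "SF:" lcov record.
        assert!(Archive::is_info(&mut "TN:test\nSF:/a.rs\n".as_bytes()));
        assert!(Archive::is_info(&mut "SF:/a.rs\n".as_bytes()));
        // Anything else (here, arbitrary text) is rejected.
        assert!(!Archive::is_info(&mut "hello".as_bytes()));
    }
}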
fn profraw_producer(
    tmp_dir: &Path,
    profraws: &FxHashMap<String, Vec<&Archive>>,
    sender: &JobSender,
) {
    if profraws.is_empty() {
        return;
    }

    let mut profraw_paths = Vec::new();
    for (name, archives) in profraws {
        let path = PathBuf::from(name);
        let stem = clean_path(&path.with_extension(""));
        // TODO: If there is only one archive and it is not a zip, we don't need to "extract".
        for (num, &archive) in archives.iter().enumerate() {
            let profraw_path = if let ArchiveType::Plain(_) = *archive.item.borrow() {
                Some(path.clone())
            } else {
                None
            };
            let profraw_path = if let Some(profraw_path) = profraw_path {
                profraw_path
            } else {
                let tmp_path = tmp_dir.join(format!("{}_{}.profraw", stem, num + 1));
                archive.extract(name, &tmp_path);
                tmp_path
            };
            profraw_paths.push(profraw_path);
        }
    }

    sender
        .send(Some(WorkItem {
            format: ItemFormat::Profraw,
            item: ItemType::Paths(profraw_paths),
            name: "profraws".to_string(),
        }))
        .unwrap()
}

fn file_content_producer(
    files: &FxHashMap<String, Vec<&Archive>>,
    sender: &JobSender,
    item_format: ItemFormat,
) {
    for (name, archives) in files {
        for archive in archives {
            if let Some(buffer) = archive.read(name) {
                sender
                    .send(Some(WorkItem {
                        format: item_format,
                        item: ItemType::Content(buffer),
                        name: archive.get_name().to_string(),
                    }))
                    .unwrap();
            }
        }
    }
}

pub fn get_mapping(linked_files_maps: &FxHashMap<String, &Archive>) -> Option<Vec<u8>> {
    if let Some((name, archive)) = linked_files_maps.iter().next() {
        archive.read(name)
    } else {
        None
    }
}

fn open_archive(path: &str) -> ZipArchive<BufReader<File>> {
    let file = File::open(path).unwrap_or_else(|_| panic!("Failed to open ZIP file '{}'.", path));
    let reader = BufReader::new(file);
    ZipArchive::new(reader).unwrap_or_else(|_| panic!("Failed to parse ZIP file: {}", path))
}

pub fn producer(
    tmp_dir: &Path,
    paths: &[String],
    sender: &JobSender,
    ignore_orphan_gcno: bool,
    is_llvm: bool,
) -> Option<Vec<u8>> {
    let mut archives: Vec<Archive> = Vec::new();
    let mut plain_files: Vec<PathBuf> = Vec::new();

    let current_dir = env::current_dir().unwrap();

    for path in paths {
        if path.ends_with(".zip") {
            let archive = open_archive(path);
            archives.push(Archive {
                name: path.to_string(),
                item: RefCell::new(ArchiveType::Zip(RefCell::new(archive))),
            });
        } else {
            let path_dir = PathBuf::from(path);
            let full_path = if path_dir.is_relative() {
                current_dir.join(path_dir)
            } else {
                path_dir
            };
            if full_path.is_dir() {
                archives.push(Archive {
                    name: path.to_string(),
                    item: RefCell::new(ArchiveType::Dir(full_path)),
                });
            } else if let Some(ext) = full_path.clone().extension() {
                let ext = ext.to_str().unwrap();
                if ext == "info" || ext == "json" || ext == "xml" || ext == "profraw" {
                    plain_files.push(full_path);
                } else {
                    panic!(
                        "Cannot load file '{:?}': it isn't a .info, a .json or a .xml file.",
                        full_path
                    );
                }
            } else {
                panic!(
                    "Cannot load file '{:?}': it isn't a directory, a .info, a .json or a .xml file.",
                    full_path
                );
            }
        }
    }

    if !plain_files.is_empty() {
        archives.push(Archive {
            name: "plain files".to_string(),
            item: RefCell::new(ArchiveType::Plain(plain_files)),
        });
    }

    let gcno_stems_archives: RefCell<FxHashMap<GCNOStem, &Archive>> =
        RefCell::new(FxHashMap::default());
    let gcda_stems_archives: RefCell<FxHashMap<String, Vec<&Archive>>> =
        RefCell::new(FxHashMap::default());
    let profraws: RefCell<FxHashMap<String, Vec<&Archive>>> = RefCell::new(FxHashMap::default());
    let infos: RefCell<FxHashMap<String, Vec<&Archive>>> = RefCell::new(FxHashMap::default());
    let xmls: RefCell<FxHashMap<String, Vec<&Archive>>> = RefCell::new(FxHashMap::default());
    let linked_files_maps: RefCell<FxHashMap<String, &Archive>> =
        RefCell::new(FxHashMap::default());

    for archive in &mut archives {
        archive.explore(
            &gcno_stems_archives,
            &gcda_stems_archives,
            &profraws,
            &infos,
            &xmls,
            &linked_files_maps,
            is_llvm,
        );
    }

    assert!(
        !(gcno_stems_archives.borrow().is_empty()
            && profraws.borrow().is_empty()
            && infos.borrow().is_empty()
            && xmls.borrow().is_empty()),
        "No input files found"
    );

    file_content_producer(&infos.into_inner(), sender, ItemFormat::Info);
    file_content_producer(&xmls.into_inner(), sender, ItemFormat::JacocoXml);
    profraw_producer(tmp_dir, &profraws.into_inner(), sender);
    gcno_gcda_producer(
        tmp_dir,
        &gcno_stems_archives.into_inner(),
        &gcda_stems_archives.into_inner(),
        sender,
        ignore_orphan_gcno,
    );

    get_mapping(&linked_files_maps.into_inner())
}

#[cfg(test)]
mod tests {
    use super::*;
    use crossbeam_channel::unbounded;
    use serde_json::{self, Value};

    fn check_produced(
        directory: PathBuf,
        receiver: &JobReceiver,
        expected: Vec<(ItemFormat, bool, &str, bool)>,
    ) {
        let mut vec: Vec<Option<WorkItem>> = Vec::new();
        while let Ok(elem) = receiver.try_recv() {
            vec.push(elem);
        }

        for elem in &expected {
            assert!(
                vec.iter().any(|x| {
                    if x.is_none() {
                        return false;
                    }
                    let x = x.as_ref().unwrap();
                    if x.format != elem.0 {
                        return false;
                    }
                    match x.item {
                        ItemType::Content(_) => !elem.1,
                        ItemType::Path((_, ref p)) => elem.1 && p.ends_with(elem.2),
                        ItemType::Paths(ref paths) => paths.iter().any(|p| p.ends_with(elem.2)),
                        ItemType::Buffers(ref b) => b.stem.replace('\\', "/").ends_with(elem.2),
                    }
                }),
                "Missing {:?}",
                elem
            );
        }

        for v in &vec {
            let v = v.as_ref().unwrap();
            assert!(
                expected.iter().any(|x| {
                    if v.format != x.0 {
                        return false;
                    }
                    match v.item {
                        ItemType::Content(_) => !x.1,
                        ItemType::Path((_, ref p)) => x.1 && p.ends_with(x.2),
                        ItemType::Paths(ref paths) => paths.iter().any(|p| p.ends_with(x.2)),
                        ItemType::Buffers(ref b) => b.stem.replace('\\', "/").ends_with(x.2),
                    }
                }),
                "Unexpected {:?}",
                v
            );
        }

        // Make sure we haven't generated duplicated entries.
        assert!(vec.len() <= expected.len());

        // Assert that the expected file exists and that a file with the same
        // name but with extension .gcda exists too (when expected).
        for x in expected.iter() {
            if !x.1 {
                continue;
            }

            let p = directory.join(x.2);
            assert!(p.exists(), "{} doesn't exist", p.display());
            if x.0 == ItemFormat::Gcno {
                let gcda =
                    p.with_file_name(format!("{}.gcda", p.file_stem().unwrap().to_str().unwrap()));
                if x.3 {
                    assert!(gcda.exists(), "{} doesn't exist", gcda.display());
                } else {
                    assert!(!gcda.exists(), "{} exists", gcda.display());
                }
            }
        }
    }

    #[test]
    fn test_dir_producer() {
        let (sender, receiver) = unbounded();

        let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory");
        let tmp_path = tmp_dir.path().to_owned();
        let mapping = producer(&tmp_path, &["test".to_string()], &sender, false, false);

        let expected = vec![
            (ItemFormat::Gcno, true, "Platform_1.gcno", true),
            (ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", true),
            (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true),
            (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true),
            (ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true),
            (ItemFormat::Gcno, true, "Unified_cpp_netwerk_base0_1.gcno", true),
            (ItemFormat::Gcno, true, "prova_1.gcno", true),
            (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true),
            (ItemFormat::Gcno, true, "negative_counts_1.gcno", true),
            (ItemFormat::Gcno, true, "64bit_count_1.gcno", true),
            (ItemFormat::Gcno, true, "no_gcda/main_1.gcno", false),
            (ItemFormat::Gcno, true, "only_one_gcda/main_1.gcno", true),
            (ItemFormat::Gcno, true, "only_one_gcda/orphan_1.gcno", false),
            (ItemFormat::Gcno, true, "gcno_symlink/gcda/main_1.gcno", true),
            (ItemFormat::Gcno, true, "gcno_symlink/gcno/main_1.gcno", false),
            (ItemFormat::Gcno, false, "rust/generics_with_two_parameters", true),
            (ItemFormat::Gcno, true, "reader_gcc-6_1.gcno", true),
            (ItemFormat::Gcno, true, "reader_gcc-7_1.gcno", true),
            (ItemFormat::Gcno, true, "reader_gcc-8_1.gcno", true),
            (ItemFormat::Gcno, true, "reader_gcc-9_1.gcno", true),
            (ItemFormat::Gcno, true, "reader_gcc-10_1.gcno", true),
            (ItemFormat::Info, false, "1494603973-2977-7.info", false),
            (ItemFormat::Info, false, "prova.info", false),
            (ItemFormat::Info, false,
"prova_fn_with_commas.info", false), (ItemFormat::Info, false, "empty_line.info", false), (ItemFormat::Info, false, "invalid_DA_record.info", false), ( ItemFormat::Info, false, "relative_path/relative_path.info", false, ), (ItemFormat::Gcno, false, "llvm/file", true), (ItemFormat::Gcno, false, "llvm/file_branch", true), (ItemFormat::Gcno, false, "llvm/reader", true), ( ItemFormat::JacocoXml, false, "jacoco/basic-jacoco.xml", false, ), ( ItemFormat::JacocoXml, false, "jacoco/inner-classes.xml", false, ), ( ItemFormat::JacocoXml, false, "jacoco/multiple-top-level-classes.xml", false, ), ( ItemFormat::JacocoXml, false, "jacoco/full-junit4-report-multiple-top-level-classes.xml", false, ), (ItemFormat::Profraw, true, "default_1.profraw", false), ( ItemFormat::Gcno, true, "mozillavpn_serverconnection_1.gcno", true, ), ]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_some()); let mapping: Value = serde_json::from_slice(&mapping.unwrap()).unwrap(); assert_eq!( mapping .get("dist/include/zlib.h") .unwrap() .as_str() .unwrap(), "modules/zlib/src/zlib.h" ); } #[test] fn test_dir_producer_multiple_directories() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &["test/sub".to_string(), "test/sub2".to_string()], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "RootAccessibleWrap_1.gcno", true), (ItemFormat::Gcno, true, "prova2_1.gcno", true), ]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } #[test] fn test_dir_producer_directory_with_gcno_symlinks() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &["test/gcno_symlink/gcda".to_string()], &sender, false, false, ); let expected = vec![(ItemFormat::Gcno, true, "main_1.gcno", true)]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } #[test] fn test_dir_producer_directory_with_no_gcda() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &["test/only_one_gcda".to_string()], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "main_1.gcno", true), (ItemFormat::Gcno, true, "orphan_1.gcno", false), ]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } #[test] fn test_dir_producer_directory_with_no_gcda_ignore_orphan_gcno() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &["test/only_one_gcda".to_string()], &sender, true, false, ); let expected = vec![(ItemFormat::Gcno, true, "main_1.gcno", true)]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } #[test] fn test_zip_producer_with_gcda_dir() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &[ "test/zip_dir/gcno.zip".to_string(), "test/zip_dir".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", true), ( ItemFormat::Gcno, true, 
"sub2/RootAccessibleWrap_1.gcno", true, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), ]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_some()); let mapping: Value = serde_json::from_slice(&mapping.unwrap()).unwrap(); assert_eq!( mapping .get("dist/include/zlib.h") .unwrap() .as_str() .unwrap(), "modules/zlib/src/zlib.h" ); } // Test extracting multiple gcda archives. #[test] fn test_zip_producer_multiple_gcda_archives() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &[ "test/gcno.zip".to_string(), "test/gcda1.zip".to_string(), "test/gcda2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", true), ( ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", true, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_2.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_2.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_2.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_2.gcno", true), ]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_some()); let mapping: Value = serde_json::from_slice(&mapping.unwrap()).unwrap(); assert_eq!( mapping .get("dist/include/zlib.h") .unwrap() .as_str() .unwrap(), "modules/zlib/src/zlib.h" ); } // Test extracting gcno with no path mapping. #[test] fn test_zip_producer_gcno_with_no_path_mapping() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &[ "test/gcno_no_path_mapping.zip".to_string(), "test/gcda1.zip".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", true), ( ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", true, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), ]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } // Test calling zip_producer with a different order of zip files. 
#[test] fn test_zip_producer_different_order_of_zip_files() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/gcda1.zip".to_string(), "test/gcno.zip".to_string(), "test/gcda2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", true), ( ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", true, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_2.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_2.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_2.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_2.gcno", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting profraw files. #[test] fn test_zip_producer_profraw_files() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/profraw1.zip".to_string(), "test/profraw2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Profraw, true, "default_1.profraw", false), (ItemFormat::Profraw, true, "default_2.profraw", false), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting info files. #[test] fn test_zip_producer_info_files() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["test/info1.zip".to_string(), "test/info2.zip".to_string()], &sender, false, false, ); let expected = vec![ (ItemFormat::Info, false, "1494603967-2977-2_0.info", true), (ItemFormat::Info, false, "1494603967-2977-3_0.info", true), (ItemFormat::Info, false, "1494603967-2977-4_0.info", true), (ItemFormat::Info, false, "1494603968-2977-5_0.info", true), (ItemFormat::Info, false, "1494603972-2977-6_0.info", true), (ItemFormat::Info, false, "1494603973-2977-7_0.info", true), (ItemFormat::Info, false, "1494603967-2977-2_1.info", true), (ItemFormat::Info, false, "1494603967-2977-3_1.info", true), (ItemFormat::Info, false, "1494603967-2977-4_1.info", true), (ItemFormat::Info, false, "1494603968-2977-5_1.info", true), (ItemFormat::Info, false, "1494603972-2977-6_1.info", true), (ItemFormat::Info, false, "1494603973-2977-7_1.info", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting jacoco report XML files. #[test] fn test_zip_producer_jacoco_xml_files() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/jacoco1.zip".to_string(), "test/jacoco2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ ( ItemFormat::JacocoXml, false, "jacoco/basic-jacoco.xml", true, ), (ItemFormat::JacocoXml, false, "inner-classes.xml", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting both jacoco xml and info files. 
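// Formats can be mixed in a single invocation; each extracted info file carries the
// index of the archive it came from (_0 for info1.zip, _1 for info2.zip), so files
// with equal names in different archives cannot collide.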
#[test] fn test_zip_producer_both_info_and_jacoco_xml() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/jacoco1.zip".to_string(), "test/jacoco2.zip".to_string(), "test/info1.zip".to_string(), "test/info2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ ( ItemFormat::JacocoXml, false, "jacoco/basic-jacoco.xml", true, ), (ItemFormat::JacocoXml, false, "inner-classes.xml", true), (ItemFormat::Info, false, "1494603967-2977-2_0.info", true), (ItemFormat::Info, false, "1494603967-2977-3_0.info", true), (ItemFormat::Info, false, "1494603967-2977-4_0.info", true), (ItemFormat::Info, false, "1494603968-2977-5_0.info", true), (ItemFormat::Info, false, "1494603972-2977-6_0.info", true), (ItemFormat::Info, false, "1494603973-2977-7_0.info", true), (ItemFormat::Info, false, "1494603967-2977-2_1.info", true), (ItemFormat::Info, false, "1494603967-2977-3_1.info", true), (ItemFormat::Info, false, "1494603967-2977-4_1.info", true), (ItemFormat::Info, false, "1494603968-2977-5_1.info", true), (ItemFormat::Info, false, "1494603972-2977-6_1.info", true), (ItemFormat::Info, false, "1494603973-2977-7_1.info", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting both info and gcno/gcda files. #[test] fn test_zip_producer_both_info_and_gcnogcda_files() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/gcno.zip".to_string(), "test/gcda1.zip".to_string(), "test/info1.zip".to_string(), "test/info2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", true), ( ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", true, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), (ItemFormat::Info, false, "1494603967-2977-2_0.info", true), (ItemFormat::Info, false, "1494603967-2977-3_0.info", true), (ItemFormat::Info, false, "1494603967-2977-4_0.info", true), (ItemFormat::Info, false, "1494603968-2977-5_0.info", true), (ItemFormat::Info, false, "1494603972-2977-6_0.info", true), (ItemFormat::Info, false, "1494603973-2977-7_0.info", true), (ItemFormat::Info, false, "1494603967-2977-2_1.info", true), (ItemFormat::Info, false, "1494603967-2977-3_1.info", true), (ItemFormat::Info, false, "1494603967-2977-4_1.info", true), (ItemFormat::Info, false, "1494603968-2977-5_1.info", true), (ItemFormat::Info, false, "1494603972-2977-6_1.info", true), (ItemFormat::Info, false, "1494603973-2977-7_1.info", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting gcno with no associated gcda. 
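// The orphan gcno is still produced here (the fourth tuple field is false: no .gcda
// is written next to it) because ignore_orphan_gcno is false.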
#[test] fn test_zip_producer_gcno_with_no_associated_gcda() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &[ "test/no_gcda/main.gcno.zip".to_string(), "test/no_gcda/empty.gcda.zip".to_string(), ], &sender, false, false, ); let expected = vec![(ItemFormat::Gcno, true, "main_1.gcno", false)]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } // Test extracting gcno with an associated gcda file in only one zip file. #[test] fn test_zip_producer_gcno_with_associated_gcda_in_only_one_archive() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let mapping = producer( &tmp_path, &[ "test/no_gcda/main.gcno.zip".to_string(), "test/no_gcda/empty.gcda.zip".to_string(), "test/no_gcda/main.gcda.zip".to_string(), ], &sender, false, false, ); let expected = vec![(ItemFormat::Gcno, true, "main_1.gcno", true)]; check_produced(tmp_path, &receiver, expected); assert!(mapping.is_none()); } // Test passing a gcda archive with no gcno archive makes zip_producer fail. #[test] #[should_panic] fn test_zip_producer_with_gcda_archive_and_no_gcno_archive() { let (sender, _) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["test/no_gcda/main.gcda.zip".to_string()], &sender, false, false, ); } // Test extracting gcno/gcda archives, where a gcno file exist with no matching gcda file. #[test] fn test_zip_producer_no_matching_gcno() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["test/gcno.zip".to_string(), "test/gcda2.zip".to_string()], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", false), ( ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", false, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting gcno/gcda archives, where a gcno file exist with no matching gcda file. // The gcno file should be produced only once, not twice. 
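// Stems matched in both gcda archives come out once per archive (_1 and _2), while
// the unmatched Platform and RootAccessibleWrap gcnos appear a single time, with no
// counters.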
#[test] fn test_zip_producer_no_matching_gcno_two_gcda_archives() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/gcno.zip".to_string(), "test/gcda2.zip".to_string(), "test/gcda2.zip".to_string(), ], &sender, false, false, ); let expected = vec![ (ItemFormat::Gcno, true, "Platform_1.gcno", false), ( ItemFormat::Gcno, true, "sub2/RootAccessibleWrap_1.gcno", false, ), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_2.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_2.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_2.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), (ItemFormat::Gcno, true, "nsGnomeModule_2.gcno", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting gcno/gcda archives, where a gcno file exist with no matching gcda file and ignore orphan gcno files. #[test] fn test_zip_producer_no_matching_gcno_ignore_orphan_gcno() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["test/gcno.zip".to_string(), "test/gcda2.zip".to_string()], &sender, true, false, ); let expected = vec![ (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), ]; check_produced(tmp_path, &receiver, expected); } // Test extracting gcno/gcda archives, where a gcno file exist with no matching gcda file and ignore orphan gcno files. 
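// With ignore_orphan_gcno set, the unmatched gcno files are dropped from the output
// entirely instead of being produced without counters.
//
// Illustrative sketch (not part of the original suite) of the shape all these tests
// share; the archive path below is hypothetical and the flag names merely paraphrase
// the positional arguments visible in the calls above:
//
//     let (sender, receiver) = unbounded();
//     let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory");
//     let tmp_path = tmp_dir.path().to_owned();
//     // producer(tmp_dir, paths, sender, ignore_orphan_gcno, llvm_flag)
//     let _mapping = producer(&tmp_path, &["test/gcno.zip".to_string()], &sender, true, false);
//     while let Ok(item) = receiver.try_recv() { /* inspect each work item here */ }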
#[test] fn test_zip_producer_no_matching_gcno_two_gcda_archives_ignore_orphan_gcno() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/gcno.zip".to_string(), "test/gcda2.zip".to_string(), "test/gcda2.zip".to_string(), ], &sender, true, false, ); let expected = vec![ (ItemFormat::Gcno, true, "nsMaiInterfaceValue_1.gcno", true), (ItemFormat::Gcno, true, "nsMaiInterfaceValue_2.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_1.gcno", true), (ItemFormat::Gcno, true, "sub/prova2_2.gcno", true), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_1.gcno", true, ), ( ItemFormat::Gcno, true, "nsMaiInterfaceDocument_2.gcno", true, ), (ItemFormat::Gcno, true, "nsGnomeModule_1.gcno", true), (ItemFormat::Gcno, true, "nsGnomeModule_2.gcno", true), ]; check_produced(tmp_path, &receiver, expected); } #[test] fn test_zip_producer_llvm_buffers() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &[ "test/llvm/gcno.zip".to_string(), "test/llvm/gcda1.zip".to_string(), "test/llvm/gcda2.zip".to_string(), ], &sender, true, true, ); let gcno_buf: Vec<u8> = vec![ 111, 110, 99, 103, 42, 50, 48, 52, 74, 200, 254, 66, 0, 0, 0, 1, 9, 0, 0, 0, 0, 0, 0, 0, 236, 217, 93, 255, 2, 0, 0, 0, 109, 97, 105, 110, 0, 0, 0, 0, 2, 0, 0, 0, 102, 105, 108, 101, 46, 99, 0, 0, 1, 0, 0, 0, 0, 0, 65, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 1, 3, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 1, 3, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 69, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 69, 1, 8, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 102, 105, 108, 101, 46, 99, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; let gcda1_buf: Vec<u8> = vec![ 97, 100, 99, 103, 42, 50, 48, 52, 74, 200, 254, 66, 0, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 236, 217, 93, 255, 2, 0, 0, 0, 109, 97, 105, 110, 0, 0, 0, 0, 0, 0, 161, 1, 4, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; let gcda2_buf: Vec<u8> = vec![ 97, 100, 99, 103, 42, 50, 48, 52, 74, 200, 254, 66, 0, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 236, 217, 93, 255, 2, 0, 0, 0, 109, 97, 105, 110, 0, 0, 0, 0, 0, 0, 161, 1, 4, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; while let Ok(elem) = receiver.try_recv() { let elem = elem.unwrap(); if let ItemType::Buffers(buffers) = elem.item { let stem = PathBuf::from(buffers.stem); let stem = stem.file_stem().expect("Unable to get file_stem"); assert!(stem == "file", "Unexpected file: {:?}", stem); assert_eq!(buffers.gcno_buf, gcno_buf); assert_eq!(buffers.gcda_buf, vec![gcda1_buf.clone(), gcda2_buf.clone()]); } else { panic!("Buffers expected"); } } } #[test] fn test_plain_producer() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); let json_path = "test/linked-files-map.json"; let mapping = producer( &tmp_path,
&["test/prova.info".to_string(), json_path.to_string()], &sender, true, false, ); assert!(mapping.is_some()); let mapping = mapping.unwrap(); let expected = vec![(ItemFormat::Info, false, "prova_1.info", true)]; if let Ok(mut reader) = File::open(json_path) { let mut json = Vec::new(); reader.read_to_end(&mut json).unwrap(); assert_eq!(json, mapping); } else { panic!("Failed to read the file: {}", json_path); } check_produced(tmp_path, &receiver, expected); } #[test] fn test_plain_profraw_producer() { let (sender, receiver) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["test/default.profraw".to_string()], &sender, true, false, ); let expected = vec![(ItemFormat::Profraw, true, "default.profraw", false)]; check_produced(PathBuf::from("test"), &receiver, expected); } #[test] #[should_panic] fn test_plain_producer_with_gcno() { let (sender, _) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["sub2/RootAccessibleWrap_1.gcno".to_string()], &sender, true, false, ); } #[test] #[should_panic] fn test_plain_producer_with_gcda() { let (sender, _) = unbounded(); let tmp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); let tmp_path = tmp_dir.path().to_owned(); producer( &tmp_path, &["./test/llvm/file.gcda".to_string()], &sender, true, false, ); } #[test] fn test_jacoco_files() { let mut file = File::open("./test/jacoco/basic-report.xml").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_jacoco), "A Jacoco XML file expected" ); let mut file = File::open("./test/jacoco/full-junit4-report-multiple-top-level-classes.xml").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_jacoco), "A Jacoco XML file expected" ); let mut file = File::open("./test/jacoco/inner-classes.xml").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_jacoco), "A Jacoco XML file expected" ); let mut file = File::open("./test/jacoco/multiple-top-level-classes.xml").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_jacoco), "A Jacoco XML file expected" ); let mut file = File::open("./test/jacoco/not_jacoco_file.xml").ok(); assert!( !Archive::check_file(file.as_mut(), &Archive::is_jacoco), "Not a Jacoco XML file expected" ); } #[test] fn test_info_files() { let mut file = File::open("./test/1494603973-2977-7.info").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_info), "An info file expected" ); let mut file = File::open("./test/empty_line.info").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_info), "An info file expected" ); let mut file = File::open("./test/relative_path/relative_path.info").ok(); assert!( Archive::check_file(file.as_mut(), &Archive::is_info), "An info file expected" ); let mut file = File::open("./test/not_info_file.info").ok(); assert!( !Archive::check_file(file.as_mut(), &Archive::is_info), "Not an info file expected" ); } } grcov-0.8.22/src/reader.rs000064400000000000000000001372741046102023000134520ustar 00000000000000use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; use std::cmp; use std::collections::{btree_map, hash_map, BTreeMap}; use std::convert::From; use std::fmt::{Debug, Display, Formatter}; use std::fs::File; use std::io::{BufReader, Error, Read, Write}; use std::marker::PhantomData; use std::path::Path; use std::path::PathBuf; use std::result::Result; use 
crate::defs::{CovResult, Function}; const GCOV_ARC_ON_TREE: u32 = 1 << 0; const GCOV_ARC_FAKE: u32 = 1 << 1; //const GCOV_ARC_FALLTHROUGH: u32 = 1 << 2; const GCOV_TAG_FUNCTION: u32 = 0x0100_0000; const GCOV_TAG_BLOCKS: u32 = 0x0141_0000; const GCOV_TAG_ARCS: u32 = 0x0143_0000; const GCOV_TAG_LINES: u32 = 0x0145_0000; const GCOV_TAG_COUNTER_ARCS: u32 = 0x01a1_0000; const GCOV_TAG_OBJECT_SUMMARY: u32 = 0xa100_0000; const GCOV_TAG_PROGRAM_SUMMARY: u32 = 0xa300_0000; #[derive(Debug)] pub enum GcovReaderError { Io(std::io::Error), Str(String), } impl From<Error> for GcovReaderError { fn from(err: Error) -> GcovReaderError { GcovReaderError::Str(format!("Reader error: {}", err)) } } pub trait Endian { fn is_little_endian() -> bool; } pub trait GcovReader<E: Endian> { fn read_string(&mut self) -> Result<String, GcovReaderError>; fn read_u32(&mut self) -> Result<u32, GcovReaderError>; fn read_counter(&mut self) -> Result<u64, GcovReaderError>; fn get_version(&self, buf: &[u8]) -> u32; fn read_version(&mut self) -> Result<u32, GcovReaderError>; fn get_pos(&self) -> usize; fn get_stem(&self) -> &str; fn skip_u32(&mut self) -> Result<(), GcovReaderError>; fn skip(&mut self, len: usize) -> Result<(), GcovReaderError>; fn is_little_endian(&self) -> bool { E::is_little_endian() } } pub struct LittleEndian; impl Endian for LittleEndian { fn is_little_endian() -> bool { true } } pub struct BigEndian; impl Endian for BigEndian { fn is_little_endian() -> bool { false } } enum FileType { Gcno, Gcda, } #[derive(Default)] pub struct Gcno { version: u32, checksum: u32, #[allow(dead_code)] cwd: Option<String>, programcounts: u32, runcounts: u32, functions: Vec<GcovFunction>, ident_to_fun: FxHashMap<u32, usize>, } #[derive(Debug)] struct GcovFunction { identifier: u32, start_line: u32, #[allow(dead_code)] start_column: u32, end_line: u32, #[allow(dead_code)] end_column: u32, #[allow(dead_code)] artificial: u32, line_checksum: u32, cfg_checksum: u32, file_name: String, name: String, blocks: SmallVec<[GcovBlock; 16]>, edges: SmallVec<[GcovEdge; 16]>, real_edge_count: usize, lines: FxHashMap<u32, u64>, executed: bool, } #[derive(Debug)] struct GcovBlock { no: usize, source: SmallVec<[usize; 2]>, destination: SmallVec<[usize; 2]>, lines: SmallVec<[u32; 16]>, line_max: u32, counter: u64, } #[derive(Debug)] struct GcovEdge { source: usize, destination: usize, flags: u32, counter: u64, cycles: u64, } impl GcovEdge { fn is_on_tree(&self) -> bool { (self.flags & GCOV_ARC_ON_TREE) != 0 } fn is_fake(&self) -> bool { (self.flags & GCOV_ARC_FAKE) != 0 } fn get_tree_mark(&self) -> &'static str { if self.is_on_tree() { "*" } else { "" } } } impl GcovBlock { fn new(no: usize) -> Self { Self { no, source: SmallVec::new(), destination: SmallVec::new(), lines: SmallVec::new(), line_max: 0, counter: 0, } } } pub struct GcovReaderBuf<E: Endian> { stem: String, buffer: Vec<u8>, pos: usize, phantom: PhantomData<E>, } macro_rules! read_u { ($ty: ty, $buf: expr) => {{ let size = std::mem::size_of::<$ty>(); let start = $buf.pos; $buf.pos += size; if $buf.pos <= $buf.buffer.len() { let val: $ty = unsafe { // data are aligned so it's safe to do that #[allow(clippy::transmute_ptr_to_ptr)] *std::mem::transmute::<*const u8, *const $ty>($buf.buffer[start..].as_ptr()) }; Ok(if $buf.is_little_endian() { val.to_le() } else { val.to_be() }) } else { Err(GcovReaderError::Str(format!( "Not enough data in buffer: cannot read integer in {}", $buf.get_stem() ))) } }}; } macro_rules!
skip { ($size: expr, $buf: expr) => {{ $buf.pos += $size; if $buf.pos < $buf.buffer.len() { Ok(()) } else { Err(GcovReaderError::Str(format!( "Not enough data in buffer: cannot skip {} bytes in {}", $size, $buf.get_stem() ))) } }}; } impl<E: Endian> GcovReaderBuf<E> { pub fn new(stem: &str, buffer: Vec<u8>) -> GcovReaderBuf<E> { GcovReaderBuf { stem: stem.to_string(), buffer, pos: 4, // we already read gcno or gcda phantom: PhantomData, } } } impl<E: Endian> GcovReader<E> for GcovReaderBuf<E> { fn get_stem(&self) -> &str { &self.stem } #[inline(always)] fn skip_u32(&mut self) -> Result<(), GcovReaderError> { skip!(std::mem::size_of::<u32>(), self) } #[inline(always)] fn skip(&mut self, len: usize) -> Result<(), GcovReaderError> { skip!(len, self) } fn read_string(&mut self) -> Result<String, GcovReaderError> { let len = read_u!(u32, self)?; if len == 0 { return Ok("".to_string()); } let len = len as usize * 4; let start = self.pos; self.pos += len; if self.pos <= self.buffer.len() { let bytes = &self.buffer[start..self.pos]; let i = len - bytes.iter().rev().position(|&x| x != 0).unwrap(); Ok(unsafe { std::str::from_utf8_unchecked(&bytes[..i]).to_string() }) } else { Err(GcovReaderError::Str(format!( "Not enough data in buffer: cannot read string in {}", self.get_stem() ))) } } #[inline(always)] fn read_u32(&mut self) -> Result<u32, GcovReaderError> { read_u!(u32, self) } #[inline(always)] fn read_counter(&mut self) -> Result<u64, GcovReaderError> { let lo = read_u!(u32, self)?; let hi = read_u!(u32, self)?; Ok((u64::from(hi) << 32) | u64::from(lo)) } fn get_version(&self, buf: &[u8]) -> u32 { if buf[2] >= b'A' { 100 * u32::from(buf[2] - b'A') + 10 * u32::from(buf[1] - b'0') + u32::from(buf[0] - b'0') } else { 10 * u32::from(buf[2] - b'0') + u32::from(buf[0] - b'0') } } fn read_version(&mut self) -> Result<u32, GcovReaderError> { let i = self.pos; if i + 4 <= self.buffer.len() { self.pos += 4; if self.is_little_endian() && self.buffer[i] == b'*' { Ok(self.get_version(&self.buffer[i + 1..i + 4])) } else if !self.is_little_endian() && self.buffer[i + 3] == b'*' { let buf = [self.buffer[i + 2], self.buffer[i + 1], self.buffer[i]]; Ok(self.get_version(&buf)) } else { let bytes = &self.buffer[i..i + 4]; Err(GcovReaderError::Str(format!( "Unexpected version: {} in {}", String::from_utf8_lossy(bytes), self.get_stem() ))) } } else { Err(GcovReaderError::Str(format!( "Not enough data in buffer: Cannot read version in {}", self.get_stem() ))) } } #[inline(always)] fn get_pos(&self) -> usize { self.pos } } impl Display for GcovReaderError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { GcovReaderError::Io(e) => write!(f, "{}", e), GcovReaderError::Str(e) => write!(f, "{}", e), } } } impl Debug for Gcno { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { for fun in &self.functions { writeln!( f, "===== {} ({}) @ {}:{}", fun.name, fun.identifier, fun.file_name, fun.start_line )?; for block in &fun.blocks { writeln!(f, "Block : {} Counter : {}", block.no, block.counter)?; if let Some((last, elmts)) = block.source.split_last() { write!(f, "\tSource Edges : ")?; for edge in elmts.iter().map(|i| &fun.edges[*i]) { write!(f, "{} ({}), ", edge.source, edge.counter)?; } let edge = &fun.edges[*last]; writeln!(f, "{} ({}), ", edge.source, edge.counter)?; } if let Some((last, elmts)) = block.destination.split_last() { write!(f, "\tDestination Edges : ")?; for edge in elmts.iter().map(|i| &fun.edges[*i]) { write!( f, "{}{} ({}), ", edge.get_tree_mark(), edge.destination, edge.counter )?; } let edge = &fun.edges[*last]; writeln!( f, "{}{} ({}), ", edge.get_tree_mark(), edge.destination, edge.counter )?; } if let
Some((last, elmts)) = block.lines.split_last() { write!(f, "\tLines : ")?; for i in elmts { write!(f, "{},", i)?; } writeln!(f, "{},", last)?; } } } Ok(()) } } impl Gcno { pub fn new() -> Self { Gcno { version: 0, checksum: 0, cwd: None, programcounts: 0, runcounts: 0, functions: Vec::new(), ident_to_fun: FxHashMap::default(), } } fn guess_endianness( mut typ: [u8; 4], buffer: &[u8], stem: &str, ) -> Result<bool, GcovReaderError> { if 4 <= buffer.len() { let bytes = &buffer[..4]; if bytes == typ { // Little endian Ok(true) } else { typ.reverse(); if bytes == typ { // Big endian Ok(false) } else { Err(GcovReaderError::Str(format!( "Unexpected file type: {} in {}.", std::str::from_utf8(bytes).unwrap(), stem ))) } } } else { Err(GcovReaderError::Str(format!( "Not enough data in buffer: Cannot compare types in {}", stem ))) } } fn read(&mut self, typ: FileType, buf: Vec<u8>, stem: &str) -> Result<(), GcovReaderError> { let little_endian = Self::guess_endianness( match typ { FileType::Gcno => *b"oncg", _ => *b"adcg", }, &buf, stem, )?; if little_endian { match typ { FileType::Gcno => self.read_gcno(GcovReaderBuf::<LittleEndian>::new(stem, buf)), _ => self.read_gcda(GcovReaderBuf::<LittleEndian>::new(stem, buf)), } } else { match typ { FileType::Gcno => self.read_gcno(GcovReaderBuf::<BigEndian>::new(stem, buf)), _ => self.read_gcda(GcovReaderBuf::<BigEndian>::new(stem, buf)), } } } pub fn compute( stem: &str, gcno_buf: Vec<u8>, gcda_bufs: Vec<Vec<u8>>, branch_enabled: bool, ) -> Result<Vec<(String, CovResult)>, GcovReaderError> { let mut gcno = Self::new(); gcno.read(FileType::Gcno, gcno_buf, stem)?; for gcda_buf in gcda_bufs.into_iter() { gcno.read(FileType::Gcda, gcda_buf, stem)?; } gcno.stop(); Ok(gcno.finalize(branch_enabled)) } pub fn stop(&mut self) { for fun in self.functions.iter_mut() { fun.count_on_tree(self.version); } } pub fn read_gcno<E: Endian, T: GcovReader<E>>( &mut self, mut reader: T, ) -> Result<(), GcovReaderError> { self.version = reader.read_version()?; self.checksum = reader.read_u32()?; if self.version >= 90 { self.cwd = Some(reader.read_string()?); } if self.version >= 80 { // hasUnexecutedBlocks reader.skip_u32()?; } self.read_functions(&mut reader) } fn read_edges<E: Endian, T: GcovReader<E> + Sized>( fun: &mut GcovFunction, count: u32, reader: &mut T, ) -> Result<(), GcovReaderError> { let edges = &mut fun.edges; let blocks = &mut fun.blocks; let count = ((count - 1) / 2) as usize; let block_no = reader.read_u32()? as usize; if block_no <= blocks.len() { blocks[block_no].destination.reserve(count); for _ in 0..count { let dst_block_no = reader.read_u32()? as usize; let flags = reader.read_u32()?; let edges_count = edges.len(); edges.push(GcovEdge { source: block_no, destination: dst_block_no, flags, counter: 0, cycles: 0, }); let i = match blocks[block_no] .destination .binary_search_by(|x| edges[*x].destination.cmp(&dst_block_no)) { Ok(i) => i, Err(i) => i, }; blocks[block_no].destination.insert(i, edges_count); blocks[dst_block_no].source.push(edges_count); if (flags & GCOV_ARC_ON_TREE) == 0 { fun.real_edge_count += 1; } } } else { return Err(GcovReaderError::Str(format!( "Unexpected block number: {} (in {}) in {}", block_no, fun.name, reader.get_stem() ))); } Ok(()) } fn read_lines<E: Endian, T: GcovReader<E> + Sized>( fun: &mut GcovFunction, version: u32, reader: &mut T, ) -> Result<(), GcovReaderError> { let block_no = reader.read_u32()?
as usize; let mut must_take = true; if block_no <= fun.blocks.len() { let block = &mut fun.blocks[block_no]; let lines = &mut block.lines; loop { let line = reader.read_u32()?; if line != 0 { if !must_take || (version >= 80 && (line < fun.start_line || line > fun.end_line)) { continue; } lines.push(line); if line > block.line_max { block.line_max = line; } } else { let filename = reader.read_string()?; if filename.is_empty() { break; } must_take = filename == fun.file_name; // some lines in the block can come from another file // TODO } } } else { return Err(GcovReaderError::Str(format!( "Unexpected block number: {} (in {}).", block_no, fun.name ))); } Ok(()) } fn read_blocks<E: Endian, T: GcovReader<E> + Sized>( fun: &mut GcovFunction, length: u32, version: u32, reader: &mut T, ) -> Result<(), GcovReaderError> { if version < 80 { let length = length as usize; for no in 0..length { // flags, currently unused reader.skip_u32()?; fun.blocks.push(GcovBlock::new(no)); } } else { let length = reader.read_u32()? as usize; for no in 0..length { fun.blocks.push(GcovBlock::new(no)); } } Ok(()) } fn read_functions<E: Endian, T: GcovReader<E> + Sized>( &mut self, reader: &mut T, ) -> Result<(), GcovReaderError> { while let Ok(tag) = reader.read_u32() { if tag == 0 { break; } let length = reader.read_u32()?; if tag == GCOV_TAG_FUNCTION { let identifier = reader.read_u32()?; let line_checksum = reader.read_u32()?; let cfg_checksum = if self.version >= 47 { reader.read_u32()? } else { 0 }; let name = reader.read_string()?; let (artificial, file_name, start_line, start_column, end_line, end_column) = if self.version < 80 { (0, reader.read_string()?, reader.read_u32()?, 0, 0, 0) } else { ( reader.read_u32()?, reader.read_string()?, reader.read_u32()?, reader.read_u32()?, reader.read_u32()?, if self.version >= 90 { reader.read_u32()?
} else { 0 }, ) }; let pos = self.functions.len(); self.functions.push(GcovFunction { identifier, start_line, start_column, end_line, end_column, artificial, line_checksum, cfg_checksum, file_name, name, blocks: SmallVec::new(), edges: SmallVec::new(), real_edge_count: 0, lines: FxHashMap::default(), executed: false, }); self.ident_to_fun.insert(identifier, pos); } else if tag == GCOV_TAG_BLOCKS { let fun = if let Some(fun) = self.functions.last_mut() { fun } else { continue; }; Gcno::read_blocks(fun, length, self.version, reader)?; } else if tag == GCOV_TAG_ARCS { let fun = if let Some(fun) = self.functions.last_mut() { fun } else { continue; }; Gcno::read_edges(fun, length, reader)?; } else if tag == GCOV_TAG_LINES { let fun = if let Some(fun) = self.functions.last_mut() { fun } else { continue; }; Gcno::read_lines(fun, self.version, reader)?; } } Ok(()) } pub fn read_gcda<E: Endian, T: GcovReader<E>>( &mut self, mut reader: T, ) -> Result<(), GcovReaderError> { let version = reader.read_version()?; if version != self.version { Err(GcovReaderError::Str(format!( "GCOV versions do not match in {}", reader.get_stem() ))) } else { let checksum = reader.read_u32()?; if checksum != self.checksum { Err(GcovReaderError::Str(format!( "File checksums do not match: {} != {} in {}", self.checksum, checksum, reader.get_stem() ))) } else { let mut current_fun_id: Option<usize> = None; while let Ok(tag) = reader.read_u32() { if tag == 0 { break; } let length = reader.read_u32()?; let mut pos = reader.get_pos(); if tag == GCOV_TAG_FUNCTION { if length == 0 { continue; } if length == 1 { return Err(GcovReaderError::Str(format!( "Invalid header length in {}", reader.get_stem() ))); } let id = reader.read_u32()?; let line_sum = reader.read_u32()?; let cfg_sum = if version >= 47 { reader.read_u32()? } else { 0 }; if let Some(fun_id) = self.ident_to_fun.get(&id) { let fun = &self.functions[*fun_id]; if line_sum != fun.line_checksum || cfg_sum != fun.cfg_checksum { return Err(GcovReaderError::Str(format!( "Checksum mismatch ({}, {}) != ({}, {}) in {}", line_sum, fun.line_checksum, cfg_sum, fun.cfg_checksum, reader.get_stem() ))); } current_fun_id = Some(*fun_id); } else { return Err(GcovReaderError::Str(format!( "Invalid function identifier {} in {}", id, reader.get_stem() ))); } } else if tag == GCOV_TAG_COUNTER_ARCS { let fun = if let Some(fun_id) = &current_fun_id { &mut self.functions[*fun_id] } else { continue; }; let count = length; let edges = &mut fun.edges; if fun.real_edge_count as u32 != count / 2 { return Err(GcovReaderError::Str(format!( "Unexpected number of edges (in {}) in {}", fun.name, reader.get_stem() ))); } for edge in edges.iter_mut() { if edge.is_on_tree() { continue; } let counter = reader.read_counter()?; edge.counter += counter; fun.blocks[edge.source].counter += counter; } } else if tag == GCOV_TAG_OBJECT_SUMMARY { let runcounts = reader.read_u32()?; reader.skip_u32()?; self.runcounts += if length == 9 { reader.read_u32()?
} else { runcounts }; } else if tag == GCOV_TAG_PROGRAM_SUMMARY { if length > 0 { reader.skip_u32()?; reader.skip_u32()?; self.runcounts += reader.read_u32()?; } self.programcounts += 1; } pos += 4 * (length as usize); reader.skip(pos - reader.get_pos())?; } Ok(()) } } } fn collect_lines(&self) -> FxHashMap<&str, FxHashMap<u32, u64>> { let mut results: FxHashMap<&str, FxHashMap<u32, u64>> = FxHashMap::default(); for function in &self.functions { let lines = match results.entry(&function.file_name) { hash_map::Entry::Occupied(l) => l.into_mut(), hash_map::Entry::Vacant(p) => p.insert(FxHashMap::default()), }; for (line, counter) in &function.lines { match lines.entry(*line) { hash_map::Entry::Occupied(c) => { *c.into_mut() += *counter; } hash_map::Entry::Vacant(p) => { p.insert(*counter); } } } } results } pub fn dump( &mut self, path: &Path, file_name: &str, writer: &mut dyn Write, ) -> Result<(), GcovReaderError> { let file = File::open(path)?; let mut reader = BufReader::new(file); let mut source = String::new(); for fun in &mut self.functions { fun.add_line_count(); } let counters = self.collect_lines(); let counters = &counters[file_name]; reader.read_to_string(&mut source)?; let stem = PathBuf::from(file_name); let stem = stem.file_stem().unwrap().to_str().unwrap(); let mut n: u32 = 0; let has_runs = self.runcounts != 0; writeln!(writer, "{:>9}:{:>5}:Source:{}", "-", 0, file_name)?; writeln!(writer, "{:>9}:{:>5}:Graph:{}.gcno", "-", 0, stem)?; if has_runs { writeln!(writer, "{:>9}:{:>5}:Data:{}.gcda", "-", 0, stem)?; } else { writeln!(writer, "{:>9}:{:>5}:Data:-", "-", 0)?; } writeln!(writer, "{:>9}:{:>5}:Runs:{}", "-", 0, self.runcounts)?; writeln!( writer, "{:>9}:{:>5}:Programs:{}", "-", 0, i32::from(has_runs) )?; let mut iter = source.split('\n').peekable(); while let Some(line) = iter.next() { if iter.peek().is_none() && line.is_empty() { // We're on the last line and it's empty break; } n += 1; if let Some(counter) = counters.get(&n) { if *counter == 0 { writeln!(writer, "{:>9}:{:>5}:{}", "#####", n, line)?; } else { writeln!(writer, "{:>9}:{:>5}:{}", *counter, n, line)?; } } else { writeln!(writer, "{:>9}:{:>5}:{}", "-", n, line)?; } } Ok(()) } pub fn finalize(&mut self, branch_enabled: bool) -> Vec<(String, CovResult)> { let mut results: FxHashMap<&str, CovResult> = FxHashMap::default(); for fun in &mut self.functions { fun.add_line_count(); let res = match results.entry(&fun.file_name) { hash_map::Entry::Occupied(r) => r.into_mut(), hash_map::Entry::Vacant(p) => p.insert(CovResult { lines: BTreeMap::new(), branches: BTreeMap::new(), functions: FxHashMap::default(), }), }; res.functions.insert( fun.name.clone(), Function { start: fun.start_line, executed: fun.executed, }, ); if fun.executed { for (line, counter) in fun.lines.iter() { match res.lines.entry(*line) { btree_map::Entry::Occupied(c) => { *c.into_mut() += *counter; } btree_map::Entry::Vacant(p) => { p.insert(*counter); } } } } else { for line in fun.lines.keys() { res.lines.entry(*line).or_insert(0); } } if branch_enabled { for block in &fun.blocks { let line = if block.lines.is_empty() { let mut line_max = 0; for edge_no in block.source.iter() { let source = &fun.blocks[fun.edges[*edge_no].source]; line_max = line_max.max(source.line_max); } line_max } else { block.line_max }; if line == 0 { continue; } let taken: Vec<_> = block .destination .iter() .filter_map(|no| { let edge = &fun.edges[*no]; if edge.is_fake() { None } else { Some(fun.executed && edge.counter > 0) } }) .collect(); if taken.len() <= 1 { continue; } match
res.branches.entry(line) { btree_map::Entry::Occupied(c) => { let v = c.into_mut(); v.extend_from_slice(&taken); } btree_map::Entry::Vacant(p) => { p.insert(taken); } } } } } let mut r = Vec::with_capacity(results.len()); for (k, v) in results.drain() { r.push((k.to_string(), v)); } r } } impl GcovFunction { fn get_cycle_count(edges: &mut [GcovEdge], path: &[usize]) -> u64 { let mut count = u64::MAX; for e in path.iter() { count = cmp::min(count, edges[*e].cycles); } for e in path { edges[*e].cycles -= count; } count } fn unblock( block: usize, blocked: &mut SmallVec<[usize; 4]>, block_lists: &mut SmallVec<[SmallVec<[usize; 2]>; 2]>, ) { if let Some(i) = blocked.iter().position(|x| *x == block) { blocked.remove(i); for b in block_lists.remove(i) { GcovFunction::unblock(b, blocked, block_lists); } } } fn look_for_circuit( fun_edges: &mut [GcovEdge], fun_blocks: &[GcovBlock], v: usize, start: usize, path: &mut SmallVec<[usize; 4]>, blocked: &mut SmallVec<[usize; 4]>, block_lists: &mut SmallVec<[SmallVec<[usize; 2]>; 2]>, blocks: &[usize], ) -> (bool, u64) { let mut count = 0; blocked.push(v); block_lists.push(SmallVec::new()); let mut found = false; let dsts = &fun_blocks[v].destination; for e in dsts { let w = fun_edges[*e].destination; if w >= start && blocks.contains(&w) { path.push(*e); if w == start { count += GcovFunction::get_cycle_count(fun_edges, path); found = true; } else if blocked.iter().all(|x| *x != w) { let (f, c) = GcovFunction::look_for_circuit( fun_edges, fun_blocks, w, start, path, blocked, block_lists, blocks, ); count += c; if f { found = true; } } path.pop(); } } if found { GcovFunction::unblock(v, blocked, block_lists); } else { for e in dsts { let w = fun_edges[*e].destination; if w >= start || blocks.contains(&w) { if let Some(i) = blocked.iter().position(|x| *x == w) { let list = &mut block_lists[i]; if list.iter().all(|x| *x != v) { list.push(v); } } } } } (found, count) } fn get_cycles_count( fun_edges: &mut [GcovEdge], fun_blocks: &[GcovBlock], blocks: &[usize], ) -> u64 { let mut count: u64 = 0; let mut path: SmallVec<[usize; 4]> = SmallVec::new(); let mut blocked: SmallVec<[usize; 4]> = SmallVec::new(); let mut block_lists: SmallVec<[SmallVec<[usize; 2]>; 2]> = SmallVec::new(); for b in blocks { path.clear(); blocked.clear(); block_lists.clear(); let (_, c) = GcovFunction::look_for_circuit( fun_edges, fun_blocks, *b, *b, &mut path, &mut blocked, &mut block_lists, blocks, ); count += c; } count } fn get_line_count( fun_edges: &mut [GcovEdge], fun_blocks: &[GcovBlock], blocks: &[usize], ) -> u64 { let mut count: u64 = 0; for b in blocks { let block = &fun_blocks[*b]; if block.no == 0 { count = block .destination .iter() .fold(count, |acc, e| acc + fun_edges[*e].counter); } else { for e in &block.source { let e = &fun_edges[*e]; let w = e.source; if !blocks.contains(&w) { count += e.counter; } } } for e in &block.destination { let e = &mut fun_edges[*e]; e.cycles = e.counter; } } count + GcovFunction::get_cycles_count(fun_edges, fun_blocks, blocks) } fn count_on_tree(&mut self, version: u32) { if self.blocks.len() >= 2 { let src_no = 0; let sink_no = if version < 48 { self.blocks.len() - 1 } else { 1 }; let edges_count = self.edges.len(); self.edges.push(GcovEdge { source: sink_no, destination: src_no, flags: GCOV_ARC_ON_TREE, counter: 0, cycles: 0, }); let i = match self.blocks[sink_no] .destination .binary_search_by(|x| self.edges[*x].destination.cmp(&src_no)) { Ok(i) => i, Err(i) => i, }; self.blocks[sink_no].destination.insert(i, edges_count); 
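// The synthetic sink -> source edge registered above closes the control-flow graph
// into a circulation, so the propagate_counts pass below can recover every
// unmeasured on-tree counter from the measured arcs alone.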
self.blocks[src_no].source.push(edges_count); let mut visited = FxHashSet::default(); for block_no in 0..self.blocks.len() { Self::propagate_counts(&self.blocks, &mut self.edges, block_no, None, &mut visited); } for edge in self.edges.iter().rev() { if edge.is_on_tree() { self.blocks[edge.source].counter += edge.counter; } } } } fn add_line_count(&mut self) { self.executed = self.edges.first().unwrap().counter > 0; if self.executed { let mut lines_to_block: FxHashMap<u32, Vec<usize>> = FxHashMap::default(); for block in &self.blocks { let n = block.no; for line in &block.lines { match lines_to_block.entry(*line) { hash_map::Entry::Occupied(vec) => { vec.into_mut().push(n); } hash_map::Entry::Vacant(v) => { v.insert(vec![n]); } } } } self.lines.reserve(lines_to_block.len()); for (line, blocks) in lines_to_block { let count = if blocks.len() == 1 { self.blocks[blocks[0]].counter } else { GcovFunction::get_line_count(&mut self.edges, &self.blocks, &blocks) }; self.lines.insert(line, count); } } else { for block in &self.blocks { for line in &block.lines { self.lines.entry(*line).or_insert(0); } } } } fn propagate_counts( blocks: &SmallVec<[GcovBlock; 16]>, edges: &mut SmallVec<[GcovEdge; 16]>, block_no: usize, pred_arc: Option<usize>, visited: &mut FxHashSet<usize>, ) -> u64 { // For each basic block, the sum of incoming edge counts equals the sum of // outgoing edge counts by Kirchhoff's circuit law. If the unmeasured arcs form a // spanning tree, the count for each unmeasured arc (GCOV_ARC_ON_TREE) can be // uniquely identified. // Prevent infinite recursion if !visited.insert(block_no) { return 0; } let mut positive_excess = 0; let mut negative_excess = 0; let block = &blocks[block_no]; for edge_id in block.source.iter() { if pred_arc != Some(*edge_id) { let edge = &edges[*edge_id]; positive_excess += if edge.is_on_tree() { let source = edge.source; Self::propagate_counts(blocks, edges, source, Some(*edge_id), visited) } else { edge.counter }; } } for edge_id in block.destination.iter() { if pred_arc != Some(*edge_id) { let edge = &edges[*edge_id]; negative_excess += if edge.is_on_tree() { let destination = edge.destination; Self::propagate_counts(blocks, edges, destination, Some(*edge_id), visited) } else { edge.counter }; } } let excess = if positive_excess >= negative_excess { positive_excess - negative_excess } else { negative_excess - positive_excess }; if let Some(id) = pred_arc { let edge = &mut edges[id]; edge.counter = excess; } excess } } #[cfg(test)] mod tests { use pretty_assertions::assert_eq; use super::*; use crate::defs::FunctionMap; fn from_path(gcno: &mut Gcno, typ: FileType, path: &str) { let path = PathBuf::from(path); let mut f = File::open(&path).unwrap(); let mut buf = Vec::new(); f.read_to_end(&mut buf).unwrap(); gcno.read(typ, buf, path.to_str().unwrap()).unwrap(); } fn get_input_string(path: &str) -> String { let path = PathBuf::from(path); let mut f = File::open(path).unwrap(); let mut input = String::new(); f.read_to_string(&mut input).unwrap(); input } fn get_input_vec(path: &str) -> Vec<u8> { let path = PathBuf::from(path); let mut f = File::open(path).unwrap(); let mut input = Vec::new(); f.read_to_end(&mut input).unwrap(); input } #[test] fn test_reader_gcno() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/llvm/reader.gcno"); let output = format!("{:?}", gcno); let input = get_input_string("test/llvm/reader.gcno.0.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno,
"test/llvm/reader.gcno"); from_path(&mut gcno, FileType::Gcda, "test/llvm/reader.gcda"); gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/llvm/reader.gcno.1.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcc6() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/reader_gcc-6.gcno"); from_path(&mut gcno, FileType::Gcda, "test/reader_gcc-6.gcda"); gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/reader_gcc-6.gcno.1.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcc7() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/reader_gcc-7.gcno"); from_path(&mut gcno, FileType::Gcda, "test/reader_gcc-7.gcda"); gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/reader_gcc-7.gcno.1.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcc8() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/reader_gcc-8.gcno"); from_path(&mut gcno, FileType::Gcda, "test/reader_gcc-8.gcda"); gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/reader_gcc-8.gcno.1.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcc9() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/reader_gcc-9.gcno"); from_path(&mut gcno, FileType::Gcda, "test/reader_gcc-9.gcda"); gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/reader_gcc-9.gcno.1.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcc10() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/reader_gcc-10.gcno"); from_path(&mut gcno, FileType::Gcda, "test/reader_gcc-10.gcda"); gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/reader_gcc-10.gcno.1.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcda() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/llvm/reader.gcno"); for _ in 0..2 { from_path(&mut gcno, FileType::Gcda, "test/llvm/reader.gcda"); } gcno.stop(); let output = format!("{:?}", gcno); let input = get_input_string("test/llvm/reader.gcno.2.dump"); assert_eq!(output, input); } #[test] fn test_reader_gcno_counter() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/llvm/reader.gcno"); gcno.stop(); let mut output = Vec::new(); gcno.dump( &PathBuf::from("test/llvm/reader.c"), "reader.c", &mut output, ) .unwrap(); let input = get_input_vec("test/llvm/reader.c.0.gcov"); assert!(input == output); } #[test] fn test_reader_gcno_gcda_counter() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/llvm/reader.gcno"); from_path(&mut gcno, FileType::Gcda, "test/llvm/reader.gcda"); gcno.stop(); let mut output = Vec::new(); gcno.dump( &PathBuf::from("test/llvm/reader.c"), "reader.c", &mut output, ) .unwrap(); let input = get_input_vec("test/llvm/reader.c.1.gcov"); assert_eq!(output, input); } #[test] fn test_reader_gcno_gcda_gcda_counter() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/llvm/reader.gcno"); for _ in 0..2 { from_path(&mut gcno, FileType::Gcda, "test/llvm/reader.gcda"); } gcno.stop(); let mut output = Vec::new(); gcno.dump( &PathBuf::from("test/llvm/reader.c"), "reader.c", &mut output, ) .unwrap(); let input = get_input_vec("test/llvm/reader.c.2.gcov"); assert_eq!(output, input); } #[test] fn test_reader_finalize_file() { let mut gcno = Gcno::new(); 
from_path(&mut gcno, FileType::Gcno, "test/llvm/file.gcno"); from_path(&mut gcno, FileType::Gcda, "test/llvm/file.gcda"); gcno.stop(); let result = gcno.finalize(true); let mut lines: BTreeMap<u32, u64> = BTreeMap::new(); lines.insert(2, 1); let mut functions: FunctionMap = FxHashMap::default(); functions.insert( String::from("main"), Function { start: 1, executed: true, }, ); let branches: BTreeMap<u32, Vec<bool>> = BTreeMap::new(); let expected = vec![( String::from("file.c"), CovResult { lines, branches, functions, }, )]; assert_eq!(result, expected); } #[test] fn test_reader_finalize_file_branch() { let mut gcno = Gcno::new(); from_path(&mut gcno, FileType::Gcno, "test/llvm/file_branch.gcno"); from_path(&mut gcno, FileType::Gcda, "test/llvm/file_branch.gcda"); gcno.stop(); let result = gcno.finalize(true); let mut lines: BTreeMap<u32, u64> = BTreeMap::new(); [ (2, 2), (3, 1), (4, 1), (5, 1), (6, 1), (8, 1), (10, 2), (13, 1), (14, 0), (16, 1), (18, 1), (21, 0), (22, 0), (24, 0), (25, 0), (26, 0), (28, 0), (32, 1), ] .iter() .for_each(|x| { lines.insert(x.0, x.1); }); let mut functions: FunctionMap = FxHashMap::default(); functions.insert( String::from("foo"), Function { start: 1, executed: true, }, ); functions.insert( String::from("bar"), Function { start: 12, executed: true, }, ); functions.insert( String::from("oof"), Function { start: 20, executed: false, }, ); functions.insert( String::from("main"), Function { start: 31, executed: true, }, ); let mut branches: BTreeMap<u32, Vec<bool>> = BTreeMap::new(); [ (2, vec![true, true]), (3, vec![true, false]), (13, vec![false, true]), (21, vec![false, false, false, false]), ] .iter() .for_each(|x| { branches.insert(x.0, x.1.clone()); }); let expected = vec![( String::from("file_branch.c"), CovResult { lines, branches, functions, }, )]; assert_eq!(result, expected); } } grcov-0.8.22/src/symlink.rs000064400000000000000000000024121046102023000136570ustar 00000000000000#[cfg(windows)] use std::path::Path; #[cfg(unix)] pub(crate) use std::os::unix::fs::symlink as symlink_file; /// Creates a symbolic link to a file. /// /// On Windows, creating a symbolic link to a file requires Administrator /// rights. Fall back to copying if creating the symbolic link fails. #[cfg(windows)] pub(crate) fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>( original: P, link: Q, ) -> std::io::Result<()> { use std::sync::atomic::{AtomicBool, Ordering::SeqCst}; static HAVE_PRINTED_WARNING: AtomicBool = AtomicBool::new(false); std::os::windows::fs::symlink_file(&original, &link) .or_else(|_| { let _len: u64 = std::fs::copy(&original, &link)?; // Print a warning about symbolic links, but only once per grcov run. if HAVE_PRINTED_WARNING.compare_exchange(false, true, SeqCst, SeqCst).is_ok() { eprintln!( "Failed to create a symlink, but successfully copied file (as fallback).\n\ This is less efficient. You can enable symlinks without elevating to Administrator.\n\ See instructions at https://github.com/mozilla/grcov/blob/master/README.md#enabling-symlinks-on-windows"); } Ok(()) } ) } grcov-0.8.22/src/templates/badges/flat.svg000064400000000000000000000040051046102023000165150ustar 00000000000000{# Original badge design by https://github.com/badges/shields under the CC0-1.0 license.
#} {%- if current >= 100 -%} {%- set width = 104 -%} {%- set position = 815 -%} {%- set text_length = 330 -%} {%- elif current >= 10 -%} {%- set width = 96 -%} {%- set position = 775 -%} {%- set text_length = 250 -%} {%- else -%} {%- set width = 90 -%} {%- set position = 745 -%} {%- set text_length = 190 -%} {%- endif -%} {%- if current >= hi_limit -%} {%- set color = "#97ca00" -%} {%- elif current >= med_limit -%} {%- set color = "#dfb317" -%} {%- else -%} {%- set color = "#e05d44" -%} {%- endif -%} coverage: {{current}}% coverage {{current}}% grcov-0.8.22/src/templates/badges/flat_square.svg000064400000000000000000000026201046102023000200760ustar 00000000000000{# Original badge design by https://github.com/badges/shields under the CC0-1.0 license. #} {%- if current >= 100 -%} {%- set width = 104 -%} {%- set position = 815 -%} {%- set text_length = 330 -%} {%- elif current >= 10 -%} {%- set width = 96 -%} {%- set position = 775 -%} {%- set text_length = 250 -%} {%- else -%} {%- set width = 90 -%} {%- set position = 745 -%} {%- set text_length = 190 -%} {%- endif -%} {%- if current >= hi_limit -%} {%- set color = "#97ca00" -%} {%- elif current >= med_limit -%} {%- set color = "#dfb317" -%} {%- else -%} {%- set color = "#e05d44" -%} {%- endif -%} coverage: {{current}}% coverage {{current}}% grcov-0.8.22/src/templates/badges/for_the_badge.svg000064400000000000000000000026541046102023000203470ustar 00000000000000{# Original badge design by https://github.com/badges/shields under the CC0-1.0 license. #} {%- if current >= 100 -%} {%- set width = 152 -%} {%- set position = 1215 -%} {%- set text_length = 370 -%} {%- elif current >= 10 -%} {%- set width = 142.5 -%} {%- set position = 1167.5 -%} {%- set text_length = 275 -%} {%- else -%} {%- set width = 133 -%} {%- set position = 1120 -%} {%- set text_length = 180 -%} {%- endif -%} {%- if current >= hi_limit -%} {%- set color = "#97ca00" -%} {%- elif current >= med_limit -%} {%- set color = "#dfb317" -%} {%- else -%} {%- set color = "#e05d44" -%} {%- endif -%} COVERAGE: {{current}}% COVERAGE {{current}}% grcov-0.8.22/src/templates/badges/plastic.svg000064400000000000000000000042271046102023000172340ustar 00000000000000{# Original badge design by https://github.com/badges/shields under the CC0-1.0 license. #} {%- if current >= 100 -%} {%- set width = 104 -%} {%- set position = 815 -%} {%- set text_length = 330 -%} {%- elif current >= 10 -%} {%- set width = 96 -%} {%- set position = 775 -%} {%- set text_length = 250 -%} {%- else -%} {%- set width = 90 -%} {%- set position = 745 -%} {%- set text_length = 190 -%} {%- endif -%} {%- if current >= hi_limit -%} {%- set color = "#97ca00" -%} {%- elif current >= med_limit -%} {%- set color = "#dfb317" -%} {%- else -%} {%- set color = "#e05d44" -%} {%- endif -%} coverage: {{current}}% coverage {{current}}% grcov-0.8.22/src/templates/badges/social.svg000064400000000000000000000043331046102023000170450ustar 00000000000000{# Original badge design by https://github.com/badges/shields under the CC0-1.0 license. 
#} {%- if current >= 100 -%} {%- set width = 105 -%} {%- set position = 855 -%} {%- set text_length = 290 -%} {%- elif current >= 10 -%} {%- set width = 99 -%} {%- set position = 825 -%} {%- set text_length = 230 -%} {%- else -%} {%- set width = 93 -%} {%- set position = 795 -%} {%- set text_length = 170 -%} {%- endif -%} Coverage: {{current}}% grcov-0.8.22/src/templates/base.html000064400000000000000000000013461046102023000154260ustar 00000000000000 {%- block head -%} {% block title %}{% endblock title %} {%- endblock head -%}
{%- block content -%}{%- endblock content -%}

Date: {{ date | date(format="%Y-%m-%d %H:%M") }}

grcov-0.8.22/src/templates/file.html000064400000000000000000000033531046102023000154330ustar 00000000000000{% import "macros.html" as macros %} {% extends "base.html" %} {% block title %}Grcov report - {{ current }} {% endblock title %} {% block content -%} {{ macros::summary(parents=parents, stats=stats, precision=precision) }}
{%- for item in items -%} {%- if item.1 > 0 -%} {%- set highlight = "success" -%} {%- set highlight_light = "success-light" -%} {%- set count = item.1 -%} {%- set aria_label = count -%} {%- elif item.1 < 0 -%} {% set highlight = "white" -%} {% set highlight_light = "white" -%} {% set count = "" -%} {%- set aria_label = "no coverage" -%} {%- else -%} {%- set highlight = "danger" -%} {%- set highlight_light = "danger-light" -%} {%- set count = "" -%} {%- set aria_label = "0" -%} {%- endif -%}
{{ count }}
{{ item.2 }}
{%- endfor -%}
{% endblock content -%} grcov-0.8.22/src/templates/index.html000064400000000000000000000023101046102023000156130ustar 00000000000000{% import "macros.html" as macros %} {% extends "base.html" %} {% block title %}Grcov report - {{ current }} {% endblock title %} {%- block content -%} {{ macros::summary(parents=parents, stats=stats, precision=precision) }} {% if branch_enabled %} {% endif %} {%- if kind == "Directory" -%} {%- for item, info in items -%} {{ macros::stats_line(name=item, url=item~"/index.html", stats=info.stats, precision=precision) }} {%- endfor -%} {%- else -%} {%- for item, info in items -%} {{ macros::stats_line(name=item, url=item~".html", stats=info.stats, precision=precision) }} {%- endfor -%} {%- endif -%}
{{ kind }} Line Coverage FunctionsBranches
{%- endblock content -%} grcov-0.8.22/src/templates/macros.html000064400000000000000000000061351046102023000160010ustar 00000000000000{% macro summary_line(kind, covered, total, precision) %} {%- set per = percent(num=covered, den=total) -%}

{{ kind | capitalize }}

{{ per | round(precision=precision) }} %

{% endmacro -%} {% macro summary(parents, stats, precision) %} {% endmacro %} {% macro stats_line(name, url, stats, precision) %} {%- set lines_per = percent(num=stats.covered_lines, den=stats.total_lines) -%} {%- set lines_sev = lines_per | severity(kind="lines") -%} {%- set functions_per = percent(num=stats.covered_funs, den=stats.total_funs) -%} {%- set functions_sev = functions_per | severity(kind="functions") -%} {% if branch_enabled %} {%- set branches_per = percent(num=stats.covered_branches, den=stats.total_branches) -%} {%- set branches_sev = branches_per | severity(kind="branches") -%} {% endif %} {{ name }} {{ lines_per | round(precision=precision) }}% {{ lines_per | round(precision=precision) }}% {{ stats.covered_lines }} / {{ stats.total_lines }} {{ functions_per | round(precision=precision) }}% {{ stats.covered_funs }} / {{ stats.total_funs }} {% if branch_enabled %} {{ branches_per | round(precision=precision) }}% {{ stats.covered_branches }} / {{ stats.total_branches }} {% endif %} {% endmacro %}