cargo-test-support-0.3.0/.cargo_vcs_info.json0000644000000001670000000000100146220ustar { "git": { "sha1": "2dbb1af80a2914475ba76827a312e29cedfa6b2f" }, "path_in_vcs": "crates/cargo-test-support" }cargo-test-support-0.3.0/Cargo.toml0000644000000047170000000000100126250ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.79" name = "cargo-test-support" version = "0.3.0" build = "build.rs" autobins = false autoexamples = false autotests = false autobenches = false description = "Testing framework for Cargo's testsuite." homepage = "https://github.com/rust-lang/cargo" readme = "README.md" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" [lib] name = "cargo_test_support" path = "src/lib.rs" doctest = false [dependencies.anstream] version = "0.6.13" [dependencies.anstyle] version = "1.0.6" [dependencies.anyhow] version = "1.0.82" [dependencies.cargo-test-macro] version = "0.3.0" [dependencies.cargo-util] version = "0.2.14" [dependencies.crates-io] version = "0.40.4" [dependencies.filetime] version = "0.2.23" [dependencies.flate2] version = "1.0.30" features = ["zlib"] default-features = false [dependencies.git2] version = "0.19.0" [dependencies.glob] version = "0.3.1" [dependencies.itertools] version = "0.13.0" [dependencies.pasetors] version = "0.6.8" features = [ "v3", "paserk", "std", "serde", ] [dependencies.regex] version = "1.10.4" [dependencies.serde] version = "1.0.199" features = ["derive"] [dependencies.serde_json] version = "1.0.116" [dependencies.snapbox] version = "0.6.9" features = [ "diff", "dir", "term-svg", "regex", "json", ] [dependencies.tar] version = "0.4.40" default-features = false [dependencies.time] version = "0.3.36" features = [ "parsing", "formatting", "serde", ] [dependencies.toml] version = "0.8.14" [dependencies.url] version = "2.5.0" [dependencies.walkdir] version = "2.5.0" [target."cfg(windows)".dependencies.windows-sys] version = "0.52" features = ["Win32_Storage_FileSystem"] [lints.clippy] dbg_macro = "warn" disallowed_methods = "warn" print_stderr = "warn" print_stdout = "warn" self_named_module_files = "warn" [lints.clippy.all] level = "allow" priority = -1 [lints.rust] rust_2018_idioms = "warn" [lints.rustdoc] private_intra_doc_links = "allow" cargo-test-support-0.3.0/Cargo.toml.orig000064400000000000000000000017221046102023000162770ustar 00000000000000[package] name = "cargo-test-support" version = "0.3.0" edition.workspace = true rust-version = "1.79" # MSRV:1 license.workspace = true homepage.workspace = true repository.workspace = true description = "Testing framework for Cargo's testsuite." 
[lib] doctest = false [dependencies] anstream.workspace = true anstyle.workspace = true anyhow.workspace = true cargo-test-macro.workspace = true cargo-util.workspace = true crates-io.workspace = true filetime.workspace = true flate2.workspace = true git2.workspace = true glob.workspace = true itertools.workspace = true pasetors.workspace = true regex.workspace = true serde = { workspace = true, features = ["derive"] } serde_json.workspace = true snapbox.workspace = true tar.workspace = true time.workspace = true toml.workspace = true url.workspace = true walkdir.workspace = true [target.'cfg(windows)'.dependencies] windows-sys = { workspace = true, features = ["Win32_Storage_FileSystem"] } [lints] workspace = true cargo-test-support-0.3.0/LICENSE-APACHE000064400000000000000000000251541046102023000153410ustar 00000000000000 Apache License Version 2.0, January 2004 https://www.apache.org/licenses/LICENSE-2.0 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) 
The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cargo-test-support-0.3.0/LICENSE-MIT000064400000000000000000000017771046102023000150560ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. cargo-test-support-0.3.0/README.md000064400000000000000000000003761046102023000146730ustar 00000000000000WARNING: You might not want to use this outside of Cargo. * This is designed for testing Cargo itself. Use at your own risk. * No guarantee on any stability across versions. * No feature request would be accepted unless proved useful for testing Cargo. cargo-test-support-0.3.0/build.rs000064400000000000000000000004251046102023000150540ustar 00000000000000#![allow(clippy::disallowed_methods)] fn main() { println!("cargo:rustc-check-cfg=cfg(emulate_second_only_system)"); println!( "cargo:rustc-env=NATIVE_ARCH={}", std::env::var("TARGET").unwrap() ); println!("cargo:rerun-if-changed=build.rs"); } cargo-test-support-0.3.0/containers/apache/Dockerfile000064400000000000000000000014131046102023000207650ustar 00000000000000FROM httpd:2.4-alpine RUN apk add --no-cache git git-daemon openssl COPY bar /repos/bar WORKDIR /repos/bar RUN git config --global user.email "testuser@example.com" &&\ git config --global user.name "Test User" &&\ git config --system --add safe.directory '*' &&\ git init -b master . &&\ git add Cargo.toml src &&\ git commit -m "Initial commit" &&\ cd .. &&\ git clone --bare bar bar.git &&\ rm -rf bar WORKDIR / EXPOSE 443 WORKDIR /usr/local/apache2/conf COPY httpd-cargo.conf . 
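# Append the Cargo-specific config and generate a self-signed certificate
# so the test git repositories can be served over HTTPS.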
RUN cat httpd-cargo.conf >> httpd.conf RUN openssl req -x509 -nodes -days 3650 -newkey rsa:2048 \ -keyout server.key -out server.crt \ -subj "/emailAddress=webmaster@example.com/C=US/ST=California/L=San Francisco/O=Rust/OU=Cargo/CN=127.0.0.1" WORKDIR / cargo-test-support-0.3.0/containers/apache/httpd-cargo.conf000064400000000000000000000005461046102023000220640ustar 00000000000000SetEnv GIT_PROJECT_ROOT /repos SetEnv GIT_HTTP_EXPORT_ALL ScriptAlias /repos /usr/libexec/git-core/git-http-backend/ LoadModule cgid_module modules/mod_cgid.so Require all granted Include conf/extra/httpd-ssl.conf LoadModule ssl_module modules/mod_ssl.so LoadModule socache_shmcb_module modules/mod_socache_shmcb.so cargo-test-support-0.3.0/containers/sshd/Dockerfile000064400000000000000000000014551046102023000205130ustar 00000000000000FROM alpine:3.20 RUN apk add --no-cache openssh git RUN ssh-keygen -A RUN addgroup -S testuser && adduser -S testuser -G testuser -s /bin/ash # NOTE: Ideally the password should be set to *, but I am uncertain how to do # that in alpine. It shouldn't matter since PermitEmptyPasswords is "no". RUN passwd -u testuser RUN mkdir /repos && chown testuser /repos COPY --chown=testuser:testuser bar /repos/bar USER testuser WORKDIR /repos/bar RUN git config --global user.email "testuser@example.com" &&\ git config --global user.name "Test User" &&\ git init -b master . &&\ git add Cargo.toml src &&\ git commit -m "Initial commit" &&\ cd .. &&\ git clone --bare bar bar.git &&\ rm -rf bar WORKDIR / USER root EXPOSE 22 ENTRYPOINT ["/usr/sbin/sshd", "-D", "-E", "/var/log/auth.log"] cargo-test-support-0.3.0/src/compare.rs000064400000000000000000000747351046102023000162110ustar 00000000000000//! Routines for comparing and diffing output. //! //! # Patterns //! //! Many of these functions support special markup to assist with comparing //! text that may vary or is otherwise uninteresting for the test at hand. The //! supported patterns are: //! //! - `[..]` is a wildcard that matches 0 or more characters on the same line //! (similar to `.*` in a regex). It is non-greedy. //! - `[EXE]` optionally adds `.exe` on Windows (empty string on other //! platforms). //! - `[ROOT]` is the path to the test directory's root. //! - `[CWD]` is the working directory of the process that was run. //! - There is a wide range of substitutions (such as `[COMPILING]` or //! `[WARNING]`) to match cargo's "status" output and allows you to ignore //! the alignment. See the source of `substitute_macros` for a complete list //! of substitutions. //! - `[DIRTY-MSVC]` (only when the line starts with it) would be replaced by //! `[DIRTY]` when `cfg(target_env = "msvc")` or the line will be ignored otherwise. //! Tests that work around [issue 7358](https://github.com/rust-lang/cargo/issues/7358) //! can use this to avoid duplicating the `with_stderr` call like: //! `if cfg!(target_env = "msvc") {e.with_stderr("...[DIRTY]...");} else {e.with_stderr("...");}`. //! //! # Normalization //! //! In addition to the patterns described above, the strings are normalized //! in such a way to avoid unwanted differences. The normalizations are: //! //! - Raw tab characters are converted to the string ``. This is helpful //! so that raw tabs do not need to be written in the expected string, and //! to avoid confusion of tabs vs spaces. //! - Backslashes are converted to forward slashes to deal with Windows paths. //! This helps so that all tests can be written assuming forward slashes. //! 
Other heuristics are applied to try to ensure Windows-style paths aren't //! a problem. //! - Carriage returns are removed, which can help when running on Windows. use crate::cross_compile::try_alternate; use crate::paths; use crate::{diff, rustc_host}; use anyhow::{bail, Context, Result}; use serde_json::Value; use std::fmt; use std::path::Path; use std::str; use url::Url; /// This makes it easier to write regex replacements that are guaranteed to only /// get compiled once macro_rules! regex { ($re:literal $(,)?) => {{ static RE: std::sync::OnceLock = std::sync::OnceLock::new(); RE.get_or_init(|| regex::Regex::new($re).unwrap()) }}; } /// Assertion policy for UI tests /// /// This emphasizes showing as much content as possible at the cost of more brittleness /// /// # Snapshots /// /// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable: /// /// - `skip`: do not run the tests /// - `ignore`: run the tests but ignore their failure /// - `verify`: run the tests /// - `overwrite`: update the snapshots based on the output of the tests /// /// # Patterns /// /// - `[..]` is a character wildcard, stopping at line breaks /// - `\n...\n` is a multi-line wildcard /// - `[EXE]` matches the exe suffix for the current platform /// - `[ROOT]` matches [`paths::root()`][crate::paths::root] /// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL /// /// # Normalization /// /// In addition to the patterns described above, text is normalized /// in such a way to avoid unwanted differences. The normalizations are: /// /// - Backslashes are converted to forward slashes to deal with Windows paths. /// This helps so that all tests can be written assuming forward slashes. /// Other heuristics are applied to try to ensure Windows-style paths aren't /// a problem. /// - Carriage returns are removed, which can help when running on Windows. pub fn assert_ui() -> snapbox::Assert { let mut subs = snapbox::Redactions::new(); subs.extend(MIN_LITERAL_REDACTIONS.into_iter().cloned()) .unwrap(); add_test_support_redactions(&mut subs); add_regex_redactions(&mut subs); snapbox::Assert::new() .action_env(snapbox::assert::DEFAULT_ACTION_ENV) .redact_with(subs) } /// Assertion policy for functional end-to-end tests /// /// This emphasizes showing as much content as possible at the cost of more brittleness /// /// # Snapshots /// /// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable: /// /// - `skip`: do not run the tests /// - `ignore`: run the tests but ignore their failure /// - `verify`: run the tests /// - `overwrite`: update the snapshots based on the output of the tests /// /// # Patterns /// /// - `[..]` is a character wildcard, stopping at line breaks /// - `\n...\n` is a multi-line wildcard /// - `[EXE]` matches the exe suffix for the current platform /// - `[ROOT]` matches [`paths::root()`][crate::paths::root] /// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL /// /// # Normalization /// /// In addition to the patterns described above, text is normalized /// in such a way to avoid unwanted differences. The normalizations are: /// /// - Backslashes are converted to forward slashes to deal with Windows paths. /// This helps so that all tests can be written assuming forward slashes. /// Other heuristics are applied to try to ensure Windows-style paths aren't /// a problem. /// - Carriage returns are removed, which can help when running on Windows. 
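///
/// # Example
///
/// A minimal sketch of how this is typically used (marked `ignore` since
/// real tests feed in actual Cargo output; the literal values here are
/// made up):
///
/// ```ignore
/// use cargo_test_support::compare::assert_e2e;
/// use snapbox::str;
///
/// // `[COMPILING]` and `[ROOT]` are redacted via the tables below.
/// assert_e2e().eq(
///     "   Compiling foo v0.1.0 (/cargo-test-root/foo)",
///     str!["[COMPILING] foo v0.1.0 ([ROOT]/foo)"],
/// );
/// ```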
pub fn assert_e2e() -> snapbox::Assert { let mut subs = snapbox::Redactions::new(); subs.extend(MIN_LITERAL_REDACTIONS.into_iter().cloned()) .unwrap(); subs.extend(E2E_LITERAL_REDACTIONS.into_iter().cloned()) .unwrap(); add_test_support_redactions(&mut subs); add_regex_redactions(&mut subs); snapbox::Assert::new() .action_env(snapbox::assert::DEFAULT_ACTION_ENV) .redact_with(subs) } fn add_test_support_redactions(subs: &mut snapbox::Redactions) { let root = paths::root(); // Use `from_file_path` instead of `from_dir_path` so the trailing slash is // put in the users output, rather than hidden in the variable let root_url = url::Url::from_file_path(&root).unwrap().to_string(); subs.insert("[ROOT]", root).unwrap(); subs.insert("[ROOTURL]", root_url).unwrap(); subs.insert("[HOST_TARGET]", rustc_host()).unwrap(); if let Some(alt_target) = try_alternate() { subs.insert("[ALT_TARGET]", alt_target).unwrap(); } } fn add_regex_redactions(subs: &mut snapbox::Redactions) { // For e2e tests subs.insert( "[ELAPSED]", regex!(r"\[FINISHED\].*in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), ) .unwrap(); // for UI tests subs.insert( "[ELAPSED]", regex!(r"Finished.*in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), ) .unwrap(); // output from libtest subs.insert( "[ELAPSED]", regex!(r"; finished in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), ) .unwrap(); subs.insert( "[FILE_NUM]", regex!(r"\[(REMOVED|SUMMARY)\] (?[0-9]+) files"), ) .unwrap(); subs.insert( "[FILE_SIZE]", regex!(r"(?[0-9]+(\.[0-9]+)?([a-zA-Z]i)?)B\s"), ) .unwrap(); subs.insert( "[HASH]", regex!(r"home/\.cargo/registry/src/-(?[a-z0-9]+)"), ) .unwrap(); subs.insert( "[HASH]", regex!(r"\.cargo/target/(?[0-9a-f]{2}/[0-9a-f]{14})"), ) .unwrap(); subs.insert("[HASH]", regex!(r"/[a-z0-9\-_]+-(?[0-9a-f]{16})")) .unwrap(); subs.insert( "[AVG_ELAPSED]", regex!(r"(?[0-9]+(\.[0-9]+)?) ns/iter"), ) .unwrap(); subs.insert( "[JITTER]", regex!(r"ns/iter \(\+/- (?[0-9]+(\.[0-9]+)?)\)"), ) .unwrap(); // Following 3 subs redact: // "1719325877.527949100s, 61549498ns after last build at 1719325877.466399602s" // "1719503592.218193216s, 1h 1s after last build at 1719499991.982681034s" // into "[DIRTY_REASON_NEW_TIME], [DIRTY_REASON_DIFF] after last build at [DIRTY_REASON_OLD_TIME]" subs.insert( "[TIME_DIFF_AFTER_LAST_BUILD]", regex!(r"(?[0-9]+(\.[0-9]+)?s, (\s?[0-9]+(\.[0-9]+)?(s|ns|h))+ after last build at [0-9]+(\.[0-9]+)?s)"), ) .unwrap(); } static MIN_LITERAL_REDACTIONS: &[(&str, &str)] = &[ ("[EXE]", std::env::consts::EXE_SUFFIX), ("[BROKEN_PIPE]", "Broken pipe (os error 32)"), ("[BROKEN_PIPE]", "The pipe is being closed. (os error 232)"), // Unix message for an entity was not found ("[NOT_FOUND]", "No such file or directory (os error 2)"), // Windows message for an entity was not found ( "[NOT_FOUND]", "The system cannot find the file specified. (os error 2)", ), ( "[NOT_FOUND]", "The system cannot find the path specified. (os error 3)", ), ("[NOT_FOUND]", "Access is denied. 
(os error 5)"), ("[NOT_FOUND]", "program not found"), // Unix message for exit status ("[EXIT_STATUS]", "exit status"), // Windows message for exit status ("[EXIT_STATUS]", "exit code"), ]; static E2E_LITERAL_REDACTIONS: &[(&str, &str)] = &[ ("[RUNNING]", " Running"), ("[COMPILING]", " Compiling"), ("[CHECKING]", " Checking"), ("[COMPLETED]", " Completed"), ("[CREATED]", " Created"), ("[CREATING]", " Creating"), ("[CREDENTIAL]", " Credential"), ("[DOWNGRADING]", " Downgrading"), ("[FINISHED]", " Finished"), ("[ERROR]", "error:"), ("[WARNING]", "warning:"), ("[NOTE]", "note:"), ("[HELP]", "help:"), ("[DOCUMENTING]", " Documenting"), ("[SCRAPING]", " Scraping"), ("[FRESH]", " Fresh"), ("[DIRTY]", " Dirty"), ("[LOCKING]", " Locking"), ("[UPDATING]", " Updating"), ("[UPGRADING]", " Upgrading"), ("[ADDING]", " Adding"), ("[REMOVING]", " Removing"), ("[REMOVED]", " Removed"), ("[UNCHANGED]", " Unchanged"), ("[DOCTEST]", " Doc-tests"), ("[PACKAGING]", " Packaging"), ("[PACKAGED]", " Packaged"), ("[DOWNLOADING]", " Downloading"), ("[DOWNLOADED]", " Downloaded"), ("[UPLOADING]", " Uploading"), ("[UPLOADED]", " Uploaded"), ("[VERIFYING]", " Verifying"), ("[ARCHIVING]", " Archiving"), ("[INSTALLING]", " Installing"), ("[REPLACING]", " Replacing"), ("[UNPACKING]", " Unpacking"), ("[SUMMARY]", " Summary"), ("[FIXED]", " Fixed"), ("[FIXING]", " Fixing"), ("[IGNORED]", " Ignored"), ("[INSTALLED]", " Installed"), ("[REPLACED]", " Replaced"), ("[BUILDING]", " Building"), ("[LOGIN]", " Login"), ("[LOGOUT]", " Logout"), ("[YANK]", " Yank"), ("[OWNER]", " Owner"), ("[MIGRATING]", " Migrating"), ("[EXECUTABLE]", " Executable"), ("[SKIPPING]", " Skipping"), ("[WAITING]", " Waiting"), ("[PUBLISHED]", " Published"), ("[BLOCKING]", " Blocking"), ("[GENERATED]", " Generated"), ("[OPENING]", " Opening"), ]; /// Normalizes the output so that it can be compared against the expected value. fn normalize_actual(actual: &str, cwd: Option<&Path>) -> String { // It's easier to read tabs in outputs if they don't show up as literal // hidden characters let actual = actual.replace('\t', ""); if cfg!(windows) { // Let's not deal with \r\n vs \n on windows... let actual = actual.replace('\r', ""); normalize_windows(&actual, cwd) } else { actual } } /// Normalizes the expected string so that it can be compared against the actual output. fn normalize_expected(expected: &str, cwd: Option<&Path>) -> String { let expected = replace_dirty_msvc(expected); let expected = substitute_macros(&expected); if cfg!(windows) { normalize_windows(&expected, cwd) } else { let expected = match cwd { None => expected, Some(cwd) => expected.replace("[CWD]", &cwd.display().to_string()), }; let expected = expected.replace("[ROOT]", &paths::root().display().to_string()); expected } } fn replace_dirty_msvc_impl(s: &str, is_msvc: bool) -> String { if is_msvc { s.replace("[DIRTY-MSVC]", "[DIRTY]") } else { use itertools::Itertools; let mut new = s .lines() .filter(|it| !it.starts_with("[DIRTY-MSVC]")) .join("\n"); if s.ends_with("\n") { new.push_str("\n"); } new } } fn replace_dirty_msvc(s: &str) -> String { replace_dirty_msvc_impl(s, cfg!(target_env = "msvc")) } /// Normalizes text for both actual and expected strings on Windows. fn normalize_windows(text: &str, cwd: Option<&Path>) -> String { // Let's not deal with / vs \ (windows...) let text = text.replace('\\', "/"); // Weirdness for paths on Windows extends beyond `/` vs `\` apparently. // Namely paths like `c:\` and `C:\` are equivalent and that can cause // issues. 
`env::current_dir()` may return a // lowercase drive name, but we round-trip a lot of values through `Url` // which will auto-uppercase the drive name. To just ignore this // distinction we try to canonicalize as much as possible, taking all // forms of a path and canonicalizing them to one. let replace_path = |s: &str, path: &Path, with: &str| { let path_through_url = Url::from_file_path(path).unwrap().to_file_path().unwrap(); let path1 = path.display().to_string().replace('\\', "/"); let path2 = path_through_url.display().to_string().replace('\\', "/"); s.replace(&path1, with) .replace(&path2, with) .replace(with, &path1) }; let text = match cwd { None => text, Some(p) => replace_path(&text, p, "[CWD]"), }; // Similar to cwd above, apply the same treatment to the root path // which in theory all of our paths should otherwise get rooted at. let root = paths::root(); let text = replace_path(&text, &root, "[ROOT]"); text } fn substitute_macros(input: &str) -> String { let mut result = input.to_owned(); for &(pat, subst) in MIN_LITERAL_REDACTIONS { result = result.replace(pat, subst) } for &(pat, subst) in E2E_LITERAL_REDACTIONS { result = result.replace(pat, subst) } result } /// Compares one string against another, checking that they both match. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. /// /// - `description` explains where the output is from (usually "stdout" or "stderr"). /// - `other_output` is other output to display in the error (usually stdout or stderr). pub(crate) fn match_exact( expected: &str, actual: &str, description: &str, other_output: &str, cwd: Option<&Path>, ) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(WildStr::new).collect(); let a: Vec<_> = actual.lines().map(WildStr::new).collect(); if e == a { return Ok(()); } let diff = diff::colored_diff(&e, &a); bail!( "{} did not match:\n\ {}\n\n\ other output:\n\ {}\n", description, diff, other_output, ); } /// Convenience wrapper around [`match_exact`] which will panic on error. #[track_caller] pub(crate) fn assert_match_exact(expected: &str, actual: &str) { if let Err(e) = match_exact(expected, actual, "", "", None) { crate::panic_error("", e); } } /// Checks that the given string contains the given lines, ignoring the order /// of the lines. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub(crate) fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let mut a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect(); // match more-constrained lines first, although in theory we'll // need some sort of recursive match here. This handles the case // that you expect "a\n[..]b" and two lines are printed out, // "ab" and "a", where technically we do match unordered but a naive // search fails to find this. This simple sort at least gets the // test suite to pass for now, but we may need to get more fancy // if tests start failing again.
a.sort_by_key(|s| s.line.len()); let mut changes = Vec::new(); let mut a_index = 0; let mut failure = false; use crate::diff::Change; for (e_i, e_line) in e.into_iter().enumerate() { match a.iter().position(|a_line| e_line == *a_line) { Some(index) => { let a_line = a.remove(index); changes.push(Change::Keep(e_i, index, a_line)); a_index += 1; } None => { failure = true; changes.push(Change::Remove(e_i, e_line)); } } } for unmatched in a { failure = true; changes.push(Change::Add(a_index, unmatched)); a_index += 1; } if failure { bail!( "Expected lines did not match (ignoring order):\n{}\n", diff::render_colored_changes(&changes) ); } else { Ok(()) } } /// Checks that the given string contains the given contiguous lines /// somewhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub(crate) fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect(); if e.len() == 0 { bail!("expected length must not be zero"); } for window in a.windows(e.len()) { if window == e { return Ok(()); } } bail!( "expected to find:\n\ {}\n\n\ did not find in output:\n\ {}", expected, actual ); } /// Checks that the given string does not contain the given contiguous lines /// anywhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub(crate) fn match_does_not_contain( expected: &str, actual: &str, cwd: Option<&Path>, ) -> Result<()> { if match_contains(expected, actual, cwd).is_ok() { bail!( "expected not to find:\n\ {}\n\n\ but found in output:\n\ {}", expected, actual ); } else { Ok(()) } } /// Checks that the given string contains the given contiguous lines /// somewhere, and should be repeated `number` times. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub(crate) fn match_contains_n( expected: &str, number: usize, actual: &str, cwd: Option<&Path>, ) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect(); if e.len() == 0 { bail!("expected length must not be zero"); } let matches = a.windows(e.len()).filter(|window| *window == e).count(); if matches == number { Ok(()) } else { bail!( "expected to find {} occurrences of:\n\ {}\n\n\ but found {} matches in the output:\n\ {}", number, expected, matches, actual ) } } /// Checks that the given string has a line that contains the given patterns, /// and that line also does not contain the `without` patterns. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. /// /// See [`crate::Execs::with_stderr_line_without`] for an example and cautions /// against using. 
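///
/// For illustration (values are made up): with `with = &["Compiling"]` and
/// `without = &["foo"]`, exactly one line of `actual` must contain
/// `Compiling` while not containing `foo`; zero or multiple such lines are
/// errors. Each pattern is implicitly wrapped as `[..]pattern[..]`.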
pub(crate) fn match_with_without( actual: &str, with: &[String], without: &[String], cwd: Option<&Path>, ) -> Result<()> { let actual = normalize_actual(actual, cwd); let norm = |s: &String| format!("[..]{}[..]", normalize_expected(s, cwd)); let with: Vec<_> = with.iter().map(norm).collect(); let without: Vec<_> = without.iter().map(norm).collect(); let with_wild: Vec<_> = with.iter().map(|w| WildStr::new(w)).collect(); let without_wild: Vec<_> = without.iter().map(|w| WildStr::new(w)).collect(); let matches: Vec<_> = actual .lines() .map(WildStr::new) .filter(|line| with_wild.iter().all(|with| with == line)) .filter(|line| !without_wild.iter().any(|without| without == line)) .collect(); match matches.len() { 0 => bail!( "Could not find expected line in output.\n\ With contents: {:?}\n\ Without contents: {:?}\n\ Actual stderr:\n\ {}\n", with, without, actual ), 1 => Ok(()), _ => bail!( "Found multiple matching lines, but only expected one.\n\ With contents: {:?}\n\ Without contents: {:?}\n\ Matching lines:\n\ {}\n", with, without, itertools::join(matches, "\n") ), } } /// Checks that the given string of JSON objects match the given set of /// expected JSON objects. /// /// See [`crate::Execs::with_json`] for more details. pub(crate) fn match_json(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let (exp_objs, act_objs) = collect_json_objects(expected, actual)?; if exp_objs.len() != act_objs.len() { bail!( "expected {} json lines, got {}, stdout:\n{}", exp_objs.len(), act_objs.len(), actual ); } for (exp_obj, act_obj) in exp_objs.iter().zip(act_objs) { find_json_mismatch(exp_obj, &act_obj, cwd)?; } Ok(()) } /// Checks that the given string of JSON objects match the given set of /// expected JSON objects, ignoring their order. /// /// See [`crate::Execs::with_json_contains_unordered`] for more details and /// cautions when using. pub(crate) fn match_json_contains_unordered( expected: &str, actual: &str, cwd: Option<&Path>, ) -> Result<()> { let (exp_objs, mut act_objs) = collect_json_objects(expected, actual)?; for exp_obj in exp_objs { match act_objs .iter() .position(|act_obj| find_json_mismatch(&exp_obj, act_obj, cwd).is_ok()) { Some(index) => act_objs.remove(index), None => { bail!( "Did not find expected JSON:\n\ {}\n\ Remaining available output:\n\ {}\n", serde_json::to_string_pretty(&exp_obj).unwrap(), itertools::join( act_objs.iter().map(|o| serde_json::to_string(o).unwrap()), "\n" ) ); } }; } Ok(()) } fn collect_json_objects( expected: &str, actual: &str, ) -> Result<(Vec, Vec)> { let expected_objs: Vec<_> = expected .split("\n\n") .map(|expect| { expect .parse() .with_context(|| format!("failed to parse expected JSON object:\n{}", expect)) }) .collect::>()?; let actual_objs: Vec<_> = actual .lines() .filter(|line| line.starts_with('{')) .map(|line| { line.parse() .with_context(|| format!("failed to parse JSON object:\n{}", line)) }) .collect::>()?; Ok((expected_objs, actual_objs)) } /// Compares JSON object for approximate equality. /// You can use `[..]` wildcard in strings (useful for OS-dependent things such /// as paths). You can use a `"{...}"` string literal as a wildcard for /// arbitrary nested JSON (useful for parts of object emitted by other programs /// (e.g., rustc) rather than Cargo itself). 
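///
/// An illustrative (made-up) pair that would compare equal, using the
/// `"{...}"` wildcard for a nested object:
///
/// ```text
/// expected: {"reason": "compiler-message", "target": "{...}"}
/// actual:   {"reason": "compiler-message", "target": {"name": "foo", "kind": ["lib"]}}
/// ```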
pub(crate) fn find_json_mismatch( expected: &Value, actual: &Value, cwd: Option<&Path>, ) -> Result<()> { match find_json_mismatch_r(expected, actual, cwd) { Some((expected_part, actual_part)) => bail!( "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", serde_json::to_string_pretty(expected).unwrap(), serde_json::to_string_pretty(&actual).unwrap(), serde_json::to_string_pretty(expected_part).unwrap(), serde_json::to_string_pretty(actual_part).unwrap(), ), None => Ok(()), } } fn find_json_mismatch_r<'a>( expected: &'a Value, actual: &'a Value, cwd: Option<&Path>, ) -> Option<(&'a Value, &'a Value)> { use serde_json::Value::*; match (expected, actual) { (&Number(ref l), &Number(ref r)) if l == r => None, (&Bool(l), &Bool(r)) if l == r => None, (&String(ref l), _) if l == "{...}" => None, (&String(ref l), &String(ref r)) => { if match_exact(l, r, "", "", cwd).is_err() { Some((expected, actual)) } else { None } } (&Array(ref l), &Array(ref r)) => { if l.len() != r.len() { return Some((expected, actual)); } l.iter() .zip(r.iter()) .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Object(ref l), &Object(ref r)) => { let mut expected_entries = l.iter(); let mut actual_entries = r.iter(); loop { match (expected_entries.next(), actual_entries.next()) { (None, None) => return None, (Some((expected_key, expected_value)), Some((actual_key, actual_value))) if expected_key == actual_key => { if let mismatch @ Some(_) = find_json_mismatch_r(expected_value, actual_value, cwd) { return mismatch; } } _ => return Some((expected, actual)), } } } (&Null, &Null) => None, // Magic string literal `"{...}"` acts as wildcard for any sub-JSON. _ => Some((expected, actual)), } } /// A single line string that supports `[..]` wildcard matching. 
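///
/// For example, `WildStr::new("a[..]b") == WildStr::new("a XYZ b")` holds,
/// while comparing two patterns that both contain `[..]` panics (see the
/// `PartialEq` impl below).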
pub(crate) struct WildStr<'a> { has_meta: bool, line: &'a str, } impl<'a> WildStr<'a> { pub fn new(line: &'a str) -> WildStr<'a> { WildStr { has_meta: line.contains("[..]"), line, } } } impl<'a> PartialEq for WildStr<'a> { fn eq(&self, other: &Self) -> bool { match (self.has_meta, other.has_meta) { (false, false) => self.line == other.line, (true, false) => meta_cmp(self.line, other.line), (false, true) => meta_cmp(other.line, self.line), (true, true) => panic!("both lines cannot have [..]"), } } } fn meta_cmp(a: &str, mut b: &str) -> bool { for (i, part) in a.split("[..]").enumerate() { match b.find(part) { Some(j) => { if i == 0 && j != 0 { return false; } b = &b[j + part.len()..]; } None => return false, } } b.is_empty() || a.ends_with("[..]") } impl fmt::Display for WildStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.line) } } impl fmt::Debug for WildStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.line) } } #[cfg(test)] mod test { use snapbox::assert_data_eq; use snapbox::prelude::*; use snapbox::str; use super::*; #[test] fn wild_str_cmp() { for (a, b) in &[ ("a b", "a b"), ("a[..]b", "a b"), ("a[..]", "a b"), ("[..]", "a b"), ("[..]b", "a b"), ] { assert_eq!(WildStr::new(a), WildStr::new(b)); } for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] { assert_ne!(WildStr::new(a), WildStr::new(b)); } } #[test] fn dirty_msvc() { let case = |expected: &str, wild: &str, msvc: bool| { assert_eq!(expected, &replace_dirty_msvc_impl(wild, msvc)); }; // no replacements case("aa", "aa", false); case("aa", "aa", true); // with replacements case( "\ [DIRTY] a", "\ [DIRTY-MSVC] a", true, ); case( "", "\ [DIRTY-MSVC] a", false, ); case( "\ [DIRTY] a [COMPILING] a", "\ [DIRTY-MSVC] a [COMPILING] a", true, ); case( "\ [COMPILING] a", "\ [DIRTY-MSVC] a [COMPILING] a", false, ); // test trailing newline behavior case( "\ A B ", "\ A B ", true, ); case( "\ A B ", "\ A B ", false, ); case( "\ A B", "\ A B", true, ); case( "\ A B", "\ A B", false, ); case( "\ [DIRTY] a ", "\ [DIRTY-MSVC] a ", true, ); case( "\n", "\ [DIRTY-MSVC] a ", false, ); case( "\ [DIRTY] a", "\ [DIRTY-MSVC] a", true, ); case( "", "\ [DIRTY-MSVC] a", false, ); } #[test] fn redact_elapsed_time() { let mut subs = snapbox::Redactions::new(); add_regex_redactions(&mut subs); assert_data_eq!( subs.redact("[FINISHED] `release` profile [optimized] target(s) in 5.5s"), str!["[FINISHED] `release` profile [optimized] target(s) in [ELAPSED]s"].raw() ); assert_data_eq!( subs.redact("[FINISHED] `release` profile [optimized] target(s) in 1m 05s"), str!["[FINISHED] `release` profile [optimized] target(s) in [ELAPSED]s"].raw() ); } } cargo-test-support-0.3.0/src/containers.rs000064400000000000000000000217211046102023000167130ustar 00000000000000//! Support for testing using Docker containers. //! //! The [`Container`] type is a builder for configuring a container to run. //! After you call `launch`, you can use the [`ContainerHandle`] to interact //! with the running container. //! //! Tests using containers must use `#[cargo_test(container_test)]` to disable //! them unless the CARGO_CONTAINER_TESTS environment variable is set. use cargo_util::ProcessBuilder; use std::collections::HashMap; use std::io::Read; use std::path::PathBuf; use std::process::Command; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; use tar::Header; /// A builder for configuring a container to run. 
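///
/// A rough usage sketch (the file contents and mode here are illustrative,
/// not taken from a real test):
///
/// ```ignore
/// let sshd = Container::new("sshd")
///     .file(
///         MkFile::path("home/testuser/.ssh/authorized_keys")
///             .contents("ssh-ed25519 AAAA... test-key")
///             .mode(0o600),
///     )
///     .launch();
/// let ssh_port = sshd.port_mappings[&22];
/// ```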
pub struct Container { /// The host directory that forms the basis of the Docker image. build_context: PathBuf, /// Files to copy over to the image. files: Vec, } /// A handle to a running container. /// /// You can use this to interact with the container. pub struct ContainerHandle { /// The name of the container. name: String, /// The IP address of the container. /// /// NOTE: This is currently unused, but may be useful so I left it in. /// This can only be used on Linux. macOS and Windows docker doesn't allow /// direct connection to the container. pub ip_address: String, /// Port mappings of container_port to host_port for ports exposed via EXPOSE. pub port_mappings: HashMap, } impl Container { pub fn new(context_dir: &str) -> Container { assert!(std::env::var_os("CARGO_CONTAINER_TESTS").is_some()); let mut build_context = PathBuf::from(env!("CARGO_MANIFEST_DIR")); build_context.push("containers"); build_context.push(context_dir); Container { build_context, files: Vec::new(), } } /// Adds a file to be copied into the container. pub fn file(mut self, file: MkFile) -> Self { self.files.push(file); self } /// Starts the container. pub fn launch(mut self) -> ContainerHandle { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let name = format!("cargo_test_{id}"); remove_if_exists(&name); self.create_container(&name); self.copy_files(&name); self.start_container(&name); let info = self.container_inspect(&name); let ip_address = if cfg!(target_os = "linux") { info[0]["NetworkSettings"]["IPAddress"] .as_str() .unwrap() .to_string() } else { // macOS and Windows can't make direct connections to the // container. It only works through exposed ports or mapped ports. "127.0.0.1".to_string() }; let port_mappings = self.port_mappings(&info); self.wait_till_ready(&port_mappings); ContainerHandle { name, ip_address, port_mappings, } } fn create_container(&self, name: &str) { static BUILD_LOCK: Mutex<()> = Mutex::new(()); let image_base = self.build_context.file_name().unwrap(); let image_name = format!("cargo-test-{}", image_base.to_str().unwrap()); let _lock = BUILD_LOCK .lock() .map_err(|_| panic!("previous docker build failed, unable to run test")); ProcessBuilder::new("docker") .args(&["build", "--tag", image_name.as_str()]) .arg(&self.build_context) .exec_with_output() .unwrap(); ProcessBuilder::new("docker") .args(&[ "container", "create", "--publish-all", "--rm", "--name", name, ]) .arg(image_name) .exec_with_output() .unwrap(); } fn copy_files(&mut self, name: &str) { if self.files.is_empty() { return; } let mut ar = tar::Builder::new(Vec::new()); let files = std::mem::replace(&mut self.files, Vec::new()); for mut file in files { ar.append_data(&mut file.header, &file.path, file.contents.as_slice()) .unwrap(); } let ar = ar.into_inner().unwrap(); ProcessBuilder::new("docker") .args(&["cp", "-"]) .arg(format!("{name}:/")) .stdin(ar) .exec_with_output() .unwrap(); } fn start_container(&self, name: &str) { ProcessBuilder::new("docker") .args(&["container", "start"]) .arg(name) .exec_with_output() .unwrap(); } fn container_inspect(&self, name: &str) -> serde_json::Value { let output = ProcessBuilder::new("docker") .args(&["inspect", name]) .exec_with_output() .unwrap(); serde_json::from_slice(&output.stdout).unwrap() } /// Returns the mapping of container_port->host_port for ports that were /// exposed with EXPOSE. 
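///
/// The relevant part of the `docker inspect` output this reads looks
/// roughly like:
///
/// ```text
/// "Ports": { "22/tcp": [ { "HostIp": "0.0.0.0", "HostPort": "49154" } ] }
/// ```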
fn port_mappings(&self, info: &serde_json::Value) -> HashMap { info[0]["NetworkSettings"]["Ports"] .as_object() .unwrap() .iter() .map(|(key, value)| { let key = key .strip_suffix("/tcp") .expect("expected TCP only ports") .parse() .unwrap(); let values = value.as_array().unwrap(); let value = values .iter() .find(|value| value["HostIp"].as_str().unwrap() == "0.0.0.0") .expect("expected localhost IP"); let host_port = value["HostPort"].as_str().unwrap().parse().unwrap(); (key, host_port) }) .collect() } fn wait_till_ready(&self, port_mappings: &HashMap) { for port in port_mappings.values() { let mut ok = false; for _ in 0..30 { match std::net::TcpStream::connect(format!("127.0.0.1:{port}")) { Ok(_) => { ok = true; break; } Err(e) => { if e.kind() != std::io::ErrorKind::ConnectionRefused { panic!("unexpected localhost connection error: {e:?}"); } std::thread::sleep(std::time::Duration::new(1, 0)); } } } if !ok { panic!("no listener on localhost port {port}"); } } } } impl ContainerHandle { /// Executes a program inside a running container. pub fn exec(&self, args: &[&str]) -> std::process::Output { ProcessBuilder::new("docker") .args(&["container", "exec", &self.name]) .args(args) .exec_with_output() .unwrap() } /// Returns the contents of a file inside the container. pub fn read_file(&self, path: &str) -> String { let output = ProcessBuilder::new("docker") .args(&["cp", &format!("{}:{}", self.name, path), "-"]) .exec_with_output() .unwrap(); let mut ar = tar::Archive::new(output.stdout.as_slice()); let mut entry = ar.entries().unwrap().next().unwrap().unwrap(); let mut contents = String::new(); entry.read_to_string(&mut contents).unwrap(); contents } } impl Drop for ContainerHandle { fn drop(&mut self) { // To help with debugging, this will keep the container alive. if std::env::var_os("CARGO_CONTAINER_TEST_KEEP").is_some() { return; } remove_if_exists(&self.name); } } fn remove_if_exists(name: &str) { if let Err(e) = Command::new("docker") .args(&["container", "rm", "--force", name]) .output() { panic!("failed to run docker: {e}"); } } /// Builder for configuring a file to copy into a container. pub struct MkFile { path: String, contents: Vec, header: Header, } impl MkFile { /// Defines a file to add to the container. /// /// This should be passed to `Container::file`. /// /// The path is the path inside the container to create the file. pub fn path(path: &str) -> MkFile { MkFile { path: path.to_string(), contents: Vec::new(), header: Header::new_gnu(), } } pub fn contents(mut self, contents: impl Into>) -> Self { self.contents = contents.into(); self.header.set_size(self.contents.len() as u64); self } pub fn mode(mut self, mode: u32) -> Self { self.header.set_mode(mode); self } pub fn uid(mut self, uid: u64) -> Self { self.header.set_uid(uid); self } pub fn gid(mut self, gid: u64) -> Self { self.header.set_gid(gid); self } } cargo-test-support-0.3.0/src/cross_compile.rs000064400000000000000000000214241046102023000174070ustar 00000000000000//! Support for cross-compile tests with the `--target` flag. //! //! Note that cross-testing is very limited. You need to install the //! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa). //! //! Set CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these tests //! if you are unable to use the alternate target. Unfortunately 32-bit //! support on macOS is going away, so macOS users are out of luck. //! //! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI. 
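//!
//! A gated test typically looks something like this (an illustrative
//! sketch, not a test taken from the suite):
//!
//! ```ignore
//! #[cargo_test]
//! fn simple_cross() {
//!     if cross_compile::disabled() {
//!         return;
//!     }
//!     let target = cross_compile::alternate();
//!     // ...build the project with `--target` and, if
//!     // `cross_compile::can_run_on_host()`, run the result...
//! }
//! ```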
use crate::{basic_manifest, main_file, project}; use cargo_util::ProcessError; use std::env; use std::fmt::Write; use std::process::{Command, Output}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Once; /// Whether or not the resulting cross binaries can run on the host. static CAN_RUN_ON_HOST: AtomicBool = AtomicBool::new(false); pub fn disabled() -> bool { // First, disable if requested. match env::var("CFG_DISABLE_CROSS_TESTS") { Ok(ref s) if *s == "1" => return true, _ => {} } // Cross tests are only tested to work on macos, linux, and MSVC windows. if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) { return true; } // It's not particularly common to have a cross-compilation setup, so // try to detect that before we fail a bunch of tests through no fault // of the user. static CAN_BUILD_CROSS_TESTS: AtomicBool = AtomicBool::new(false); static CHECK: Once = Once::new(); let cross_target = alternate(); let run_cross_test = || -> anyhow::Result { let p = project() .at("cross_test") .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0")) .file("src/main.rs", &main_file(r#""testing!""#, &[])) .build(); let build_result = p .cargo("build --target") .arg(&cross_target) .exec_with_output(); if build_result.is_ok() { CAN_BUILD_CROSS_TESTS.store(true, Ordering::SeqCst); } let result = p .cargo("run --target") .arg(&cross_target) .exec_with_output(); if result.is_ok() { CAN_RUN_ON_HOST.store(true, Ordering::SeqCst); } build_result }; CHECK.call_once(|| { drop(run_cross_test()); }); if CAN_BUILD_CROSS_TESTS.load(Ordering::SeqCst) { // We were able to compile a simple project, so the user has the // necessary `std::` bits installed. Therefore, tests should not // be disabled. return false; } // We can't compile a simple cross project. We want to warn the user // by failing a single test and having the remainder of the cross tests // pass. We don't use `std::sync::Once` here because panicking inside its // `call_once` method would poison the `Once` instance, which is not what // we want. static HAVE_WARNED: AtomicBool = AtomicBool::new(false); if HAVE_WARNED.swap(true, Ordering::SeqCst) { // We are some other test and somebody else is handling the warning. // Just disable the current test. return true; } // We are responsible for warning the user, which we do by panicking. let mut message = format!( " Cannot cross compile to {}. This failure can be safely ignored. If you would prefer to not see this failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\". Alternatively, you can install the necessary libraries to enable cross compilation tests. Cross compilation tests depend on your host platform. ", cross_target ); if cfg!(target_os = "linux") { message.push_str( " Linux cross tests target i686-unknown-linux-gnu, which requires the ability to build and run 32-bit targets. This requires the 32-bit libraries to be installed. For example, on Ubuntu, run `sudo apt install gcc-multilib` to install the necessary libraries. ", ); } else if cfg!(all(target_os = "macos", target_arch = "aarch64")) { message.push_str( " macOS on aarch64 cross tests to target x86_64-apple-darwin. This should be natively supported via Xcode, nothing additional besides the rustup target should be needed. ", ); } else if cfg!(target_os = "macos") { message.push_str( " macOS on x86_64 cross tests to target x86_64-apple-ios, which requires the iOS SDK to be installed. This should be included with Xcode automatically. 
If you are using the Xcode command line tools, you'll need to install the full Xcode app (from the Apple App Store), and switch to it with this command: sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer Some cross-tests want to *run* the executables on the host. These tests will be ignored if this is not possible. On macOS, this means you need an iOS simulator installed to run these tests. To install a simulator, open Xcode, go to preferences > Components, and download the latest iOS simulator. ", ); } else if cfg!(target_os = "windows") { message.push_str( " Windows cross tests target i686-pc-windows-msvc, which requires the ability to build and run 32-bit targets. This should work automatically if you have properly installed Visual Studio build tools. ", ); } else { // The check at the top should prevent this. panic!("platform should have been skipped"); } let rustup_available = Command::new("rustup").output().is_ok(); if rustup_available { write!( message, " Make sure that the appropriate `rustc` target is installed with rustup: rustup target add {} ", cross_target ) .unwrap(); } else { write!( message, " rustup does not appear to be installed. Make sure that the appropriate `rustc` target is installed for the target `{}`. ", cross_target ) .unwrap(); } // Show the actual error message. match run_cross_test() { Ok(_) => message.push_str("\nUh oh, second run succeeded?\n"), Err(err) => match err.downcast_ref::() { Some(proc_err) => write!(message, "\nTest error: {}\n", proc_err).unwrap(), None => write!(message, "\nUnexpected non-process error: {}\n", err).unwrap(), }, } panic!("{}", message); } /// The arch triple of the test-running host. pub fn native() -> &'static str { env!("NATIVE_ARCH") } pub fn native_arch() -> &'static str { match native() .split("-") .next() .expect("Target triple has unexpected format") { "x86_64" => "x86_64", "aarch64" => "aarch64", "i686" => "x86", _ => panic!("This test should be gated on cross_compile::disabled."), } } /// The alternate target-triple to build with. /// /// Only use this function on tests that check `cross_compile::disabled`. pub fn alternate() -> &'static str { try_alternate().expect("This test should be gated on cross_compile::disabled.") } /// A possible alternate target-triple to build with. pub(crate) fn try_alternate() -> Option<&'static str> { if cfg!(all(target_os = "macos", target_arch = "aarch64")) { Some("x86_64-apple-darwin") } else if cfg!(target_os = "macos") { Some("x86_64-apple-ios") } else if cfg!(target_os = "linux") { Some("i686-unknown-linux-gnu") } else if cfg!(all(target_os = "windows", target_env = "msvc")) { Some("i686-pc-windows-msvc") } else if cfg!(all(target_os = "windows", target_env = "gnu")) { Some("i686-pc-windows-gnu") } else { None } } pub fn alternate_arch() -> &'static str { if cfg!(target_os = "macos") { "x86_64" } else { "x86" } } /// A target-triple that is neither the host nor the target. /// /// Rustc may not work with it and it's alright, apart from being a /// valid target triple it is supposed to be used only as a /// placeholder for targets that should not be considered. pub fn unused() -> &'static str { "wasm32-unknown-unknown" } /// Whether or not the host can run cross-compiled executables. pub fn can_run_on_host() -> bool { if disabled() { return false; } // macos is currently configured to cross compile to x86_64-apple-ios // which requires a simulator to run. 
Azure's CI image appears to have the
// SDK installed, but it is not configured to launch iOS images with a
// simulator.
    if cfg!(target_os = "macos") {
        if CAN_RUN_ON_HOST.load(Ordering::SeqCst) {
            return true;
        } else {
            println!("Note: Cannot run on host, skipping.");
            return false;
        }
    } else {
        assert!(CAN_RUN_ON_HOST.load(Ordering::SeqCst));
        return true;
    }
}
cargo-test-support-0.3.0/src/diff.rs000064400000000000000000000116551046102023000154610ustar 00000000000000
//! A simple Myers diff implementation.
//!
//! This focuses on being short and simple, at the expense of being
//! inefficient. A key characteristic here is that this supports cargotest's
//! `[..]` wildcard matching. That means things like hashing can't be used.
//! Since Cargo's output tends to be small, this should be sufficient.

use std::fmt;
use std::io::Write;

/// A single line change to be applied to the original.
#[derive(Debug, Eq, PartialEq)]
pub enum Change<T> {
    Add(usize, T),
    Remove(usize, T),
    Keep(usize, usize, T),
}

pub fn diff<'a, T>(a: &'a [T], b: &'a [T]) -> Vec<Change<&'a T>>
where
    T: PartialEq,
{
    if a.is_empty() && b.is_empty() {
        return vec![];
    }
    let mut diff = vec![];
    for (prev_x, prev_y, x, y) in backtrack(&a, &b) {
        if x == prev_x {
            diff.push(Change::Add(prev_y + 1, &b[prev_y]));
        } else if y == prev_y {
            diff.push(Change::Remove(prev_x + 1, &a[prev_x]));
        } else {
            diff.push(Change::Keep(prev_x + 1, prev_y + 1, &a[prev_x]));
        }
    }
    diff.reverse();
    diff
}

fn shortest_edit<T>(a: &[T], b: &[T]) -> Vec<Vec<usize>>
where
    T: PartialEq,
{
    let max = a.len() + b.len();
    let mut v = vec![0; 2 * max + 1];
    let mut trace = vec![];
    for d in 0..=max {
        trace.push(v.clone());
        for k in (0..=(2 * d)).step_by(2) {
            let mut x = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
                // Move down
                v[max - d + k + 1]
            } else {
                // Move right
                v[max - d + k - 1] + 1
            };
            let mut y = x + d - k;
            // Step diagonally as far as possible.
            while x < a.len() && y < b.len() && a[x] == b[y] {
                x += 1;
                y += 1;
            }
            v[max - d + k] = x;
            // Return if reached the bottom-right position.
            if x >= a.len() && y >= b.len() {
                return trace;
            }
        }
    }
    panic!("finished without hitting end?");
}

fn backtrack<T>(a: &[T], b: &[T]) -> Vec<(usize, usize, usize, usize)>
where
    T: PartialEq,
{
    let mut result = vec![];
    let mut x = a.len();
    let mut y = b.len();
    let max = x + y;
    for (d, v) in shortest_edit(a, b).iter().enumerate().rev() {
        let k = x + d - y;
        let prev_k = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
            k + 1
        } else {
            k - 1
        };
        let prev_x = v[max - d + prev_k];
        let prev_y = (prev_x + d).saturating_sub(prev_k);
        while x > prev_x && y > prev_y {
            result.push((x - 1, y - 1, x, y));
            x -= 1;
            y -= 1;
        }
        if d > 0 {
            result.push((prev_x, prev_y, x, y));
        }
        x = prev_x;
        y = prev_y;
    }
    return result;
}

pub fn colored_diff<'a, T>(a: &'a [T], b: &'a [T]) -> String
where
    T: PartialEq + fmt::Display,
{
    let changes = diff(a, b);
    render_colored_changes(&changes)
}

pub fn render_colored_changes<T: fmt::Display>(changes: &[Change<T>]) -> String {
    // anstyle is not very ergonomic, but I don't want to bring in another dependency.
    let red = anstyle::AnsiColor::Red.on_default().render();
    let green = anstyle::AnsiColor::Green.on_default().render();
    let dim = (anstyle::Style::new() | anstyle::Effects::DIMMED).render();
    let bold = (anstyle::Style::new() | anstyle::Effects::BOLD).render();
    let reset = anstyle::Reset.render();

    let choice = if crate::is_ci() {
        // Don't use color on CI. Even though GitHub can display colors, it
        // makes reading the raw logs more difficult.
anstream::ColorChoice::Never } else { anstream::AutoStream::choice(&std::io::stdout()) }; let mut buffer = anstream::AutoStream::new(Vec::new(), choice); for change in changes { let (nums, sign, color, text) = match change { Change::Add(i, s) => (format!(" {:<4} ", i), '+', green, s), Change::Remove(i, s) => (format!("{:<4} ", i), '-', red, s), Change::Keep(x, y, s) => (format!("{:<4}{:<4} ", x, y), ' ', dim, s), }; writeln!( buffer, "{dim}{nums}{reset}{bold}{sign}{reset}{color}{text}{reset}" ) .unwrap(); } String::from_utf8(buffer.into_inner()).unwrap() } #[cfg(test)] pub fn compare(a: &str, b: &str) { let a: Vec<_> = a.chars().collect(); let b: Vec<_> = b.chars().collect(); let changes = diff(&a, &b); let mut result = vec![]; for change in changes { match change { Change::Add(_, s) => result.push(*s), Change::Remove(_, _s) => {} Change::Keep(_, _, s) => result.push(*s), } } assert_eq!(b, result); } #[test] fn basic_tests() { compare("", ""); compare("A", ""); compare("", "B"); compare("ABCABBA", "CBABAC"); } cargo-test-support-0.3.0/src/git.rs000064400000000000000000000162201046102023000153270ustar 00000000000000/* # Git Testing Support ## Creating a git dependency `git::new()` is an easy way to create a new git repository containing a project that you can then use as a dependency. It will automatically add all the files you specify in the project and commit them to the repository. Example: ``` let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "1.0.0")) .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#) }); // Use the `url()` method to get the file url to the new repository. let p = project() .file("Cargo.toml", &format!(r#" [package] name = "a" version = "1.0.0" [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url())) .file("src/lib.rs", "extern crate dep1;") .build(); ``` ## Manually creating repositories `git::repo()` can be used to create a `RepoBuilder` which provides a way of adding files to a blank repository and committing them. If you want to then manipulate the repository (such as adding new files or tags), you can use `git2::Repository::open()` to open the repository and then use some of the helper functions in this file to interact with the repository. */ use crate::{path2url, project, Project, ProjectBuilder, SymlinkBuilder}; use std::fs; use std::path::{Path, PathBuf}; use std::sync::Once; use url::Url; #[must_use] pub struct RepoBuilder { repo: git2::Repository, files: Vec, } pub struct Repository(git2::Repository); /// Create a `RepoBuilder` to build a new git repository. /// /// Call `build()` to finalize and create the repository. pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } impl RepoBuilder { pub fn init(p: &Path) -> RepoBuilder { t!(fs::create_dir_all(p.parent().unwrap())); let repo = init(p); RepoBuilder { repo, files: Vec::new(), } } /// Add a file to the repository. pub fn file(self, path: &str, contents: &str) -> RepoBuilder { let mut me = self.nocommit_file(path, contents); me.files.push(PathBuf::from(path)); me } /// Create a symlink to a directory pub fn nocommit_symlink_dir>(self, dst: T, src: T) -> Self { let workdir = self.repo.workdir().unwrap(); SymlinkBuilder::new_dir(workdir.join(dst), workdir.join(src)).mk(); self } /// Add a file that will be left in the working directory, but not added /// to the repository. 
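///
/// A hedged usage sketch (not part of the original docs; the repository
/// path and file contents below are illustrative):
/// ```no_run
/// use cargo_test_support::{git, paths};
///
/// // Keep a scratch file in the working tree without committing it.
/// let repo = git::repo(&paths::root().join("dep"))
///     .file("src/lib.rs", "")
///     .nocommit_file("local-notes.txt", "scratch data, never committed")
///     .build();
/// ```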
pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { let dst = self.repo.workdir().unwrap().join(path); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, contents)); self } /// Create the repository and commit the new files. pub fn build(self) -> Repository { { let mut index = t!(self.repo.index()); for file in self.files.iter() { t!(index.add_path(file)); } t!(index.write()); let id = t!(index.write_tree()); let tree = t!(self.repo.find_tree(id)); let sig = t!(self.repo.signature()); t!(self .repo .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[])); } let RepoBuilder { repo, .. } = self; Repository(repo) } } impl Repository { pub fn root(&self) -> &Path { self.0.workdir().unwrap() } pub fn url(&self) -> Url { path2url(self.0.workdir().unwrap().to_path_buf()) } pub fn revparse_head(&self) -> String { self.0 .revparse_single("HEAD") .expect("revparse HEAD") .id() .to_string() } } /// Initialize a new repository at the given path. pub fn init(path: &Path) -> git2::Repository { default_search_path(); let repo = t!(git2::Repository::init(path)); default_repo_cfg(&repo); repo } fn default_search_path() { use crate::paths::global_root; use git2::{opts::set_search_path, ConfigLevel}; static INIT: Once = Once::new(); INIT.call_once(|| unsafe { let path = global_root().join("blank_git_search_path"); t!(set_search_path(ConfigLevel::System, &path)); t!(set_search_path(ConfigLevel::Global, &path)); t!(set_search_path(ConfigLevel::XDG, &path)); t!(set_search_path(ConfigLevel::ProgramData, &path)); }) } fn default_repo_cfg(repo: &git2::Repository) { let mut cfg = t!(repo.config()); t!(cfg.set_str("user.email", "foo@bar.com")); t!(cfg.set_str("user.name", "Foo Bar")); } /// Create a new git repository with a project. pub fn new(name: &str, callback: F) -> Project where F: FnOnce(ProjectBuilder) -> ProjectBuilder, { new_repo(name, callback).0 } /// Create a new git repository with a project. /// Returns both the Project and the git Repository. pub fn new_repo(name: &str, callback: F) -> (Project, git2::Repository) where F: FnOnce(ProjectBuilder) -> ProjectBuilder, { let mut git_project = project().at(name); git_project = callback(git_project); let git_project = git_project.build(); let repo = init(&git_project.root()); add(&repo); commit(&repo); (git_project, repo) } /// Add all files in the working directory to the git index. pub fn add(repo: &git2::Repository) { let mut index = t!(repo.index()); t!(index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)); t!(index.write()); } /// Add a git submodule to the repository. pub fn add_submodule<'a>( repo: &'a git2::Repository, url: &str, path: &Path, ) -> git2::Submodule<'a> { let path = path.to_str().unwrap().replace(r"\", "/"); let mut s = t!(repo.submodule(url, Path::new(&path), false)); let subrepo = t!(s.open()); default_repo_cfg(&subrepo); t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")); let mut origin = t!(subrepo.find_remote("origin")); t!(origin.fetch(&Vec::::new(), None, None)); t!(subrepo.checkout_head(None)); t!(s.add_finalize()); s } /// Commit changes to the git repository. 
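///
/// A small sketch of the typical flow (the project name and the file edit
/// are hypothetical):
/// ```no_run
/// use cargo_test_support::{basic_manifest, git};
///
/// let (p, repo) = git::new_repo("dep", |p| {
///     p.file("Cargo.toml", &basic_manifest("dep", "1.0.0"))
///         .file("src/lib.rs", "")
/// });
/// // Edit a file, then stage and commit the new state.
/// p.change_file("src/lib.rs", "pub fn f() {}");
/// git::add(&repo);
/// git::commit(&repo);
/// ```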
pub fn commit(repo: &git2::Repository) -> git2::Oid {
    let tree_id = t!(t!(repo.index()).write_tree());
    let sig = t!(repo.signature());
    let mut parents = Vec::new();
    if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {
        parents.push(t!(repo.find_commit(parent)))
    }
    let parents = parents.iter().collect::<Vec<_>>();
    t!(repo.commit(
        Some("HEAD"),
        &sig,
        &sig,
        "test",
        &t!(repo.find_tree(tree_id)),
        &parents
    ))
}

/// Create a new tag in the git repository.
pub fn tag(repo: &git2::Repository, name: &str) {
    let head = repo.head().unwrap().target().unwrap();
    t!(repo.tag(
        name,
        &t!(repo.find_object(head, None)),
        &t!(repo.signature()),
        "make a new tag",
        false
    ));
}

/// Returns true if gitoxide is globally activated.
///
/// That way, tests that normally use `git2` can transparently use `gitoxide`.
pub fn cargo_uses_gitoxide() -> bool {
    std::env::var_os("__CARGO_USE_GITOXIDE_INSTEAD_OF_GIT2").map_or(false, |value| value == "1")
}
cargo-test-support-0.3.0/src/install.rs000064400000000000000000000017041046102023000162130ustar 00000000000000
use crate::paths;
use std::env::consts::EXE_SUFFIX;
use std::path::{Path, PathBuf};

/// Used by `cargo install` tests to assert an executable binary
/// has been installed. Example usage:
/// ```no_run
/// use cargo_test_support::install::assert_has_installed_exe;
/// use cargo_test_support::install::cargo_home;
///
/// assert_has_installed_exe(cargo_home(), "foo");
/// ```
#[track_caller]
pub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
    assert!(check_has_installed_exe(path, name));
}

#[track_caller]
pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
    assert!(!check_has_installed_exe(path, name));
}

fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {
    path.as_ref().join("bin").join(exe(name)).is_file()
}

pub fn cargo_home() -> PathBuf {
    paths::home().join(".cargo")
}

pub fn exe(name: &str) -> String {
    format!("{}{}", name, EXE_SUFFIX)
}
cargo-test-support-0.3.0/src/lib.rs000064400000000000000000001425151046102023000153210ustar 00000000000000
//! # Cargo test support.
//!
//! See for a guide on writing tests.
//!
//! WARNING: You might not want to use this outside of Cargo.
//!
//! * This is designed for testing Cargo itself. Use at your own risk.
//! * No guarantee on any stability across versions.
//! * No feature request would be accepted unless proved useful for testing Cargo.

#![allow(clippy::disallowed_methods)]
#![allow(clippy::print_stderr)]
#![allow(clippy::print_stdout)]

use std::env;
use std::ffi::OsStr;
use std::fmt::Write;
use std::fs;
use std::os;
use std::path::{Path, PathBuf};
use std::process::{Command, Output};
use std::sync::OnceLock;
use std::thread::JoinHandle;
use std::time::{self, Duration};

use anyhow::{bail, Result};
use cargo_util::{is_ci, ProcessBuilder, ProcessError};
use snapbox::IntoData as _;
use url::Url;

use self::paths::CargoPathExt;

#[macro_export]
macro_rules! t {
    ($e:expr) => {
        match $e {
            Ok(e) => e,
            Err(e) => $crate::panic_error(&format!("failed running {}", stringify!($e)), e),
        }
    };
}

pub use snapbox::file;
pub use snapbox::str;
pub use snapbox::utils::current_dir;

#[track_caller]
pub fn panic_error(what: &str, err: impl Into<anyhow::Error>) -> ! {
    let err = err.into();
    pe(what, err);
    #[track_caller]
    fn pe(what: &str, err: anyhow::Error) -> !
{ let mut result = format!("{}\nerror: {}", what, err); for cause in err.chain().skip(1) { let _ = writeln!(result, "\nCaused by:"); let _ = write!(result, "{}", cause); } panic!("\n{}", result); } } pub use cargo_test_macro::cargo_test; pub mod compare; pub mod containers; pub mod cross_compile; mod diff; pub mod git; pub mod install; pub mod paths; pub mod publish; pub mod registry; pub mod tools; pub mod prelude { pub use crate::cargo_test; pub use crate::ArgLine; pub use crate::CargoCommand; pub use crate::ChannelChanger; pub use crate::TestEnv; pub use snapbox::IntoData; } /* * * ===== Builders ===== * */ #[derive(PartialEq, Clone)] struct FileBuilder { path: PathBuf, body: String, executable: bool, } impl FileBuilder { pub fn new(path: PathBuf, body: &str, executable: bool) -> FileBuilder { FileBuilder { path, body: body.to_string(), executable: executable, } } fn mk(&mut self) { if self.executable { let mut path = self.path.clone().into_os_string(); write!(path, "{}", env::consts::EXE_SUFFIX).unwrap(); self.path = path.into(); } self.dirname().mkdir_p(); fs::write(&self.path, &self.body) .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e)); #[cfg(unix)] if self.executable { use std::os::unix::fs::PermissionsExt; let mut perms = fs::metadata(&self.path).unwrap().permissions(); let mode = perms.mode(); perms.set_mode(mode | 0o111); fs::set_permissions(&self.path, perms).unwrap(); } } fn dirname(&self) -> &Path { self.path.parent().unwrap() } } #[derive(PartialEq, Clone)] struct SymlinkBuilder { dst: PathBuf, src: PathBuf, src_is_dir: bool, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst, src, src_is_dir: false, } } pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst, src, src_is_dir: true, } } #[cfg(unix)] fn mk(&self) { self.dirname().mkdir_p(); t!(os::unix::fs::symlink(&self.dst, &self.src)); } #[cfg(windows)] fn mk(&mut self) { self.dirname().mkdir_p(); if self.src_is_dir { t!(os::windows::fs::symlink_dir(&self.dst, &self.src)); } else { if let Some(ext) = self.dst.extension() { if ext == env::consts::EXE_EXTENSION { self.src.set_extension(ext); } } t!(os::windows::fs::symlink_file(&self.dst, &self.src)); } } fn dirname(&self) -> &Path { self.src.parent().unwrap() } } /// A cargo project to run tests against. /// /// See [`ProjectBuilder`] or [`Project::from_template`] to get started. pub struct Project { root: PathBuf, } /// Create a project to run tests against /// /// The project can be constructed programmatically or from the filesystem with [`Project::from_template`] #[must_use] pub struct ProjectBuilder { root: Project, files: Vec, symlinks: Vec, no_manifest: bool, } impl ProjectBuilder { /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.root() } /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.root.target_debug_dir() } pub fn new(root: PathBuf) -> ProjectBuilder { ProjectBuilder { root: Project { root }, files: vec![], symlinks: vec![], no_manifest: false, } } pub fn at>(mut self, path: P) -> Self { self.root = Project { root: paths::root().join(path), }; self } /// Adds a file to the project. pub fn file>(mut self, path: B, body: &str) -> Self { self._file(path.as_ref(), body, false); self } /// Adds an executable file to the project. 
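///
/// A hedged sketch (the script path and body are illustrative; on Windows
/// the platform's executable suffix is appended automatically):
/// ```no_run
/// use cargo_test_support::project;
///
/// let p = project()
///     // Written to disk with the execute bit set on Unix.
///     .executable("scripts/hello", "#!/bin/sh\necho hello\n")
///     .file("src/lib.rs", "")
///     .build();
/// ```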
pub fn executable>(mut self, path: B, body: &str) -> Self { self._file(path.as_ref(), body, true); self } fn _file(&mut self, path: &Path, body: &str, executable: bool) { self.files.push(FileBuilder::new( self.root.root().join(path), body, executable, )); } /// Adds a symlink to a file to the project. pub fn symlink>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new( self.root.root().join(dst), self.root.root().join(src), )); self } /// Create a symlink to a directory pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( self.root.root().join(dst), self.root.root().join(src), )); self } pub fn no_manifest(mut self) -> Self { self.no_manifest = true; self } /// Creates the project. pub fn build(mut self) -> Project { // First, clean the directory if it already exists self.rm_root(); // Create the empty directory self.root.root().mkdir_p(); let manifest_path = self.root.root().join("Cargo.toml"); if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) { self._file( Path::new("Cargo.toml"), &basic_manifest("foo", "0.0.1"), false, ) } let past = time::SystemTime::now() - Duration::new(1, 0); let ftime = filetime::FileTime::from_system_time(past); for file in self.files.iter_mut() { file.mk(); if is_coarse_mtime() { // Place the entire project 1 second in the past to ensure // that if cargo is called multiple times, the 2nd call will // see targets as "fresh". Without this, if cargo finishes in // under 1 second, the second call will see the mtime of // source == mtime of output and consider it dirty. filetime::set_file_times(&file.path, ftime, ftime).unwrap(); } } for symlink in self.symlinks.iter_mut() { symlink.mk(); } let ProjectBuilder { root, .. } = self; root } fn rm_root(&self) { self.root.root().rm_rf() } } impl Project { /// Copy the test project from a fixed state pub fn from_template(template_path: impl AsRef) -> Self { let root = paths::root(); let project_root = root.join("case"); snapbox::dir::copy_template(template_path.as_ref(), &project_root).unwrap(); Self { root: project_root } } /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.clone() } /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target` pub fn build_dir(&self) -> PathBuf { self.root().join("target") } /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.build_dir().join("debug") } /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo` pub fn url(&self) -> Url { path2url(self.root()) } /// Path to an example built as a library. /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib` pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf { self.target_debug_dir() .join("examples") .join(paths::get_lib_filename(name, kind)) } /// Path to a debug binary. /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo` pub fn bin(&self, b: &str) -> PathBuf { self.build_dir() .join("debug") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } /// Path to a release binary. /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo` pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir() .join("release") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } /// Path to a debug binary for a specific target triple. 
/// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo` pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { self.build_dir().join(target).join("debug").join(&format!( "{}{}", b, env::consts::EXE_SUFFIX )) } /// Returns an iterator of paths matching the glob pattern, which is /// relative to the project root. pub fn glob>(&self, pattern: P) -> glob::Paths { let pattern = self.root().join(pattern); glob::glob(pattern.to_str().expect("failed to convert pattern to str")) .expect("failed to glob") } /// Changes the contents of an existing file. pub fn change_file(&self, path: &str, body: &str) { FileBuilder::new(self.root().join(path), body, false).mk() } /// Creates a `ProcessBuilder` to run a program in the project /// and wrap it in an Execs to assert on the execution. /// Example: /// p.process(&p.bin("foo")) /// .with_stdout("bar\n") /// .run(); pub fn process>(&self, program: T) -> Execs { let mut p = process(program); p.cwd(self.root()); execs().with_process_builder(p) } /// Creates a `ProcessBuilder` to run cargo. /// Arguments can be separated by spaces. /// Example: /// p.cargo("build --bin foo").run(); pub fn cargo(&self, cmd: &str) -> Execs { let cargo = cargo_exe(); let mut execs = self.process(&cargo); if let Some(ref mut p) = execs.process_builder { p.env("CARGO", cargo); p.arg_line(cmd); } execs } /// Safely run a process after `cargo build`. /// /// Windows has a problem where a process cannot be reliably /// be replaced, removed, or renamed immediately after executing it. /// The action may fail (with errors like Access is denied), or /// it may succeed, but future attempts to use the same filename /// will fail with "Already Exists". /// /// If you have a test that needs to do `cargo run` multiple /// times, you should instead use `cargo build` and use this /// method to run the executable. Each time you call this, /// use a new name for `dst`. /// See rust-lang/cargo#5481. pub fn rename_run(&self, src: &str, dst: &str) -> Execs { let src = self.bin(src); let dst = self.bin(dst); fs::rename(&src, &dst) .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e)); self.process(dst) } /// Returns the contents of `Cargo.lock`. pub fn read_lockfile(&self) -> String { self.read_file("Cargo.lock") } /// Returns the contents of a path in the project root pub fn read_file(&self, path: &str) -> String { let full = self.root().join(path); fs::read_to_string(&full) .unwrap_or_else(|e| panic!("could not read file {}: {}", full.display(), e)) } /// Modifies `Cargo.toml` to remove all commented lines. 
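///
/// A minimal sketch (the commented-out dependency is hypothetical); note
/// that this strips every `#` character in the manifest:
/// ```no_run
/// use cargo_test_support::project;
///
/// let p = project()
///     .file(
///         "Cargo.toml",
///         r#"
///         [package]
///         name = "foo"
///         version = "0.0.1"
///         edition = "2015"
///
///         #[dependencies]
///         #dep = "1.0"
///         "#,
///     )
///     .file("src/lib.rs", "")
///     .build();
/// // Activates the `[dependencies]` section by removing the `#`s.
/// p.uncomment_root_manifest();
/// ```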
pub fn uncomment_root_manifest(&self) { let contents = self.read_file("Cargo.toml").replace("#", ""); fs::write(self.root().join("Cargo.toml"), contents).unwrap(); } pub fn symlink(&self, src: impl AsRef, dst: impl AsRef) { let src = self.root().join(src.as_ref()); let dst = self.root().join(dst.as_ref()); #[cfg(unix)] { if let Err(e) = os::unix::fs::symlink(&src, &dst) { panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e); } } #[cfg(windows)] { if src.is_dir() { if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) { panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e); } } else { if let Err(e) = os::windows::fs::symlink_file(&src, &dst) { panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e); } } } } } // Generates a project layout pub fn project() -> ProjectBuilder { ProjectBuilder::new(paths::root().join("foo")) } // Generates a project layout in given directory pub fn project_in(dir: &str) -> ProjectBuilder { ProjectBuilder::new(paths::root().join(dir).join("foo")) } // Generates a project layout inside our fake home dir pub fn project_in_home(name: &str) -> ProjectBuilder { ProjectBuilder::new(paths::home().join(name)) } // === Helpers === pub fn main_file(println: &str, deps: &[&str]) -> String { let mut buf = String::new(); for dep in deps.iter() { buf.push_str(&format!("extern crate {};\n", dep)); } buf.push_str("fn main() { println!("); buf.push_str(println); buf.push_str("); }\n"); buf } pub fn cargo_exe() -> PathBuf { snapbox::cmd::cargo_bin("cargo") } /// This is the raw output from the process. /// /// This is similar to `std::process::Output`, however the `status` is /// translated to the raw `code`. This is necessary because `ProcessError` /// does not have access to the raw `ExitStatus` because `ProcessError` needs /// to be serializable (for the Rustc cache), and `ExitStatus` does not /// provide a constructor. pub struct RawOutput { pub code: Option, pub stdout: Vec, pub stderr: Vec, } #[must_use] #[derive(Clone)] pub struct Execs { ran: bool, process_builder: Option, expect_stdout: Option, expect_stdin: Option, expect_stderr: Option, expect_exit_code: Option, expect_stdout_data: Option, expect_stderr_data: Option, expect_stdout_contains: Vec, expect_stderr_contains: Vec, expect_stdout_contains_n: Vec<(String, usize)>, expect_stdout_not_contains: Vec, expect_stderr_not_contains: Vec, expect_stdout_unordered: Vec, expect_stderr_unordered: Vec, expect_stderr_with_without: Vec<(Vec, Vec)>, expect_json: Option, expect_json_contains_unordered: Option, stream_output: bool, assert: snapbox::Assert, } impl Execs { pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs { self.process_builder = Some(p); self } /// Verifies that stdout is equal to the given lines. /// See [`compare`] for supported patterns. #[deprecated(note = "replaced with `Execs::with_stdout_data(expected)`")] pub fn with_stdout(&mut self, expected: S) -> &mut Self { self.expect_stdout = Some(expected.to_string()); self } /// Verifies that stderr is equal to the given lines. /// See [`compare`] for supported patterns. #[deprecated(note = "replaced with `Execs::with_stderr_data(expected)`")] pub fn with_stderr(&mut self, expected: S) -> &mut Self { self.expect_stderr = Some(expected.to_string()); self } /// Verifies that stdout is equal to the given lines. /// /// See [`compare::assert_e2e`] for assertion details. 
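///
/// A hedged sketch of typical usage (the program and its output are
/// illustrative):
/// ```no_run
/// use cargo_test_support::{project, str};
///
/// let p = project()
///     .file("src/main.rs", r#"fn main() { println!("hi"); }"#)
///     .build();
/// p.cargo("run")
///     .with_stdout_data(str![[r#"
/// hi
/// "#]])
///     .run();
/// ```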
pub fn with_stdout_data(&mut self, expected: impl snapbox::IntoData) -> &mut Self { self.expect_stdout_data = Some(expected.into_data()); self } /// Verifies that stderr is equal to the given lines. /// /// See [`compare::assert_e2e`] for assertion details. pub fn with_stderr_data(&mut self, expected: impl snapbox::IntoData) -> &mut Self { self.expect_stderr_data = Some(expected.into_data()); self } /// Writes the given lines to stdin. pub fn with_stdin(&mut self, expected: S) -> &mut Self { self.expect_stdin = Some(expected.to_string()); self } /// Verifies the exit code from the process. /// /// This is not necessary if the expected exit code is `0`. pub fn with_status(&mut self, expected: i32) -> &mut Self { self.expect_exit_code = Some(expected); self } /// Removes exit code check for the process. /// /// By default, the expected exit code is `0`. pub fn without_status(&mut self) -> &mut Self { self.expect_exit_code = None; self } /// Verifies that stdout contains the given contiguous lines somewhere in /// its output. /// /// See [`compare`] for supported patterns. #[deprecated(note = "replaced with `Execs::with_stdout_data(expected)`")] pub fn with_stdout_contains(&mut self, expected: S) -> &mut Self { self.expect_stdout_contains.push(expected.to_string()); self } /// Verifies that stderr contains the given contiguous lines somewhere in /// its output. /// /// See [`compare`] for supported patterns. #[deprecated(note = "replaced with `Execs::with_stderr_data(expected)`")] pub fn with_stderr_contains(&mut self, expected: S) -> &mut Self { self.expect_stderr_contains.push(expected.to_string()); self } /// Verifies that stdout contains the given contiguous lines somewhere in /// its output, and should be repeated `number` times. /// /// See [`compare`] for supported patterns. #[deprecated(note = "replaced with `Execs::with_stdout_data(expected)`")] pub fn with_stdout_contains_n(&mut self, expected: S, number: usize) -> &mut Self { self.expect_stdout_contains_n .push((expected.to_string(), number)); self } /// Verifies that stdout does not contain the given contiguous lines. /// /// See [`compare`] for supported patterns. /// /// See note on [`Self::with_stderr_does_not_contain`]. #[deprecated] pub fn with_stdout_does_not_contain(&mut self, expected: S) -> &mut Self { self.expect_stdout_not_contains.push(expected.to_string()); self } /// Verifies that stderr does not contain the given contiguous lines. /// /// See [`compare`] for supported patterns. /// /// Care should be taken when using this method because there is a /// limitless number of possible things that *won't* appear. A typo means /// your test will pass without verifying the correct behavior. If /// possible, write the test first so that it fails, and then implement /// your fix/feature to make it pass. #[deprecated] pub fn with_stderr_does_not_contain(&mut self, expected: S) -> &mut Self { self.expect_stderr_not_contains.push(expected.to_string()); self } /// Verifies that all of the stdout output is equal to the given lines, /// ignoring the order of the lines. /// /// See [`Execs::with_stderr_unordered`] for more details. #[deprecated(note = "replaced with `Execs::with_stdout_data(expected.unordered())`")] pub fn with_stdout_unordered(&mut self, expected: S) -> &mut Self { self.expect_stdout_unordered.push(expected.to_string()); self } /// Verifies that all of the stderr output is equal to the given lines, /// ignoring the order of the lines. /// /// See [`compare`] for supported patterns. 
/// /// This is useful when checking the output of `cargo build -v` since /// the order of the output is not always deterministic. /// Recommend use `with_stderr_contains` instead unless you really want to /// check *every* line of output. /// /// Be careful when using patterns such as `[..]`, because you may end up /// with multiple lines that might match, and this is not smart enough to /// do anything like longest-match. For example, avoid something like: /// /// ```text /// [RUNNING] `rustc [..] /// [RUNNING] `rustc --crate-name foo [..] /// ``` /// /// This will randomly fail if the other crate name is `bar`, and the /// order changes. #[deprecated(note = "replaced with `Execs::with_stderr_data(expected.unordered())`")] pub fn with_stderr_unordered(&mut self, expected: S) -> &mut Self { self.expect_stderr_unordered.push(expected.to_string()); self } /// Verify that a particular line appears in stderr with and without the /// given substrings. Exactly one line must match. /// /// The substrings are matched as `contains`. Example: /// /// ```no_run /// use cargo_test_support::execs; /// /// execs().with_stderr_line_without( /// &[ /// "[RUNNING] `rustc --crate-name build_script_build", /// "-C opt-level=3", /// ], /// &["-C debuginfo", "-C incremental"], /// ); /// ``` /// /// This will check that a build line includes `-C opt-level=3` but does /// not contain `-C debuginfo` or `-C incremental`. /// /// Be careful writing the `without` fragments, see note in /// `with_stderr_does_not_contain`. #[deprecated] pub fn with_stderr_line_without( &mut self, with: &[S], without: &[S], ) -> &mut Self { let with = with.iter().map(|s| s.to_string()).collect(); let without = without.iter().map(|s| s.to_string()).collect(); self.expect_stderr_with_without.push((with, without)); self } /// Verifies the JSON output matches the given JSON. /// /// This is typically used when testing cargo commands that emit JSON. /// Each separate JSON object should be separated by a blank line. /// Example: /// /// ```rust,ignore /// assert_that( /// p.cargo("metadata"), /// execs().with_json(r#" /// {"example": "abc"} /// /// {"example": "def"} /// "#) /// ); /// ``` /// /// - Objects should match in the order given. /// - The order of arrays is ignored. /// - Strings support patterns described in [`compare`]. /// - Use `"{...}"` to match any object. #[deprecated(note = "replaced with `Execs::with_stdout_data(expected.json_lines())`")] pub fn with_json(&mut self, expected: &str) -> &mut Self { self.expect_json = Some(expected.to_string()); self } /// Verifies JSON output contains the given objects (in any order) somewhere /// in its output. /// /// CAUTION: Be very careful when using this. Make sure every object is /// unique (not a subset of one another). Also avoid using objects that /// could possibly match multiple output lines unless you're very sure of /// what you are doing. /// /// See `with_json` for more detail. #[deprecated] pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self { match &mut self.expect_json_contains_unordered { None => self.expect_json_contains_unordered = Some(expected.to_string()), Some(e) => { e.push_str("\n\n"); e.push_str(expected); } } self } /// Forward subordinate process stdout/stderr to the terminal. /// Useful for printf debugging of the tests. /// CAUTION: CI will fail if you leave this in your test! 
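///
/// A sketch for local debugging only (do not commit; running with
/// `.stream()` panics on CI):
/// ```no_run
/// use cargo_test_support::project;
///
/// let p = project().file("src/lib.rs", "").build();
/// // Echo the child's stdout/stderr live while the command runs.
/// p.cargo("build -v").stream().run();
/// ```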
#[allow(unused)] pub fn stream(&mut self) -> &mut Self { self.stream_output = true; self } pub fn arg>(&mut self, arg: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.arg(arg); } self } pub fn args>(&mut self, args: &[T]) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.args(args); } self } pub fn cwd>(&mut self, path: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { if let Some(cwd) = p.get_cwd() { let new_path = cwd.join(path.as_ref()); p.cwd(new_path); } else { p.cwd(path); } } self } fn get_cwd(&self) -> Option<&Path> { self.process_builder.as_ref().and_then(|p| p.get_cwd()) } pub fn env>(&mut self, key: &str, val: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env(key, val); } self } pub fn env_remove(&mut self, key: &str) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env_remove(key); } self } pub fn exec_with_output(&mut self) -> Result { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); p.exec_with_output() } pub fn build_command(&mut self) -> Command { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); p.build_command() } /// Enables nightly features for testing /// /// The list of reasons should be why nightly cargo is needed. If it is /// because of an unstable feature put the name of the feature as the reason, /// e.g. `&["print-im-a-teapot"]` pub fn masquerade_as_nightly_cargo(&mut self, reasons: &[&str]) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.masquerade_as_nightly_cargo(reasons); } self } /// Overrides the crates.io URL for testing. /// /// Can be used for testing crates-io functionality where alt registries /// cannot be used. pub fn replace_crates_io(&mut self, url: &Url) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS", url.as_str()); } self } pub fn overlay_registry(&mut self, url: &Url, path: &str) -> &mut Self { if let Some(ref mut p) = self.process_builder { let env_value = format!("{}={}", url, path); p.env( "__CARGO_TEST_DEPENDENCY_CONFUSION_VULNERABILITY_DO_NOT_USE_THIS", env_value, ); } self } pub fn enable_split_debuginfo_packed(&mut self) -> &mut Self { self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "packed"); self } pub fn enable_mac_dsym(&mut self) -> &mut Self { if cfg!(target_os = "macos") { return self.enable_split_debuginfo_packed(); } self } #[track_caller] pub fn run(&mut self) { self.ran = true; let mut p = (&self.process_builder).clone().unwrap(); if let Some(stdin) = self.expect_stdin.take() { p.stdin(stdin); } if let Err(e) = self.match_process(&p) { panic_error(&format!("test failed running {}", p), e); } } /// Runs the process, checks the expected output, and returns the first /// JSON object on stdout. 
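///
/// A hedged sketch (the command and field access are illustrative):
/// ```no_run
/// use cargo_test_support::project;
///
/// let p = project().file("src/lib.rs", "").build();
/// // Parse the first JSON object printed on stdout.
/// let json = p.cargo("metadata --format-version=1").run_json();
/// assert!(json["packages"].is_array());
/// ```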
#[track_caller] pub fn run_json(&mut self) -> serde_json::Value { self.ran = true; let p = (&self.process_builder).clone().unwrap(); match self.match_process(&p) { Err(e) => panic_error(&format!("test failed running {}", p), e), Ok(output) => serde_json::from_slice(&output.stdout).unwrap_or_else(|e| { panic!( "\nfailed to parse JSON: {}\n\ output was:\n{}\n", e, String::from_utf8_lossy(&output.stdout) ); }), } } #[track_caller] pub fn run_output(&mut self, output: &Output) { self.ran = true; if let Err(e) = self.match_output(output.status.code(), &output.stdout, &output.stderr) { panic_error("process did not return the expected result", e) } } #[track_caller] fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) { if self.expect_exit_code.unwrap_or(0) != 0 && self.expect_stdout.is_none() && self.expect_stdin.is_none() && self.expect_stderr.is_none() && self.expect_stdout_data.is_none() && self.expect_stderr_data.is_none() && self.expect_stdout_contains.is_empty() && self.expect_stderr_contains.is_empty() && self.expect_stdout_contains_n.is_empty() && self.expect_stdout_not_contains.is_empty() && self.expect_stderr_not_contains.is_empty() && self.expect_stdout_unordered.is_empty() && self.expect_stderr_unordered.is_empty() && self.expect_stderr_with_without.is_empty() && self.expect_json.is_none() && self.expect_json_contains_unordered.is_none() { panic!( "`with_status()` is used, but no output is checked.\n\ The test must check the output to ensure the correct error is triggered.\n\ --- stdout\n{}\n--- stderr\n{}", String::from_utf8_lossy(stdout), String::from_utf8_lossy(stderr), ); } } #[track_caller] fn match_process(&self, process: &ProcessBuilder) -> Result { println!("running {}", process); let res = if self.stream_output { if is_ci() { panic!("`.stream()` is for local debugging") } process.exec_with_streaming( &mut |out| { println!("{}", out); Ok(()) }, &mut |err| { eprintln!("{}", err); Ok(()) }, true, ) } else { process.exec_with_output() }; match res { Ok(out) => { self.match_output(out.status.code(), &out.stdout, &out.stderr)?; return Ok(RawOutput { stdout: out.stdout, stderr: out.stderr, code: out.status.code(), }); } Err(e) => { if let Some(ProcessError { stdout: Some(stdout), stderr: Some(stderr), code, .. 
}) = e.downcast_ref::() { self.match_output(*code, stdout, stderr)?; return Ok(RawOutput { stdout: stdout.to_vec(), stderr: stderr.to_vec(), code: *code, }); } bail!("could not exec process {}: {:?}", process, e) } } } #[track_caller] fn match_output(&self, code: Option, stdout: &[u8], stderr: &[u8]) -> Result<()> { self.verify_checks_output(stdout, stderr); let stdout = std::str::from_utf8(stdout).expect("stdout is not utf8"); let stderr = std::str::from_utf8(stderr).expect("stderr is not utf8"); let cwd = self.get_cwd(); match self.expect_exit_code { None => {} Some(expected) if code == Some(expected) => {} Some(expected) => bail!( "process exited with code {} (expected {})\n--- stdout\n{}\n--- stderr\n{}", code.unwrap_or(-1), expected, stdout, stderr ), } if let Some(expect_stdout) = &self.expect_stdout { compare::match_exact(expect_stdout, stdout, "stdout", stderr, cwd)?; } if let Some(expect_stderr) = &self.expect_stderr { compare::match_exact(expect_stderr, stderr, "stderr", stdout, cwd)?; } if let Some(expect_stdout_data) = &self.expect_stdout_data { if let Err(err) = self.assert.try_eq( Some(&"stdout"), stdout.into_data(), expect_stdout_data.clone(), ) { panic!("{err}") } } if let Some(expect_stderr_data) = &self.expect_stderr_data { if let Err(err) = self.assert.try_eq( Some(&"stderr"), stderr.into_data(), expect_stderr_data.clone(), ) { panic!("{err}") } } for expect in self.expect_stdout_contains.iter() { compare::match_contains(expect, stdout, cwd)?; } for expect in self.expect_stderr_contains.iter() { compare::match_contains(expect, stderr, cwd)?; } for &(ref expect, number) in self.expect_stdout_contains_n.iter() { compare::match_contains_n(expect, number, stdout, cwd)?; } for expect in self.expect_stdout_not_contains.iter() { compare::match_does_not_contain(expect, stdout, cwd)?; } for expect in self.expect_stderr_not_contains.iter() { compare::match_does_not_contain(expect, stderr, cwd)?; } for expect in self.expect_stdout_unordered.iter() { compare::match_unordered(expect, stdout, cwd)?; } for expect in self.expect_stderr_unordered.iter() { compare::match_unordered(expect, stderr, cwd)?; } for (with, without) in self.expect_stderr_with_without.iter() { compare::match_with_without(stderr, with, without, cwd)?; } if let Some(ref expect_json) = self.expect_json { compare::match_json(expect_json, stdout, cwd)?; } if let Some(ref expected) = self.expect_json_contains_unordered { compare::match_json_contains_unordered(expected, stdout, cwd)?; } Ok(()) } } impl Drop for Execs { fn drop(&mut self) { if !self.ran && !std::thread::panicking() { panic!("forgot to run this command"); } } } pub fn execs() -> Execs { Execs { ran: false, process_builder: None, expect_stdout: None, expect_stderr: None, expect_stdin: None, expect_exit_code: Some(0), expect_stdout_data: None, expect_stderr_data: None, expect_stdout_contains: Vec::new(), expect_stderr_contains: Vec::new(), expect_stdout_contains_n: Vec::new(), expect_stdout_not_contains: Vec::new(), expect_stderr_not_contains: Vec::new(), expect_stdout_unordered: Vec::new(), expect_stderr_unordered: Vec::new(), expect_stderr_with_without: Vec::new(), expect_json: None, expect_json_contains_unordered: None, stream_output: false, assert: compare::assert_e2e(), } } pub fn basic_manifest(name: &str, version: &str) -> String { format!( r#" [package] name = "{}" version = "{}" authors = [] edition = "2015" "#, name, version ) } pub fn basic_bin_manifest(name: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = 
["wycats@example.com"] edition = "2015" [[bin]] name = "{}" "#, name, name ) } pub fn basic_lib_manifest(name: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] edition = "2015" [lib] name = "{}" "#, name, name ) } pub fn path2url>(p: P) -> Url { Url::from_file_path(p).ok().unwrap() } struct RustcInfo { verbose_version: String, host: String, } impl RustcInfo { fn new() -> RustcInfo { let output = ProcessBuilder::new("rustc") .arg("-vV") .exec_with_output() .expect("rustc should exec"); let verbose_version = String::from_utf8(output.stdout).expect("utf8 output"); let host = verbose_version .lines() .filter_map(|line| line.strip_prefix("host: ")) .next() .expect("verbose version has host: field") .to_string(); RustcInfo { verbose_version, host, } } } fn rustc_info() -> &'static RustcInfo { static RUSTC_INFO: OnceLock = OnceLock::new(); RUSTC_INFO.get_or_init(RustcInfo::new) } /// The rustc host such as `x86_64-unknown-linux-gnu`. pub fn rustc_host() -> &'static str { &rustc_info().host } /// The host triple suitable for use in a cargo environment variable (uppercased). pub fn rustc_host_env() -> String { rustc_host().to_uppercase().replace('-', "_") } pub fn is_nightly() -> bool { let vv = &rustc_info().verbose_version; // CARGO_TEST_DISABLE_NIGHTLY is set in rust-lang/rust's CI so that all // nightly-only tests are disabled there. Otherwise, it could make it // difficult to land changes which would need to be made simultaneously in // rust-lang/cargo and rust-lan/rust, which isn't possible. env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err() && (vv.contains("-nightly") || vv.contains("-dev")) } pub fn process>(t: T) -> ProcessBuilder { _process(t.as_ref()) } fn _process(t: &OsStr) -> ProcessBuilder { let mut p = ProcessBuilder::new(t); p.cwd(&paths::root()).test_env(); p } /// Enable nightly features for testing pub trait ChannelChanger { /// The list of reasons should be why nightly cargo is needed. If it is /// because of an unstable feature put the name of the feature as the reason, /// e.g. `&["print-im-a-teapot"]`. fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self; } impl ChannelChanger for &mut ProcessBuilder { fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } } impl ChannelChanger for snapbox::cmd::Command { fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } } /// Establish a process's test environment pub trait TestEnv: Sized { fn test_env(mut self) -> Self { // In general just clear out all cargo-specific configuration already in the // environment. Our tests all assume a "default configuration" unless // specified otherwise. for (k, _v) in env::vars() { if k.starts_with("CARGO_") { self = self.env_remove(&k); } } if env::var_os("RUSTUP_TOOLCHAIN").is_some() { // Override the PATH to avoid executing the rustup wrapper thousands // of times. This makes the testsuite run substantially faster. 
static RUSTC_DIR: OnceLock = OnceLock::new(); let rustc_dir = RUSTC_DIR.get_or_init(|| { match ProcessBuilder::new("rustup") .args(&["which", "rustc"]) .exec_with_output() { Ok(output) => { let s = std::str::from_utf8(&output.stdout).expect("utf8").trim(); let mut p = PathBuf::from(s); p.pop(); p } Err(e) => { panic!("RUSTUP_TOOLCHAIN was set, but could not run rustup: {}", e); } } }); let path = env::var_os("PATH").unwrap_or_default(); let paths = env::split_paths(&path); let new_path = env::join_paths(std::iter::once(rustc_dir.clone()).chain(paths)).unwrap(); self = self.env("PATH", new_path); } self = self .current_dir(&paths::root()) .env("HOME", paths::home()) .env("CARGO_HOME", paths::home().join(".cargo")) .env("__CARGO_TEST_ROOT", paths::global_root()) // Force Cargo to think it's on the stable channel for all tests, this // should hopefully not surprise us as we add cargo features over time and // cargo rides the trains. .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable") // Keeps cargo within its sandbox. .env("__CARGO_TEST_DISABLE_GLOBAL_KNOWN_HOST", "1") // Set retry sleep to 1 millisecond. .env("__CARGO_TEST_FIXED_RETRY_SLEEP_MS", "1") // Incremental generates a huge amount of data per test, which we // don't particularly need. Tests that specifically need to check // the incremental behavior should turn this back on. .env("CARGO_INCREMENTAL", "0") // Don't read the system git config which is out of our control. .env("GIT_CONFIG_NOSYSTEM", "1") .env_remove("__CARGO_DEFAULT_LIB_METADATA") .env_remove("ALL_PROXY") .env_remove("EMAIL") .env_remove("GIT_AUTHOR_EMAIL") .env_remove("GIT_AUTHOR_NAME") .env_remove("GIT_COMMITTER_EMAIL") .env_remove("GIT_COMMITTER_NAME") .env_remove("http_proxy") .env_remove("HTTPS_PROXY") .env_remove("https_proxy") .env_remove("MAKEFLAGS") .env_remove("MFLAGS") .env_remove("MSYSTEM") // assume cmd.exe everywhere on windows .env_remove("RUSTC") .env_remove("RUSTC_WORKSPACE_WRAPPER") .env_remove("RUSTC_WRAPPER") .env_remove("RUSTDOC") .env_remove("RUSTDOCFLAGS") .env_remove("RUSTFLAGS") .env_remove("SSH_AUTH_SOCK") // ensure an outer agent is never contacted .env_remove("USER") // not set on some rust-lang docker images .env_remove("XDG_CONFIG_HOME") // see #2345 .env_remove("OUT_DIR"); // see #13204 if cfg!(windows) { self = self.env("USERPROFILE", paths::home()); } self } fn current_dir>(self, path: S) -> Self; fn env>(self, key: &str, value: S) -> Self; fn env_remove(self, key: &str) -> Self; } impl TestEnv for &mut ProcessBuilder { fn current_dir>(self, path: S) -> Self { let path = path.as_ref(); self.cwd(path) } fn env>(self, key: &str, value: S) -> Self { self.env(key, value) } fn env_remove(self, key: &str) -> Self { self.env_remove(key) } } impl TestEnv for snapbox::cmd::Command { fn current_dir>(self, path: S) -> Self { self.current_dir(path) } fn env>(self, key: &str, value: S) -> Self { self.env(key, value) } fn env_remove(self, key: &str) -> Self { self.env_remove(key) } } /// Test the cargo command pub trait CargoCommand { fn cargo_ui() -> Self; } impl CargoCommand for snapbox::cmd::Command { fn cargo_ui() -> Self { Self::new(cargo_exe()) .with_assert(compare::assert_ui()) .env("CARGO_TERM_COLOR", "always") .test_env() } } /// Add a list of arguments as a line pub trait ArgLine: Sized { fn arg_line(mut self, s: &str) -> Self { for mut arg in s.split_whitespace() { if (arg.starts_with('"') && arg.ends_with('"')) || (arg.starts_with('\'') && arg.ends_with('\'')) { arg = &arg[1..(arg.len() - 1).max(1)]; } else if 
arg.contains(&['"', '\''][..]) {
                panic!("shell-style argument parsing is not supported")
            }
            self = self.arg(arg);
        }
        self
    }
    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self;
}

impl ArgLine for &mut ProcessBuilder {
    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}

impl ArgLine for snapbox::cmd::Command {
    fn arg<S: AsRef<OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}

pub fn cargo_process(s: &str) -> Execs {
    let cargo = cargo_exe();
    let mut p = process(&cargo);
    p.env("CARGO", cargo);
    p.arg_line(s);
    execs().with_process_builder(p)
}

pub fn git_process(s: &str) -> ProcessBuilder {
    let mut p = process("git");
    p.arg_line(s);
    p
}

pub fn sleep_ms(ms: u64) {
    ::std::thread::sleep(Duration::from_millis(ms));
}

/// Returns `true` if the local filesystem has low-resolution mtimes.
pub fn is_coarse_mtime() -> bool {
    // If the filetime crate is being used to emulate HFS then
    // return `true`, without looking at the actual hardware.
    cfg!(emulate_second_only_system) ||
    // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS
    // filesystem, (or any filesystem with low-resolution mtimes). However,
    // that's tricky to detect, so for now just deal with CI.
    cfg!(target_os = "macos") && is_ci()
}

/// Some CI setups are much slower than the equipment used by Cargo itself.
/// Architectures that do not have a modern processor, hardware emulation, etc.
/// This provides a way for those setups to increase the cutoff for all the time-based tests.
pub fn slow_cpu_multiplier(main: u64) -> Duration {
    static SLOW_CPU_MULTIPLIER: OnceLock<u64> = OnceLock::new();
    let slow_cpu_multiplier = SLOW_CPU_MULTIPLIER.get_or_init(|| {
        env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER")
            .ok()
            .and_then(|m| m.parse().ok())
            .unwrap_or(1)
    });
    Duration::from_secs(slow_cpu_multiplier * main)
}

#[cfg(windows)]
pub fn symlink_supported() -> bool {
    if is_ci() {
        // We want to be absolutely sure this runs on CI.
        return true;
    }
    let src = paths::root().join("symlink_src");
    fs::write(&src, "").unwrap();
    let dst = paths::root().join("symlink_dst");
    let result = match os::windows::fs::symlink_file(&src, &dst) {
        Ok(_) => {
            fs::remove_file(&dst).unwrap();
            true
        }
        Err(e) => {
            eprintln!(
                "symlinks not supported: {:?}\n\
                 Windows 10 users should enable developer mode.",
                e
            );
            false
        }
    };
    fs::remove_file(&src).unwrap();
    return result;
}

#[cfg(not(windows))]
pub fn symlink_supported() -> bool {
    true
}

/// The error message for ENOENT.
pub fn no_such_file_err_msg() -> String {
    std::io::Error::from_raw_os_error(2).to_string()
}

/// Helper to retry a function `n` times.
///
/// The function should return `Some` when it is ready.
pub fn retry<F, R>(n: u32, mut f: F) -> R
where
    F: FnMut() -> Option<R>,
{
    let mut count = 0;
    let start = std::time::Instant::now();
    loop {
        if let Some(r) = f() {
            return r;
        }
        count += 1;
        if count > n {
            panic!(
                "test did not finish within {n} attempts ({:?} total)",
                start.elapsed()
            );
        }
        sleep_ms(100);
    }
}

#[test]
#[should_panic(expected = "test did not finish")]
fn retry_fails() {
    retry(2, || None::<()>);
}

/// Helper that waits for a thread to finish, up to `n` tenths of a second.
pub fn thread_wait_timeout<T>(n: u32, thread: JoinHandle<T>) -> T {
    retry(n, || thread.is_finished().then_some(()));
    thread.join().unwrap()
}

/// Helper that runs some function, and waits up to `n` tenths of a second for
/// it to finish.
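///
/// A small sketch (the closure and timeout are illustrative; `n` is in
/// tenths of a second):
/// ```no_run
/// use cargo_test_support::threaded_timeout;
///
/// // Panics if the closure takes longer than ~10 seconds (100 tenths).
/// let value = threaded_timeout(100, || 2 + 2);
/// assert_eq!(value, 4);
/// ```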
pub fn threaded_timeout(n: u32, f: F) -> R where F: FnOnce() -> R + Send + 'static, R: Send + 'static, { let thread = std::thread::spawn(|| f()); thread_wait_timeout(n, thread) } cargo-test-support-0.3.0/src/paths.rs000064400000000000000000000242161046102023000156670ustar 00000000000000use filetime::FileTime; use std::cell::RefCell; use std::env; use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; use std::sync::OnceLock; static CARGO_INTEGRATION_TEST_DIR: &str = "cit"; static GLOBAL_ROOT: OnceLock>> = OnceLock::new(); /// This is used when running cargo is pre-CARGO_TARGET_TMPDIR /// TODO: Remove when CARGO_TARGET_TMPDIR grows old enough. fn global_root_legacy() -> PathBuf { let mut path = t!(env::current_exe()); path.pop(); // chop off exe name path.pop(); // chop off "deps" path.push("tmp"); path.mkdir_p(); path } fn set_global_root(tmp_dir: Option<&'static str>) { let mut lock = GLOBAL_ROOT .get_or_init(|| Default::default()) .lock() .unwrap(); if lock.is_none() { let mut root = match tmp_dir { Some(tmp_dir) => PathBuf::from(tmp_dir), None => global_root_legacy(), }; root.push(CARGO_INTEGRATION_TEST_DIR); *lock = Some(root); } } pub fn global_root() -> PathBuf { let lock = GLOBAL_ROOT .get_or_init(|| Default::default()) .lock() .unwrap(); match lock.as_ref() { Some(p) => p.clone(), None => unreachable!("GLOBAL_ROOT not set yet"), } } // We need to give each test a unique id. The test name could serve this // purpose, but the `test` crate doesn't have a way to obtain the current test // name.[*] Instead, we used the `cargo-test-macro` crate to automatically // insert an init function for each test that sets the test name in a thread // local variable. // // [*] It does set the thread name, but only when running concurrently. If not // running concurrently, all tests are run on the main thread. thread_local! { static TEST_ID: RefCell> = RefCell::new(None); } pub struct TestIdGuard { _private: (), } pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); TEST_ID.with(|n| *n.borrow_mut() = Some(id)); let guard = TestIdGuard { _private: () }; set_global_root(tmp_dir); let r = root(); r.rm_rf(); r.mkdir_p(); guard } impl Drop for TestIdGuard { fn drop(&mut self) { TEST_ID.with(|n| *n.borrow_mut() = None); } } pub fn root() -> PathBuf { let id = TEST_ID.with(|n| { n.borrow().expect( "Tests must use the `#[cargo_test]` attribute in \ order to be able to use the crate root.", ) }); let mut root = global_root(); root.push(&format!("t{}", id)); root } pub fn home() -> PathBuf { let mut path = root(); path.push("home"); path.mkdir_p(); path } pub trait CargoPathExt { fn rm_rf(&self); fn mkdir_p(&self); /// Returns a list of all files and directories underneath the given /// directory, recursively, including the starting path. 
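    ///
    /// A hedged sketch, assuming `CargoPathExt` is in scope (the printing is
    /// illustrative):
    /// ```no_run
    /// use cargo_test_support::paths::{self, CargoPathExt};
    ///
    /// // Recursively list everything under the test root.
    /// for entry in paths::root().ls_r() {
    ///     println!("{}", entry.display());
    /// }
    /// ```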
fn ls_r(&self) -> Vec; fn move_into_the_past(&self) { self.move_in_time(|sec, nsec| (sec - 3600, nsec)) } fn move_into_the_future(&self) { self.move_in_time(|sec, nsec| (sec + 3600, nsec)) } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32); } impl CargoPathExt for Path { fn rm_rf(&self) { let meta = match self.symlink_metadata() { Ok(meta) => meta, Err(e) => { if e.kind() == ErrorKind::NotFound { return; } panic!("failed to remove {:?}, could not read: {:?}", self, e); } }; // There is a race condition between fetching the metadata and // actually performing the removal, but we don't care all that much // for our tests. if meta.is_dir() { if let Err(e) = fs::remove_dir_all(self) { panic!("failed to remove {:?}: {:?}", self, e) } } else if let Err(e) = fs::remove_file(self) { panic!("failed to remove {:?}: {:?}", self, e) } } fn mkdir_p(&self) { fs::create_dir_all(self) .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e)) } fn ls_r(&self) -> Vec { walkdir::WalkDir::new(self) .sort_by_file_name() .into_iter() .filter_map(|e| e.map(|e| e.path().to_owned()).ok()) .collect() } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32), { if self.is_file() { time_travel(self, &travel_amount); } else { recurse(self, &self.join("target"), &travel_amount); } fn recurse(p: &Path, bad: &Path, travel_amount: &F) where F: Fn(i64, u32) -> (i64, u32), { if p.is_file() { time_travel(p, travel_amount) } else if !p.starts_with(bad) { for f in t!(fs::read_dir(p)) { let f = t!(f).path(); recurse(&f, bad, travel_amount); } } } fn time_travel(path: &Path, travel_amount: &F) where F: Fn(i64, u32) -> (i64, u32), { let stat = t!(path.symlink_metadata()); let mtime = FileTime::from_last_modification_time(&stat); let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds()); let newtime = FileTime::from_unix_time(sec, nsec); // Sadly change_file_times has a failure mode where a readonly file // cannot have its times changed on windows. do_op(path, "set file times", |path| { filetime::set_file_times(path, newtime, newtime) }); } } } fn do_op(path: &Path, desc: &str, mut f: F) where F: FnMut(&Path) -> io::Result<()>, { match f(path) { Ok(()) => {} Err(ref e) if e.kind() == ErrorKind::PermissionDenied => { let mut p = t!(path.metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(path, p)); // Unix also requires the parent to not be readonly for example when // removing files let parent = path.parent().unwrap(); let mut p = t!(parent.metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(parent, p)); f(path).unwrap_or_else(|e| { panic!("failed to {} {}: {}", desc, path.display(), e); }) } Err(e) => { panic!("failed to {} {}: {}", desc, path.display(), e); } } } /// Get the filename for a library. 
///
/// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
///
/// For example, a dynamic library named "foo" would return:
/// - macOS: "libfoo.dylib"
/// - Windows: "foo.dll"
/// - Unix: "libfoo.so"
pub fn get_lib_filename(name: &str, kind: &str) -> String {
    let prefix = get_lib_prefix(kind);
    let extension = get_lib_extension(kind);
    format!("{}{}.{}", prefix, name, extension)
}

pub fn get_lib_prefix(kind: &str) -> &str {
    match kind {
        "lib" | "rlib" => "lib",
        "staticlib" | "dylib" | "proc-macro" => {
            if cfg!(windows) {
                ""
            } else {
                "lib"
            }
        }
        _ => unreachable!(),
    }
}

pub fn get_lib_extension(kind: &str) -> &str {
    match kind {
        "lib" | "rlib" => "rlib",
        "staticlib" => {
            if cfg!(windows) {
                "lib"
            } else {
                "a"
            }
        }
        "dylib" | "proc-macro" => {
            if cfg!(windows) {
                "dll"
            } else if cfg!(target_os = "macos") {
                "dylib"
            } else {
                "so"
            }
        }
        _ => unreachable!(),
    }
}

/// Returns the sysroot as queried from rustc.
pub fn sysroot() -> String {
    let output = Command::new("rustc")
        .arg("--print=sysroot")
        .output()
        .expect("rustc to run");
    assert!(output.status.success());
    let sysroot = String::from_utf8(output.stdout).unwrap();
    sysroot.trim().to_string()
}

/// Returns true if names such as aux.* are allowed.
///
/// Traditionally, Windows did not allow a set of file names (see `is_windows_reserved`
/// for a list). More recent versions of Windows have relaxed this restriction. This test
/// determines whether we are running in a mode that allows Windows reserved names.
#[cfg(windows)]
pub fn windows_reserved_names_are_allowed() -> bool {
    use cargo_util::is_ci;

    // Ensure tests still run in CI until we need to migrate.
    if is_ci() {
        return false;
    }

    use std::ffi::OsStr;
    use std::os::windows::ffi::OsStrExt;
    use std::ptr;
    use windows_sys::Win32::Storage::FileSystem::GetFullPathNameW;

    let test_file_name: Vec<_> = OsStr::new("aux.rs").encode_wide().collect();

    let buffer_length =
        unsafe { GetFullPathNameW(test_file_name.as_ptr(), 0, ptr::null_mut(), ptr::null_mut()) };

    if buffer_length == 0 {
        // This means the call failed, so we'll conservatively assume reserved names are not allowed.
        return false;
    }

    let mut buffer = vec![0u16; buffer_length as usize];

    let result = unsafe {
        GetFullPathNameW(
            test_file_name.as_ptr(),
            buffer_length,
            buffer.as_mut_ptr(),
            ptr::null_mut(),
        )
    };

    if result == 0 {
        // Once again, conservatively assume reserved names are not allowed if the
        // GetFullPathNameW call failed.
        return false;
    }

    // Under the old rules, a file name like aux.rs would get converted into \\.\aux, so
    // we detect this case by checking if the string starts with \\.\
    //
    // Otherwise, the filename will be something like C:\Users\Foo\Documents\aux.rs
    let prefix: Vec<_> = OsStr::new("\\\\.\\").encode_wide().collect();
    !buffer.starts_with(&prefix)
}
cargo-test-support-0.3.0/src/publish.rs000064400000000000000000000176441046102023000162210ustar 00000000000000
use crate::compare::{assert_match_exact, find_json_mismatch};
use crate::registry::{self, alt_api_path, FeatureMap};
use flate2::read::GzDecoder;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::fs::File;
use std::io::{self, prelude::*, SeekFrom};
use std::path::{Path, PathBuf};
use tar::Archive;

fn read_le_u32<R>(mut reader: R) -> io::Result<u32>
where
    R: Read,
{
    let mut buf = [0; 4];
    reader.read_exact(&mut buf)?;
    Ok(u32::from_le_bytes(buf))
}

/// Checks the result of a crate publish.
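///
/// A minimal sketch of a call (the JSON is abbreviated for illustration; a
/// real call must supply the full expected publish metadata):
///
/// ```no_run
/// use cargo_test_support::publish::validate_upload;
///
/// validate_upload(
///     r#"{"name": "foo", "vers": "0.0.1", "deps": [], "features": {}}"#,
///     "foo-0.0.1.crate",
///     &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
/// );
/// ```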
pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, &[], ); } /// Checks the result of a crate publish, along with the contents of the files. pub fn validate_upload_with_contents( expected_json: &str, expected_crate_name: &str, expected_files: &[&str], expected_contents: &[(&str, &str)], ) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, expected_contents, ); } /// Checks the result of a crate publish to an alternative registry. pub fn validate_alt_upload( expected_json: &str, expected_crate_name: &str, expected_files: &[&str], ) { let new_path = alt_api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, &[], ); } fn _validate_upload( new_path: &Path, expected_json: &str, expected_crate_name: &str, expected_files: &[&str], expected_contents: &[(&str, &str)], ) { let mut f = File::open(new_path).unwrap(); // 32-bit little-endian integer of length of JSON data. let json_sz = read_le_u32(&mut f).expect("read json length"); let mut json_bytes = vec![0; json_sz as usize]; f.read_exact(&mut json_bytes).expect("read JSON data"); let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid"); let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse"); if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) { panic!("{}", e); } // 32-bit little-endian integer of length of crate file. let crate_sz = read_le_u32(&mut f).expect("read crate length"); let mut krate_bytes = vec![0; crate_sz as usize]; f.read_exact(&mut krate_bytes).expect("read crate data"); // Check at end. let current = f.seek(SeekFrom::Current(0)).unwrap(); assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current); // Verify the tarball. validate_crate_contents( &krate_bytes[..], expected_crate_name, expected_files, expected_contents, ); } /// Checks the contents of a `.crate` file. /// /// - `expected_crate_name` should be something like `foo-0.0.1.crate`. /// - `expected_files` should be a complete list of files in the crate /// (relative to expected_crate_name). /// - `expected_contents` should be a list of `(file_name, contents)` tuples /// to validate the contents of the given file. Only the listed files will /// be checked (others will be ignored). 
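///
/// A minimal sketch, assuming `krate_bytes` holds a gzipped `.crate` file
/// already read from disk (a hypothetical variable for illustration):
///
/// ```no_run
/// use cargo_test_support::publish::validate_crate_contents;
///
/// # let krate_bytes: Vec<u8> = Vec::new();
/// validate_crate_contents(
///     &krate_bytes[..],
///     "foo-0.0.1.crate",
///     &["Cargo.toml", "src/lib.rs"],
///     &[("src/lib.rs", "pub fn f() {}")],
/// );
/// ```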
pub fn validate_crate_contents( reader: impl Read, expected_crate_name: &str, expected_files: &[&str], expected_contents: &[(&str, &str)], ) { let mut rdr = GzDecoder::new(reader); assert_eq!( rdr.header().unwrap().filename().unwrap(), expected_crate_name.as_bytes() ); let mut contents = Vec::new(); rdr.read_to_end(&mut contents).unwrap(); let mut ar = Archive::new(&contents[..]); let files: HashMap = ar .entries() .unwrap() .map(|entry| { let mut entry = entry.unwrap(); let name = entry.path().unwrap().into_owned(); let mut contents = String::new(); entry.read_to_string(&mut contents).unwrap(); (name, contents) }) .collect(); let base_crate_name = Path::new( expected_crate_name .strip_suffix(".crate") .expect("must end with .crate"), ); let actual_files: HashSet = files.keys().cloned().collect(); let expected_files: HashSet = expected_files .iter() .map(|name| base_crate_name.join(name)) .collect(); let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect(); let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect(); if !missing.is_empty() || !extra.is_empty() { panic!( "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n", missing, extra ); } if !expected_contents.is_empty() { for (e_file_name, e_file_contents) in expected_contents { let full_e_name = base_crate_name.join(e_file_name); let actual_contents = files .get(&full_e_name) .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name)); assert_match_exact(e_file_contents, actual_contents); } } } pub(crate) fn create_index_line( name: serde_json::Value, vers: &str, deps: Vec, cksum: &str, features: crate::registry::FeatureMap, yanked: bool, links: Option, rust_version: Option<&str>, v: Option, ) -> String { // This emulates what crates.io does to retain backwards compatibility. let (features, features2) = split_index_features(features.clone()); let mut json = serde_json::json!({ "name": name, "vers": vers, "deps": deps, "cksum": cksum, "features": features, "yanked": yanked, "links": links, }); if let Some(f2) = &features2 { json["features2"] = serde_json::json!(f2); json["v"] = serde_json::json!(2); } if let Some(v) = v { json["v"] = serde_json::json!(v); } if let Some(rust_version) = rust_version { json["rust_version"] = serde_json::json!(rust_version); } json.to_string() } pub(crate) fn write_to_index(registry_path: &Path, name: &str, line: String, local: bool) { let file = cargo_util::registry::make_dep_path(name, false); // Write file/line in the index. let dst = if local { registry_path.join("index").join(&file) } else { registry_path.join(&file) }; let prev = fs::read_to_string(&dst).unwrap_or_default(); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, prev + &line[..] + "\n")); // Add the new file to the index. if !local { let repo = t!(git2::Repository::open(®istry_path)); let mut index = t!(repo.index()); t!(index.add_path(Path::new(&file))); t!(index.write()); let id = t!(index.write_tree()); // Commit this change. 
let tree = t!(repo.find_tree(id)); let sig = t!(repo.signature()); let parent = t!(repo.refname_to_id("refs/heads/master")); let parent = t!(repo.find_commit(parent)); t!(repo.commit( Some("HEAD"), &sig, &sig, "Another commit", &tree, &[&parent] )); } } fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option) { let mut features2 = FeatureMap::new(); for (feat, values) in features.iter_mut() { if values .iter() .any(|value| value.starts_with("dep:") || value.contains("?/")) { let new_values = values.drain(..).collect(); features2.insert(feat.clone(), new_values); } } if features2.is_empty() { (features, None) } else { (features, Some(features2)) } } cargo-test-support-0.3.0/src/registry.rs000064400000000000000000001575631046102023000164340ustar 00000000000000use crate::git::repo; use crate::paths; use crate::publish::{create_index_line, write_to_index}; use cargo_util::paths::append; use cargo_util::Sha256; use flate2::write::GzEncoder; use flate2::Compression; use pasetors::keys::{AsymmetricPublicKey, AsymmetricSecretKey}; use pasetors::paserk::FormatAsPaserk; use pasetors::token::UntrustedToken; use std::collections::{BTreeMap, HashMap}; use std::fmt; use std::fs::{self, File}; use std::io::{BufRead, BufReader, Read, Write}; use std::net::{SocketAddr, TcpListener, TcpStream}; use std::path::{Path, PathBuf}; use std::thread::{self, JoinHandle}; use tar::{Builder, Header}; use time::format_description::well_known::Rfc3339; use time::{Duration, OffsetDateTime}; use url::Url; /// Gets the path to the local index pretending to be crates.io. This is a Git repo /// initialized with a `config.json` file pointing to `dl_path` for downloads /// and `api_path` for uploads. pub fn registry_path() -> PathBuf { generate_path("registry") } /// Gets the path for local web API uploads. Cargo will place the contents of a web API /// request here. For example, `api/v1/crates/new` is the result of publishing a crate. pub fn api_path() -> PathBuf { generate_path("api") } /// Gets the path where crates can be downloaded using the web API endpoint. Crates /// should be organized as `{name}/{version}/download` to match the web API /// endpoint. This is rarely used and must be manually set up. fn dl_path() -> PathBuf { generate_path("dl") } /// Gets the alternative-registry version of `registry_path`. fn alt_registry_path() -> PathBuf { generate_path("alternative-registry") } /// Gets the alternative-registry version of `registry_url`. fn alt_registry_url() -> Url { generate_url("alternative-registry") } /// Gets the alternative-registry version of `dl_path`. pub fn alt_dl_path() -> PathBuf { generate_path("alternative-dl") } /// Gets the alternative-registry version of `api_path`. pub fn alt_api_path() -> PathBuf { generate_path("alternative-api") } fn generate_path(name: &str) -> PathBuf { paths::root().join(name) } fn generate_url(name: &str) -> Url { Url::from_file_path(generate_path(name)).ok().unwrap() } #[derive(Clone)] pub enum Token { Plaintext(String), Keys(String, Option), } impl Token { /// This is a valid PASETO secret key. /// This one is already publicly available as part of the text of the RFC so is safe to use for tests. pub fn rfc_key() -> Token { Token::Keys( "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36" .to_string(), Some("sub".to_string()), ) } } type RequestCallback = Box Response>; /// A builder for initializing registries. pub struct RegistryBuilder { /// If set, configures an alternate registry with the given name. 
alternative: Option, /// The authorization token for the registry. token: Option, /// If set, the registry requires authorization for all operations. auth_required: bool, /// If set, serves the index over http. http_index: bool, /// If set, serves the API over http. http_api: bool, /// If set, config.json includes 'api' api: bool, /// Write the token in the configuration. configure_token: bool, /// Write the registry in configuration. configure_registry: bool, /// API responders. custom_responders: HashMap, /// Handler for 404 responses. not_found_handler: RequestCallback, /// If nonzero, the git index update to be delayed by the given number of seconds. delayed_index_update: usize, /// Credential provider in configuration credential_provider: Option, } pub struct TestRegistry { server: Option, index_url: Url, path: PathBuf, api_url: Url, dl_url: Url, token: Token, } impl TestRegistry { pub fn index_url(&self) -> &Url { &self.index_url } pub fn api_url(&self) -> &Url { &self.api_url } pub fn token(&self) -> &str { match &self.token { Token::Plaintext(s) => s, Token::Keys(_, _) => panic!("registry was not configured with a plaintext token"), } } pub fn key(&self) -> &str { match &self.token { Token::Plaintext(_) => panic!("registry was not configured with a secret key"), Token::Keys(s, _) => s, } } /// Shutdown the server thread and wait for it to stop. /// `Drop` automatically stops the server, but this additionally /// waits for the thread to stop. pub fn join(self) { if let Some(mut server) = self.server { server.stop(); let handle = server.handle.take().unwrap(); handle.join().unwrap(); } } } impl RegistryBuilder { #[must_use] pub fn new() -> RegistryBuilder { let not_found = |_req: &Request, _server: &HttpServer| -> Response { Response { code: 404, headers: vec![], body: b"not found".to_vec(), } }; RegistryBuilder { alternative: None, token: None, auth_required: false, http_api: false, http_index: false, api: true, configure_registry: true, configure_token: true, custom_responders: HashMap::new(), not_found_handler: Box::new(not_found), delayed_index_update: 0, credential_provider: None, } } /// Adds a custom HTTP response for a specific url #[must_use] pub fn add_responder Response>( mut self, url: impl Into, responder: R, ) -> Self { self.custom_responders .insert(url.into(), Box::new(responder)); self } #[must_use] pub fn not_found_handler Response>( mut self, responder: R, ) -> Self { self.not_found_handler = Box::new(responder); self } /// Configures the git index update to be delayed by the given number of seconds. #[must_use] pub fn delayed_index_update(mut self, delay: usize) -> Self { self.delayed_index_update = delay; self } /// Sets whether or not to initialize as an alternative registry. #[must_use] pub fn alternative_named(mut self, alt: &str) -> Self { self.alternative = Some(alt.to_string()); self } /// Sets whether or not to initialize as an alternative registry. #[must_use] pub fn alternative(self) -> Self { self.alternative_named("alternative") } /// Prevents placing a token in the configuration #[must_use] pub fn no_configure_token(mut self) -> Self { self.configure_token = false; self } /// Prevents adding the registry to the configuration. #[must_use] pub fn no_configure_registry(mut self) -> Self { self.configure_registry = false; self } /// Sets the token value #[must_use] pub fn token(mut self, token: Token) -> Self { self.token = Some(token); self } /// Sets this registry to require the authentication token for /// all operations. 
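    ///
    /// A minimal sketch of building a token-protected registry
    /// (illustrative; real tests also choose how the token is configured):
    ///
    /// ```no_run
    /// use cargo_test_support::registry::{RegistryBuilder, Token};
    ///
    /// let registry = RegistryBuilder::new()
    ///     .http_api()
    ///     .http_index()
    ///     .auth_required()
    ///     .token(Token::Plaintext("sekrit".to_string()))
    ///     .build();
    /// ```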
#[must_use] pub fn auth_required(mut self) -> Self { self.auth_required = true; self } /// Operate the index over http #[must_use] pub fn http_index(mut self) -> Self { self.http_index = true; self } /// Operate the api over http #[must_use] pub fn http_api(mut self) -> Self { self.http_api = true; self } /// The registry has no api. #[must_use] pub fn no_api(mut self) -> Self { self.api = false; self } /// The credential provider to configure for this registry. #[must_use] pub fn credential_provider(mut self, provider: &[&str]) -> Self { self.credential_provider = Some(format!("['{}']", provider.join("','"))); self } /// Initializes the registry. #[must_use] pub fn build(self) -> TestRegistry { let config_path = paths::home().join(".cargo/config.toml"); t!(fs::create_dir_all(config_path.parent().unwrap())); let prefix = if let Some(alternative) = &self.alternative { format!("{alternative}-") } else { String::new() }; let registry_path = generate_path(&format!("{prefix}registry")); let index_url = generate_url(&format!("{prefix}registry")); let api_url = generate_url(&format!("{prefix}api")); let dl_url = generate_url(&format!("{prefix}dl")); let dl_path = generate_path(&format!("{prefix}dl")); let api_path = generate_path(&format!("{prefix}api")); let token = self .token .unwrap_or_else(|| Token::Plaintext(format!("{prefix}sekrit"))); let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api { // No need to start the HTTP server. (None, index_url, api_url, dl_url) } else { let server = HttpServer::new( registry_path.clone(), dl_path, api_path.clone(), token.clone(), self.auth_required, self.custom_responders, self.not_found_handler, self.delayed_index_update, ); let index_url = if self.http_index { server.index_url() } else { index_url }; let api_url = if self.http_api { server.api_url() } else { api_url }; let dl_url = server.dl_url(); (Some(server), index_url, api_url, dl_url) }; let registry = TestRegistry { api_url, index_url, server, dl_url, path: registry_path, token, }; if self.configure_registry { if let Some(alternative) = &self.alternative { append( &config_path, format!( " [registries.{alternative}] index = '{}'", registry.index_url ) .as_bytes(), ) .unwrap(); if let Some(p) = &self.credential_provider { append( &config_path, &format!( " credential-provider = {p} " ) .as_bytes(), ) .unwrap() } } else { append( &config_path, format!( " [source.crates-io] replace-with = 'dummy-registry' [registries.dummy-registry] index = '{}'", registry.index_url ) .as_bytes(), ) .unwrap(); if let Some(p) = &self.credential_provider { append( &config_path, &format!( " [registry] credential-provider = {p} " ) .as_bytes(), ) .unwrap() } } } if self.configure_token { let credentials = paths::home().join(".cargo/credentials.toml"); match ®istry.token { Token::Plaintext(token) => { if let Some(alternative) = &self.alternative { append( &credentials, format!( r#" [registries.{alternative}] token = "{token}" "# ) .as_bytes(), ) .unwrap(); } else { append( &credentials, format!( r#" [registry] token = "{token}" "# ) .as_bytes(), ) .unwrap(); } } Token::Keys(key, subject) => { let mut out = if let Some(alternative) = &self.alternative { format!("\n[registries.{alternative}]\n") } else { format!("\n[registry]\n") }; out += &format!("secret-key = \"{key}\"\n"); if let Some(subject) = subject { out += &format!("secret-key-subject = \"{subject}\"\n"); } append(&credentials, out.as_bytes()).unwrap(); } } } let auth = if self.auth_required { r#","auth-required":true"# } else { "" }; let 
api = if self.api { format!(r#","api":"{}""#, registry.api_url) } else { String::new() }; // Initialize a new registry. repo(®istry.path) .file( "config.json", &format!(r#"{{"dl":"{}"{api}{auth}}}"#, registry.dl_url), ) .build(); fs::create_dir_all(api_path.join("api/v1/crates")).unwrap(); registry } } /// A builder for creating a new package in a registry. /// /// This uses "source replacement" using an automatically generated /// `.cargo/config` file to ensure that dependencies will use these packages /// instead of contacting crates.io. See `source-replacement.md` for more /// details on how source replacement works. /// /// Call `publish` to finalize and create the package. /// /// If no files are specified, an empty `lib.rs` file is automatically created. /// /// The `Cargo.toml` file is automatically generated based on the methods /// called on `Package` (for example, calling `dep()` will add to the /// `[dependencies]` automatically). You may also specify a `Cargo.toml` file /// to override the generated one. /// /// This supports different registry types: /// - Regular source replacement that replaces `crates.io` (the default). /// - A "local registry" which is a subset for vendoring (see /// `Package::local`). /// - An "alternative registry" which requires specifying the registry name /// (see `Package::alternative`). /// /// This does not support "directory sources". See `directory.rs` for /// `VendorPackage` which implements directory sources. /// /// # Example /// ```no_run /// use cargo_test_support::registry::Package; /// use cargo_test_support::project; /// /// // Publish package "a" depending on "b". /// Package::new("a", "1.0.0") /// .dep("b", "1.0.0") /// .file("src/lib.rs", r#" /// extern crate b; /// pub fn f() -> i32 { b::f() * 2 } /// "#) /// .publish(); /// /// // Publish package "b". /// Package::new("b", "1.0.0") /// .file("src/lib.rs", r#" /// pub fn f() -> i32 { 12 } /// "#) /// .publish(); /// /// // Create a project that uses package "a". /// let p = project() /// .file("Cargo.toml", r#" /// [package] /// name = "foo" /// version = "0.0.1" /// /// [dependencies] /// a = "1.0" /// "#) /// .file("src/main.rs", r#" /// extern crate a; /// fn main() { println!("{}", a::f()); } /// "#) /// .build(); /// /// p.cargo("run").with_stdout("24").run(); /// ``` #[must_use] pub struct Package { name: String, vers: String, deps: Vec, files: Vec, yanked: bool, features: FeatureMap, local: bool, alternative: bool, invalid_json: bool, proc_macro: bool, links: Option, rust_version: Option, cargo_features: Vec, v: Option, } pub(crate) type FeatureMap = BTreeMap>; #[derive(Clone)] pub struct Dependency { name: String, vers: String, kind: String, artifact: Option, bindep_target: Option, lib: bool, target: Option, features: Vec, registry: Option, package: Option, optional: bool, default_features: bool, public: bool, } /// Entry with data that corresponds to [`tar::EntryType`]. #[non_exhaustive] enum EntryData { Regular(String), Symlink(PathBuf), } /// A file to be created in a package. struct PackageFile { path: String, contents: EntryData, /// The Unix mode for the file. Note that when extracted on Windows, this /// is mostly ignored since it doesn't have the same style of permissions. mode: u32, /// If `true`, the file is created in the root of the tarfile, used for /// testing invalid packages. extra: bool, } const DEFAULT_MODE: u32 = 0o644; /// Initializes the on-disk registry and sets up the config so that crates.io /// is replaced with the one on disk. 
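///
/// A minimal sketch (what the test does with the returned handle is up to
/// the caller):
///
/// ```no_run
/// use cargo_test_support::registry;
///
/// let test_registry = registry::init();
/// println!("index lives at {}", test_registry.index_url());
/// ```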
pub fn init() -> TestRegistry { RegistryBuilder::new().build() } /// Variant of `init` that initializes the "alternative" registry and crates.io /// replacement. pub fn alt_init() -> TestRegistry { init(); RegistryBuilder::new().alternative().build() } pub struct HttpServerHandle { addr: SocketAddr, handle: Option>, } impl HttpServerHandle { pub fn index_url(&self) -> Url { Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap() } pub fn api_url(&self) -> Url { Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap() } pub fn dl_url(&self) -> Url { Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap() } fn stop(&self) { if let Ok(mut stream) = TcpStream::connect(self.addr) { // shutdown the server let _ = stream.write_all(b"stop"); let _ = stream.flush(); } } } impl Drop for HttpServerHandle { fn drop(&mut self) { self.stop(); } } /// Request to the test http server #[derive(Clone)] pub struct Request { pub url: Url, pub method: String, pub body: Option>, pub authorization: Option, pub if_modified_since: Option, pub if_none_match: Option, } impl fmt::Debug for Request { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // body is not included as it can produce long debug outputs f.debug_struct("Request") .field("url", &self.url) .field("method", &self.method) .field("authorization", &self.authorization) .field("if_modified_since", &self.if_modified_since) .field("if_none_match", &self.if_none_match) .finish() } } /// Response from the test http server pub struct Response { pub code: u32, pub headers: Vec, pub body: Vec, } pub struct HttpServer { listener: TcpListener, registry_path: PathBuf, dl_path: PathBuf, api_path: PathBuf, addr: SocketAddr, token: Token, auth_required: bool, custom_responders: HashMap, not_found_handler: RequestCallback, delayed_index_update: usize, } /// A helper struct that collects the arguments for [`HttpServer::check_authorized`]. /// Based on looking at the request, these are the fields that the authentication header should attest to. struct Mutation<'a> { mutation: &'a str, name: Option<&'a str>, vers: Option<&'a str>, cksum: Option<&'a str>, } impl HttpServer { pub fn new( registry_path: PathBuf, dl_path: PathBuf, api_path: PathBuf, token: Token, auth_required: bool, custom_responders: HashMap, not_found_handler: RequestCallback, delayed_index_update: usize, ) -> HttpServerHandle { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); let server = HttpServer { listener, registry_path, dl_path, api_path, addr, token, auth_required, custom_responders, not_found_handler, delayed_index_update, }; let handle = Some(thread::spawn(move || server.start())); HttpServerHandle { addr, handle } } fn start(&self) { let mut line = String::new(); 'server: loop { let (socket, _) = self.listener.accept().unwrap(); let mut buf = BufReader::new(socket); line.clear(); if buf.read_line(&mut line).unwrap() == 0 { // Connection terminated. continue; } // Read the "GET path HTTP/1.1" line. let mut parts = line.split_ascii_whitespace(); let method = parts.next().unwrap().to_ascii_lowercase(); if method == "stop" { // Shutdown the server. return; } let addr = self.listener.local_addr().unwrap(); let url = format!( "http://{}/{}", addr, parts.next().unwrap().trim_start_matches('/') ); let url = Url::parse(&url).unwrap(); // Grab headers we care about. 
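            // For reference, a request parsed by the loop below looks
            // roughly like this (illustrative, not captured output):
            //
            //   GET /index/3/f/foo HTTP/1.1
            //   authorization: sekrit
            //   if-none-match: "<etag>"
            //
            // followed by a blank line and, for PUT requests, a body of
            // `content-length` bytes.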
let mut if_modified_since = None; let mut if_none_match = None; let mut authorization = None; let mut content_len = None; loop { line.clear(); if buf.read_line(&mut line).unwrap() == 0 { continue 'server; } if line == "\r\n" { // End of headers. line.clear(); break; } let (name, value) = line.split_once(':').unwrap(); let name = name.trim().to_ascii_lowercase(); let value = value.trim().to_string(); match name.as_str() { "if-modified-since" => if_modified_since = Some(value), "if-none-match" => if_none_match = Some(value), "authorization" => authorization = Some(value), "content-length" => content_len = Some(value), _ => {} } } let mut body = None; if let Some(con_len) = content_len { let len = con_len.parse::().unwrap(); let mut content = vec![0u8; len as usize]; buf.read_exact(&mut content).unwrap(); body = Some(content) } let req = Request { authorization, if_modified_since, if_none_match, method, url, body, }; println!("req: {:#?}", req); let response = self.route(&req); let buf = buf.get_mut(); write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap(); write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap(); write!(buf, "Connection: close\r\n").unwrap(); for header in response.headers { write!(buf, "{}\r\n", header).unwrap(); } write!(buf, "\r\n").unwrap(); buf.write_all(&response.body).unwrap(); buf.flush().unwrap(); } } fn check_authorized(&self, req: &Request, mutation: Option>) -> bool { let (private_key, private_key_subject) = if mutation.is_some() || self.auth_required { match &self.token { Token::Plaintext(token) => return Some(token) == req.authorization.as_ref(), Token::Keys(private_key, private_key_subject) => { (private_key.as_str(), private_key_subject) } } } else { assert!(req.authorization.is_none(), "unexpected token"); return true; }; macro_rules! t { ($e:expr) => { match $e { Some(e) => e, None => return false, } }; } let secret: AsymmetricSecretKey = private_key.try_into().unwrap(); let public: AsymmetricPublicKey = (&secret).try_into().unwrap(); let pub_key_id: pasetors::paserk::Id = (&public).into(); let mut paserk_pub_key_id = String::new(); FormatAsPaserk::fmt(&pub_key_id, &mut paserk_pub_key_id).unwrap(); // https://github.com/rust-lang/rfcs/blob/master/text/3231-cargo-asymmetric-tokens.md#how-the-registry-server-will-validate-an-asymmetric-token // - The PASETO is in v3.public format. let authorization = t!(&req.authorization); let untrusted_token = t!( UntrustedToken::::try_from(authorization) .ok() ); // - The PASETO validates using the public key it looked up based on the key ID. #[derive(serde::Deserialize, Debug)] struct Footer<'a> { url: &'a str, kip: &'a str, } let footer: Footer<'_> = t!(serde_json::from_slice(untrusted_token.untrusted_footer()).ok()); if footer.kip != paserk_pub_key_id { return false; } let trusted_token = t!( pasetors::version3::PublicToken::verify(&public, &untrusted_token, None, None,) .ok() ); // - The URL matches the registry base URL if footer.url != "https://github.com/rust-lang/crates.io-index" && footer.url != &format!("sparse+http://{}/index/", self.addr.to_string()) { return false; } // - The PASETO is still within its valid time period. 
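        // An illustrative (not real) token payload that would satisfy the
        // checks below for a publish of foo 0.0.1:
        //
        //   {"iat": "2024-01-01T00:00:00Z", "sub": "sub",
        //    "mutation": "publish", "name": "foo", "vers": "0.0.1",
        //    "cksum": "<sha256 of the .crate file>", "v": 1}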
#[derive(serde::Deserialize)] struct Message<'a> { iat: &'a str, sub: Option<&'a str>, mutation: Option<&'a str>, name: Option<&'a str>, vers: Option<&'a str>, cksum: Option<&'a str>, _challenge: Option<&'a str>, // todo: PASETO with challenges v: Option, } let message: Message<'_> = t!(serde_json::from_str(trusted_token.payload()).ok()); let token_time = t!(OffsetDateTime::parse(message.iat, &Rfc3339).ok()); let now = OffsetDateTime::now_utc(); if (now - token_time) > Duration::MINUTE { return false; } if private_key_subject.as_deref() != message.sub { return false; } // - If the claim v is set, that it has the value of 1. if let Some(v) = message.v { if v != 1 { return false; } } // - If the server issues challenges, that the challenge has not yet been answered. // todo: PASETO with challenges // - If the operation is a mutation: if let Some(mutation) = mutation { // - That the operation matches the mutation field and is one of publish, yank, or unyank. if message.mutation != Some(mutation.mutation) { return false; } // - That the package, and version match the request. if message.name != mutation.name { return false; } if message.vers != mutation.vers { return false; } // - If the mutation is publish, that the version has not already been published, and that the hash matches the request. if mutation.mutation == "publish" { if message.cksum != mutation.cksum { return false; } } } else { // - If the operation is a read, that the mutation field is not set. if message.mutation.is_some() || message.name.is_some() || message.vers.is_some() || message.cksum.is_some() { return false; } } true } /// Route the request fn route(&self, req: &Request) -> Response { // Check for custom responder if let Some(responder) = self.custom_responders.get(req.url.path()) { return responder(&req, self); } let path: Vec<_> = req.url.path()[1..].split('/').collect(); match (req.method.as_str(), path.as_slice()) { ("get", ["index", ..]) => { if !self.check_authorized(req, None) { self.unauthorized(req) } else { self.index(&req) } } ("get", ["dl", ..]) => { if !self.check_authorized(req, None) { self.unauthorized(req) } else { self.dl(&req) } } // publish ("put", ["api", "v1", "crates", "new"]) => self.check_authorized_publish(req), // The remainder of the operators in the test framework do nothing other than responding 'ok'. // // Note: We don't need to support anything real here because there are no tests that // currently require anything other than publishing via the http api. 
// yank / unyank ("delete" | "put", ["api", "v1", "crates", crate_name, version, mutation]) => { if !self.check_authorized( req, Some(Mutation { mutation, name: Some(crate_name), vers: Some(version), cksum: None, }), ) { self.unauthorized(req) } else { self.ok(&req) } } // owners ("get" | "put" | "delete", ["api", "v1", "crates", crate_name, "owners"]) => { if !self.check_authorized( req, Some(Mutation { mutation: "owners", name: Some(crate_name), vers: None, cksum: None, }), ) { self.unauthorized(req) } else { self.ok(&req) } } _ => self.not_found(&req), } } /// Unauthorized response pub fn unauthorized(&self, _req: &Request) -> Response { Response { code: 401, headers: vec![ r#"WWW-Authenticate: Cargo login_url="https://test-registry-login/me""#.to_string(), ], body: b"Unauthorized message from server.".to_vec(), } } /// Not found response pub fn not_found(&self, req: &Request) -> Response { (self.not_found_handler)(req, self) } /// Respond OK without doing anything pub fn ok(&self, _req: &Request) -> Response { Response { code: 200, headers: vec![], body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(), } } /// Return an internal server error (HTTP 500) pub fn internal_server_error(&self, _req: &Request) -> Response { Response { code: 500, headers: vec![], body: br#"internal server error"#.to_vec(), } } /// Serve the download endpoint pub fn dl(&self, req: &Request) -> Response { let file = self .dl_path .join(req.url.path().strip_prefix("/dl/").unwrap()); println!("{}", file.display()); if !file.exists() { return self.not_found(req); } return Response { body: fs::read(&file).unwrap(), code: 200, headers: vec![], }; } /// Serve the registry index pub fn index(&self, req: &Request) -> Response { let file = self .registry_path .join(req.url.path().strip_prefix("/index/").unwrap()); if !file.exists() { return self.not_found(req); } else { // Now grab info about the file. let data = fs::read(&file).unwrap(); let etag = Sha256::new().update(&data).finish_hex(); let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap()); // Start to construct our response: let mut any_match = false; let mut all_match = true; if let Some(expected) = &req.if_none_match { if &etag != expected { all_match = false; } else { any_match = true; } } if let Some(expected) = &req.if_modified_since { // NOTE: Equality comparison is good enough for tests. if &last_modified != expected { all_match = false; } else { any_match = true; } } if any_match && all_match { return Response { body: Vec::new(), code: 304, headers: vec![], }; } else { return Response { body: data, code: 200, headers: vec![ format!("ETag: \"{}\"", etag), format!("Last-Modified: {}", last_modified), ], }; } } } pub fn check_authorized_publish(&self, req: &Request) -> Response { if let Some(body) = &req.body { // Mimic the publish behavior for local registries by writing out the request // so tests can verify publishes made to either registry type. 
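            // The body uses cargo's publish wire format, which the code
            // below unpacks: a 32-bit little-endian JSON length, the JSON
            // metadata, a 32-bit little-endian crate-file length, and then
            // the `.crate` bytes themselves.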
let path = self.api_path.join("api/v1/crates/new"); t!(fs::create_dir_all(path.parent().unwrap())); t!(fs::write(&path, body)); // Get the metadata of the package let (len, remaining) = body.split_at(4); let json_len = u32::from_le_bytes(len.try_into().unwrap()); let (json, remaining) = remaining.split_at(json_len as usize); let new_crate = serde_json::from_slice::(json).unwrap(); // Get the `.crate` file let (len, remaining) = remaining.split_at(4); let file_len = u32::from_le_bytes(len.try_into().unwrap()); let (file, _remaining) = remaining.split_at(file_len as usize); let file_cksum = cksum(&file); if !self.check_authorized( req, Some(Mutation { mutation: "publish", name: Some(&new_crate.name), vers: Some(&new_crate.vers), cksum: Some(&file_cksum), }), ) { return self.unauthorized(req); } let dst = self .dl_path .join(&new_crate.name) .join(&new_crate.vers) .join("download"); if self.delayed_index_update == 0 { save_new_crate(dst, new_crate, file, file_cksum, &self.registry_path); } else { let delayed_index_update = self.delayed_index_update; let registry_path = self.registry_path.clone(); let file = Vec::from(file); thread::spawn(move || { thread::sleep(std::time::Duration::new(delayed_index_update as u64, 0)); save_new_crate(dst, new_crate, &file, file_cksum, ®istry_path); }); } self.ok(&req) } else { Response { code: 400, headers: vec![], body: b"The request was missing a body".to_vec(), } } } } fn save_new_crate( dst: PathBuf, new_crate: crates_io::NewCrate, file: &[u8], file_cksum: String, registry_path: &Path, ) { // Write the `.crate` t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, file)); let deps = new_crate .deps .iter() .map(|dep| { let (name, package) = match &dep.explicit_name_in_toml { Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())), None => (dep.name.to_string(), None), }; serde_json::json!({ "name": name, "req": dep.version_req, "features": dep.features, "default_features": dep.default_features, "target": dep.target, "optional": dep.optional, "kind": dep.kind, "registry": dep.registry, "package": package, "artifact": dep.artifact, "bindep_target": dep.bindep_target, "lib": dep.lib, }) }) .collect::>(); let line = create_index_line( serde_json::json!(new_crate.name), &new_crate.vers, deps, &file_cksum, new_crate.features, false, new_crate.links, new_crate.rust_version.as_deref(), None, ); write_to_index(registry_path, &new_crate.name, line, false); } impl Package { /// Creates a new package builder. /// Call `publish()` to finalize and build the package. pub fn new(name: &str, vers: &str) -> Package { let config = paths::home().join(".cargo/config.toml"); if !config.exists() { init(); } Package { name: name.to_string(), vers: vers.to_string(), deps: Vec::new(), files: Vec::new(), yanked: false, features: BTreeMap::new(), local: false, alternative: false, invalid_json: false, proc_macro: false, links: None, rust_version: None, cargo_features: Vec::new(), v: None, } } /// Call with `true` to publish in a "local registry". /// /// See `source-replacement.html#local-registry-sources` for more details /// on local registries. See `local_registry.rs` for the tests that use /// this. pub fn local(&mut self, local: bool) -> &mut Package { self.local = local; self } /// Call with `true` to publish in an "alternative registry". /// /// The name of the alternative registry is called "alternative". /// /// See `src/doc/src/reference/registries.md` for more details on /// alternative registries. 
See `alt_registry.rs` for the tests that use /// this. pub fn alternative(&mut self, alternative: bool) -> &mut Package { self.alternative = alternative; self } /// Adds a file to the package. pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { self.file_with_mode(name, DEFAULT_MODE, contents) } /// Adds a file with a specific Unix mode. pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), contents: EntryData::Regular(contents.into()), mode, extra: false, }); self } /// Adds a symlink to a path to the package. pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package { self.files.push(PackageFile { path: dst.to_string(), contents: EntryData::Symlink(src.into()), mode: DEFAULT_MODE, extra: false, }); self } /// Adds an "extra" file that is not rooted within the package. /// /// Normal files are automatically placed within a directory named /// `$PACKAGE-$VERSION`. This allows you to override that behavior, /// typically for testing invalid behavior. pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), contents: EntryData::Regular(contents.to_string()), mode: DEFAULT_MODE, extra: true, }); self } /// Adds a normal dependency. Example: /// ```toml /// [dependencies] /// foo = {version = "1.0"} /// ``` pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(&Dependency::new(name, vers)) } /// Adds a dependency with the given feature. Example: /// ```toml /// [dependencies] /// foo = {version = "1.0", "features": ["feat1", "feat2"]} /// ``` pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package { self.add_dep(Dependency::new(name, vers).enable_features(features)) } /// Adds a platform-specific dependency. Example: /// ```toml /// [target.'cfg(windows)'.dependencies] /// foo = {version = "1.0"} /// ``` pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).target(target)) } /// Adds a dependency to the alternative registry. pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).registry("alternative")) } /// Adds a dev-dependency. Example: /// ```toml /// [dev-dependencies] /// foo = {version = "1.0"} /// ``` pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).dev()) } /// Adds a build-dependency. Example: /// ```toml /// [build-dependencies] /// foo = {version = "1.0"} /// ``` pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).build()) } pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package { self.deps.push(dep.clone()); self } /// Specifies whether or not the package is "yanked". pub fn yanked(&mut self, yanked: bool) -> &mut Package { self.yanked = yanked; self } /// Specifies whether or not this is a proc macro. pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package { self.proc_macro = proc_macro; self } /// Adds an entry in the `[features]` section. pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package { let deps = deps.iter().map(|s| s.to_string()).collect(); self.features.insert(name.to_string(), deps); self } /// Specify a minimal Rust version. 
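    ///
    /// A minimal sketch (the version is illustrative):
    ///
    /// ```no_run
    /// use cargo_test_support::registry::Package;
    ///
    /// Package::new("foo", "1.0.0")
    ///     .rust_version("1.60")
    ///     .publish();
    /// ```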
pub fn rust_version(&mut self, rust_version: &str) -> &mut Package { self.rust_version = Some(rust_version.into()); self } /// Causes the JSON line emitted in the index to be invalid, presumably /// causing Cargo to skip over this version. pub fn invalid_json(&mut self, invalid: bool) -> &mut Package { self.invalid_json = invalid; self } pub fn links(&mut self, links: &str) -> &mut Package { self.links = Some(links.to_string()); self } pub fn cargo_feature(&mut self, feature: &str) -> &mut Package { self.cargo_features.push(feature.to_owned()); self } /// Sets the index schema version for this package. /// /// See `cargo::sources::registry::IndexPackage` for more information. pub fn schema_version(&mut self, version: u32) -> &mut Package { self.v = Some(version); self } /// Creates the package and place it in the registry. /// /// This does not actually use Cargo's publishing system, but instead /// manually creates the entry in the registry on the filesystem. /// /// Returns the checksum for the package. pub fn publish(&self) -> String { self.make_archive(); // Figure out what we're going to write into the index. let deps = self .deps .iter() .map(|dep| { // In the index, the `registry` is null if it is from the same registry. // In Cargo.toml, it is None if it is from crates.io. let registry_url = match (self.alternative, dep.registry.as_deref()) { (false, None) => None, (false, Some("alternative")) => Some(alt_registry_url().to_string()), (true, None) => { Some("https://github.com/rust-lang/crates.io-index".to_string()) } (true, Some("alternative")) => None, _ => panic!("registry_dep currently only supports `alternative`"), }; let artifact = if let Some(artifact) = &dep.artifact { serde_json::json!([artifact]) } else { serde_json::json!(null) }; serde_json::json!({ "name": dep.name, "req": dep.vers, "features": dep.features, "default_features": dep.default_features, "target": dep.target, "artifact": artifact, "bindep_target": dep.bindep_target, "lib": dep.lib, "optional": dep.optional, "kind": dep.kind, "registry": registry_url, "package": dep.package, "public": dep.public, }) }) .collect::>(); let cksum = { let c = t!(fs::read(&self.archive_dst())); cksum(&c) }; let name = if self.invalid_json { serde_json::json!(1) } else { serde_json::json!(self.name) }; let line = create_index_line( name, &self.vers, deps, &cksum, self.features.clone(), self.yanked, self.links.clone(), self.rust_version.as_deref(), self.v, ); let registry_path = if self.alternative { alt_registry_path() } else { registry_path() }; write_to_index(®istry_path, &self.name, line, self.local); cksum } fn make_archive(&self) { let dst = self.archive_dst(); t!(fs::create_dir_all(dst.parent().unwrap())); let f = t!(File::create(&dst)); let mut a = Builder::new(GzEncoder::new(f, Compression::none())); if !self .files .iter() .any(|PackageFile { path, .. 
}| path == "Cargo.toml") { self.append_manifest(&mut a); } if self.files.is_empty() { self.append( &mut a, "src/lib.rs", DEFAULT_MODE, &EntryData::Regular("".into()), ); } else { for PackageFile { path, contents, mode, extra, } in &self.files { if *extra { self.append_raw(&mut a, path, *mode, contents); } else { self.append(&mut a, path, *mode, contents); } } } } fn append_manifest(&self, ar: &mut Builder) { let mut manifest = String::new(); if !self.cargo_features.is_empty() { let mut features = String::new(); serde::Serialize::serialize( &self.cargo_features, toml::ser::ValueSerializer::new(&mut features), ) .unwrap(); manifest.push_str(&format!("cargo-features = {}\n\n", features)); } manifest.push_str(&format!( r#" [package] name = "{}" version = "{}" authors = [] "#, self.name, self.vers )); if let Some(version) = &self.rust_version { manifest.push_str(&format!("rust-version = \"{}\"", version)); } if !self.features.is_empty() { let features: Vec = self .features .iter() .map(|(feature, features)| { if features.is_empty() { format!("{} = []", feature) } else { format!( "{} = [{}]", feature, features .iter() .map(|s| format!("\"{}\"", s)) .collect::>() .join(", ") ) } }) .collect(); manifest.push_str(&format!("\n[features]\n{}", features.join("\n"))); } for dep in self.deps.iter() { let target = match dep.target { None => String::new(), Some(ref s) => format!("target.'{}'.", s), }; let kind = match &dep.kind[..] { "build" => "build-", "dev" => "dev-", _ => "", }; manifest.push_str(&format!( r#" [{}{}dependencies.{}] version = "{}" "#, target, kind, dep.name, dep.vers )); if dep.optional { manifest.push_str("optional = true\n"); } if let Some(artifact) = &dep.artifact { manifest.push_str(&format!("artifact = \"{}\"\n", artifact)); } if let Some(target) = &dep.bindep_target { manifest.push_str(&format!("target = \"{}\"\n", target)); } if dep.lib { manifest.push_str("lib = true\n"); } if let Some(registry) = &dep.registry { assert_eq!(registry, "alternative"); manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url())); } if !dep.default_features { manifest.push_str("default-features = false\n"); } if !dep.features.is_empty() { let mut features = String::new(); serde::Serialize::serialize( &dep.features, toml::ser::ValueSerializer::new(&mut features), ) .unwrap(); manifest.push_str(&format!("features = {}\n", features)); } if let Some(package) = &dep.package { manifest.push_str(&format!("package = \"{}\"\n", package)); } } if self.proc_macro { manifest.push_str("[lib]\nproc-macro = true\n"); } self.append( ar, "Cargo.toml", DEFAULT_MODE, &EntryData::Regular(manifest.into()), ); } fn append(&self, ar: &mut Builder, file: &str, mode: u32, contents: &EntryData) { self.append_raw( ar, &format!("{}-{}/{}", self.name, self.vers, file), mode, contents, ); } fn append_raw( &self, ar: &mut Builder, path: &str, mode: u32, contents: &EntryData, ) { let mut header = Header::new_ustar(); let contents = match contents { EntryData::Regular(contents) => contents.as_str(), EntryData::Symlink(src) => { header.set_entry_type(tar::EntryType::Symlink); t!(header.set_link_name(src)); "" // Symlink has no contents. } }; header.set_size(contents.len() as u64); t!(header.set_path(path)); header.set_mode(mode); header.set_cksum(); t!(ar.append(&header, contents.as_bytes())); } /// Returns the path to the compressed package file. 
pub fn archive_dst(&self) -> PathBuf { if self.local { let path = if self.alternative { alt_registry_path() } else { registry_path() }; path.join(format!("{}-{}.crate", self.name, self.vers)) } else if self.alternative { alt_dl_path() .join(&self.name) .join(&self.vers) .join("download") } else { dl_path().join(&self.name).join(&self.vers).join("download") } } } pub fn cksum(s: &[u8]) -> String { Sha256::new().update(s).finish_hex() } impl Dependency { pub fn new(name: &str, vers: &str) -> Dependency { Dependency { name: name.to_string(), vers: vers.to_string(), kind: "normal".to_string(), artifact: None, bindep_target: None, lib: false, target: None, features: Vec::new(), package: None, optional: false, registry: None, default_features: true, public: false, } } /// Changes this to `[build-dependencies]`. pub fn build(&mut self) -> &mut Self { self.kind = "build".to_string(); self } /// Changes this to `[dev-dependencies]`. pub fn dev(&mut self) -> &mut Self { self.kind = "dev".to_string(); self } /// Changes this to `[target.$target.dependencies]`. pub fn target(&mut self, target: &str) -> &mut Self { self.target = Some(target.to_string()); self } /// Change the artifact to be of the given kind, like "bin", or "staticlib", /// along with a specific target triple if provided. pub fn artifact(&mut self, kind: &str, target: Option) -> &mut Self { self.artifact = Some(kind.to_string()); self.bindep_target = target; self } /// Adds `registry = $registry` to this dependency. pub fn registry(&mut self, registry: &str) -> &mut Self { self.registry = Some(registry.to_string()); self } /// Adds `features = [ ... ]` to this dependency. pub fn enable_features(&mut self, features: &[&str]) -> &mut Self { self.features.extend(features.iter().map(|s| s.to_string())); self } /// Adds `package = ...` to this dependency. pub fn package(&mut self, pkg: &str) -> &mut Self { self.package = Some(pkg.to_string()); self } /// Changes this to an optional dependency. pub fn optional(&mut self, optional: bool) -> &mut Self { self.optional = optional; self } /// Changes this to an public dependency. pub fn public(&mut self, public: bool) -> &mut Self { self.public = public; self } /// Adds `default-features = false` if the argument is `false`. pub fn default_features(&mut self, default_features: bool) -> &mut Self { self.default_features = default_features; self } } cargo-test-support-0.3.0/src/tools.rs000064400000000000000000000112021046102023000156770ustar 00000000000000//! Common executables that can be reused by various tests. use crate::{basic_manifest, paths, project, Project}; use std::path::{Path, PathBuf}; use std::sync::Mutex; use std::sync::OnceLock; static ECHO_WRAPPER: OnceLock>> = OnceLock::new(); static ECHO: OnceLock>> = OnceLock::new(); static CLIPPY_DRIVER: OnceLock>> = OnceLock::new(); /// Returns the path to an executable that works as a wrapper around rustc. /// /// The wrapper will echo the command line it was called with to stderr. 
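///
/// A minimal sketch of wiring the wrapper into a test (illustrative; `p` is
/// assumed to be a project built with the usual helpers):
///
/// ```no_run
/// use cargo_test_support::{project, tools};
///
/// # let p = project().file("src/lib.rs", "").build();
/// p.cargo("check")
///     .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper())
///     .run();
/// ```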
pub fn echo_wrapper() -> PathBuf { let mut lock = ECHO_WRAPPER .get_or_init(|| Default::default()) .lock() .unwrap(); if let Some(path) = &*lock { return path.clone(); } let p = project() .at(paths::global_root().join("rustc-echo-wrapper")) .file("Cargo.toml", &basic_manifest("rustc-echo-wrapper", "1.0.0")) .file( "src/main.rs", r#" use std::fs::read_to_string; use std::path::PathBuf; fn main() { // Handle args from `@path` argfile for rustc let args = std::env::args() .flat_map(|p| if let Some(p) = p.strip_prefix("@") { read_to_string(p).unwrap().lines().map(String::from).collect() } else { vec![p] }) .collect::>(); eprintln!("WRAPPER CALLED: {}", args[1..].join(" ")); let status = std::process::Command::new(&args[1]) .args(&args[2..]).status().unwrap(); std::process::exit(status.code().unwrap_or(1)); } "#, ) .build(); p.cargo("build").run(); let path = p.bin("rustc-echo-wrapper"); *lock = Some(path.clone()); path } /// Returns the path to an executable that prints its arguments. /// /// Do not expect this to be anything fancy. pub fn echo() -> PathBuf { let mut lock = ECHO.get_or_init(|| Default::default()).lock().unwrap(); if let Some(path) = &*lock { return path.clone(); } if let Ok(path) = cargo_util::paths::resolve_executable(Path::new("echo")) { *lock = Some(path.clone()); return path; } // Often on Windows, `echo` is not available. let p = project() .at(paths::global_root().join("basic-echo")) .file("Cargo.toml", &basic_manifest("basic-echo", "1.0.0")) .file( "src/main.rs", r#" fn main() { let mut s = String::new(); let mut it = std::env::args().skip(1).peekable(); while let Some(n) = it.next() { s.push_str(&n); if it.peek().is_some() { s.push(' '); } } println!("{}", s); } "#, ) .build(); p.cargo("build").run(); let path = p.bin("basic-echo"); *lock = Some(path.clone()); path } /// Returns a project which builds a cargo-echo simple subcommand pub fn echo_subcommand() -> Project { let p = project() .at("cargo-echo") .file("Cargo.toml", &basic_manifest("cargo-echo", "0.0.1")) .file( "src/main.rs", r#" fn main() { let args: Vec<_> = ::std::env::args().skip(1).collect(); println!("{}", args.join(" ")); } "#, ) .build(); p.cargo("build").run(); p } /// A wrapper around `rustc` instead of calling `clippy`. pub fn wrapped_clippy_driver() -> PathBuf { let mut lock = CLIPPY_DRIVER .get_or_init(|| Default::default()) .lock() .unwrap(); if let Some(path) = &*lock { return path.clone(); } let clippy_driver = project() .at(paths::global_root().join("clippy-driver")) .file("Cargo.toml", &basic_manifest("clippy-driver", "0.0.1")) .file( "src/main.rs", r#" fn main() { let mut args = std::env::args_os(); let _me = args.next().unwrap(); let rustc = args.next().unwrap(); let status = std::process::Command::new(rustc).args(args).status().unwrap(); std::process::exit(status.code().unwrap_or(1)); } "#, ) .build(); clippy_driver.cargo("build").run(); let path = clippy_driver.bin("clippy-driver"); *lock = Some(path.clone()); path }
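// A minimal usage sketch for `wrapped_clippy_driver` (illustrative; the
// testsuite points `RUSTC_WORKSPACE_WRAPPER` at it so that `cargo fix`
// style tests can emulate running under clippy):
//
//     p.cargo("fix --allow-no-vcs")
//         .env("RUSTC_WORKSPACE_WRAPPER", tools::wrapped_clippy_driver())
//         .run();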