insta-1.39.0/.cargo_vcs_info.json0000644000000001430000000000100122420ustar { "git": { "sha1": "922c68f71a90c1541d29e62f5bb2cd2151fce300" }, "path_in_vcs": "insta" }insta-1.39.0/Cargo.toml0000644000000046000000000000100102420ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" rust-version = "1.51.0" name = "insta" version = "1.39.0" authors = ["Armin Ronacher "] exclude = ["assets/*"] description = "A snapshot testing library for Rust" homepage = "https://insta.rs/" readme = "README.md" keywords = [ "snapshot", "testing", "jest", "approval", ] categories = ["development-tools::testing"] license = "Apache-2.0" repository = "https://github.com/mitsuhiko/insta" [package.metadata.docs.rs] all-features = true rustdoc-args = [ "--cfg", "docsrs", ] [dependencies.console] version = "0.15.4" optional = true default-features = false [dependencies.dep_csv] version = "=1.1.6" optional = true package = "csv" [dependencies.dep_ron] version = "0.7.1" optional = true package = "ron" [dependencies.dep_toml] version = "0.5.7" optional = true package = "toml" [dependencies.globset] version = "0.4.6" optional = true [dependencies.lazy_static] version = "1.4.0" [dependencies.linked-hash-map] version = "0.5.6" [dependencies.pest] version = "2.1.3" optional = true [dependencies.pest_derive] version = "2.1.0" optional = true [dependencies.regex] version = "1.6.0" features = [ "std", "unicode", ] optional = true default-features = false [dependencies.serde] version = "1.0.117" optional = true [dependencies.similar] version = "2.1.0" features = 
["inline"] [dependencies.walkdir] version = "2.3.1" optional = true [dev-dependencies.rustc_version] version = "0.4.0" [dev-dependencies.serde] version = "1.0.117" features = ["derive"] [dev-dependencies.similar-asserts] version = "1.4.2" [features] _cargo_insta_internal = [] colors = ["console"] csv = [ "dep_csv", "serde", ] default = ["colors"] filters = ["regex"] glob = [ "walkdir", "globset", ] json = ["serde"] redactions = [ "pest", "pest_derive", "serde", ] ron = [ "dep_ron", "serde", ] toml = [ "dep_toml", "serde", ] yaml = ["serde"] insta-1.39.0/Cargo.toml.orig000064400000000000000000000040171046102023000137250ustar 00000000000000[package] name = "insta" version = "1.39.0" license = "Apache-2.0" authors = ["Armin Ronacher "] description = "A snapshot testing library for Rust" edition = "2018" rust-version = "1.51.0" homepage = "https://insta.rs/" repository = "https://github.com/mitsuhiko/insta" keywords = ["snapshot", "testing", "jest", "approval"] categories = ["development-tools::testing"] readme = "README.md" exclude = [ "assets/*" ] [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] [features] default = ["colors"] # when the redactions feature is enabled values can be redacted in serialized # snapshots. 
redactions = ["pest", "pest_derive", "serde"] # Enables support for running filters on snapshot filters = ["regex"] # Glob support glob = ["walkdir", "globset"] # Color support colors = ["console"] # Serialization formats # TODO: This could be cleaner by using "dep:csv" without renaming the dep, but # this technique allows for a lower MSRV csv = ["dep_csv", "serde"] json = ["serde"] ron = ["dep_ron", "serde"] toml = ["dep_toml", "serde"] yaml = ["serde"] # internal feature exclusive to cargo-insta _cargo_insta_internal = [] [dependencies] dep_csv = { package = "csv", version = "=1.1.6", optional = true } console = { version = "0.15.4", optional = true, default-features = false } pest = { version = "2.1.3", optional = true } pest_derive = { version = "2.1.0", optional = true } dep_ron = { package = "ron", version = "0.7.1", optional = true } dep_toml = { package = "toml", version = "0.5.7", optional = true } globset = { version = "0.4.6", optional = true } walkdir = { version = "2.3.1", optional = true } similar = { version = "2.1.0", features = ["inline"] } regex = { version = "1.6.0", default-features = false, optional = true, features = ["std", "unicode"] } serde = { version = "1.0.117", optional = true } linked-hash-map = "0.5.6" lazy_static = "1.4.0" [dev-dependencies] rustc_version = "0.4.0" serde = { version = "1.0.117", features = ["derive"] } similar-asserts = "1.4.2" insta-1.39.0/LICENSE000064400000000000000000000251371046102023000120510ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. insta-1.39.0/README.md000064400000000000000000000064561046102023000123260ustar 00000000000000

insta: a snapshot testing library for Rust

[![Build Status](https://github.com/mitsuhiko/insta/workflows/Tests/badge.svg?branch=master)](https://github.com/mitsuhiko/insta/actions?query=workflow%3ATests) [![Crates.io](https://img.shields.io/crates/d/insta.svg)](https://crates.io/crates/insta) [![License](https://img.shields.io/github/license/mitsuhiko/insta)](https://github.com/mitsuhiko/insta/blob/master/LICENSE) [![rustc 1.51.0](https://img.shields.io/badge/rust-1.51.0%2B-orange.svg)](https://img.shields.io/badge/rust-1.51.0%2B-orange.svg) [![Documentation](https://docs.rs/insta/badge.svg)](https://docs.rs/insta) [![VSCode Extension](https://img.shields.io/visual-studio-marketplace/v/mitsuhiko.insta?label=vscode%20extension)](https://marketplace.visualstudio.com/items?itemName=mitsuhiko.insta) ## Introduction Snapshots tests (also sometimes called approval tests) are tests that assert values against a reference value (the snapshot). This is similar to how `assert_eq!` lets you compare a value against a reference value but unlike simple string assertions, snapshot tests let you test against complex values and come with comprehensive tools to review changes. Snapshot tests are particularly useful if your reference values are very large or change often. ## Example ```rust #[test] fn test_hello_world() { insta::assert_debug_snapshot!(vec![1, 2, 3]); } ``` Curious? There is a screencast that shows the entire workflow: [watch the insta introduction screencast](https://www.youtube.com/watch?v=rCHrMqE4JOY&feature=youtu.be). Or if you're not into videos, read the [5 minute introduction](https://insta.rs/docs/quickstart/). Insta also supports inline snapshots which are stored right in your source file instead of separate files. This is accomplished by the companion [cargo-insta](https://github.com/mitsuhiko/insta/tree/master/cargo-insta) tool. 
## Editor Support For looking at `.snap` files there is a [vscode extension](https://github.com/mitsuhiko/insta/tree/master/vscode-insta) which can syntax highlight snapshot files, review snapshots and more. It can be installed from the marketplace: [view on marketplace](https://marketplace.visualstudio.com/items?itemName=mitsuhiko.insta). ![jump to definition](https://raw.githubusercontent.com/mitsuhiko/insta/master/vscode-insta/images/jump-to-definition.gif) ## Diffing Insta uses [`similar`](https://github.com/mitsuhiko/similar) for all its diffing operations. You can use it independently of insta. You can use the [`similar-asserts`](https://github.com/mitsuhiko/similar-asserts) crate to get inline diffs for the standard `assert_eq!` macro to achieve insta like diffs for regular comparisons: ```rust use similar_asserts::assert_eq; fn main() { let reference = vec![1, 2, 3, 4]; assert_eq!(reference, (0..4).collect::>()); } ``` ## Sponsor If you like the project and find it useful you can [become a sponsor](https://github.com/sponsors/mitsuhiko). ## License and Links - [Project Website](https://insta.rs/) - [Documentation](https://docs.rs/insta/) - [Issue Tracker](https://github.com/mitsuhiko/insta/issues) - License: [Apache-2.0](https://github.com/mitsuhiko/insta/blob/master/LICENSE) insta-1.39.0/src/content/json.rs000064400000000000000000000414221046102023000146170ustar 00000000000000use std::fmt::{Display, Write}; use crate::content::Content; /// The maximum number of characters to print in a single line /// when [`to_string_pretty`] is used. const COMPACT_MAX_CHARS: usize = 120; pub fn format_float(value: T) -> String { let mut rv = format!("{}", value); if !rv.contains('.') { rv.push_str(".0"); } rv } #[derive(PartialEq, Eq, Copy, Clone, Debug)] pub enum Format { Condensed, SingleLine, Pretty, } /// Serializes a serializable to JSON. 
pub struct Serializer { out: String, format: Format, indentation: usize, } impl Serializer { /// Creates a new serializer that writes into the given writer. pub fn new() -> Serializer { Serializer { out: String::new(), format: Format::Condensed, indentation: 0, } } pub fn into_result(self) -> String { self.out } fn write_indentation(&mut self) { if self.format == Format::Pretty { write!(self.out, "{: ^1$}", "", self.indentation * 2).unwrap(); } } fn start_container(&mut self, c: char) { self.write_char(c); self.indentation += 1; } fn end_container(&mut self, c: char, empty: bool) { self.indentation -= 1; if self.format == Format::Pretty && !empty { self.write_char('\n'); self.write_indentation(); } self.write_char(c); } fn write_comma(&mut self, first: bool) { match self.format { Format::Pretty => { if first { self.write_char('\n'); } else { self.write_str(",\n"); } self.write_indentation(); } Format::Condensed => { if !first { self.write_char(','); } } Format::SingleLine => { if !first { self.write_str(", "); } } } } fn write_colon(&mut self) { match self.format { Format::Pretty | Format::SingleLine => self.write_str(": "), Format::Condensed => self.write_char(':'), } } fn serialize_array(&mut self, items: &[Content]) { self.start_container('['); for (idx, item) in items.iter().enumerate() { self.write_comma(idx == 0); self.serialize(item); } self.end_container(']', items.is_empty()); } fn serialize_object(&mut self, fields: &[(&str, Content)]) { self.start_container('{'); for (idx, (key, value)) in fields.iter().enumerate() { self.write_comma(idx == 0); self.write_escaped_str(key); self.write_colon(); self.serialize(value); } self.end_container('}', fields.is_empty()); } pub fn serialize(&mut self, value: &Content) { match value { Content::Bool(true) => self.write_str("true"), Content::Bool(false) => self.write_str("false"), Content::U8(n) => write!(self.out, "{}", n).unwrap(), Content::U16(n) => write!(self.out, "{}", n).unwrap(), Content::U32(n) => 
write!(self.out, "{}", n).unwrap(), Content::U64(n) => write!(self.out, "{}", n).unwrap(), Content::U128(n) => write!(self.out, "{}", n).unwrap(), Content::I8(n) => write!(self.out, "{}", n).unwrap(), Content::I16(n) => write!(self.out, "{}", n).unwrap(), Content::I32(n) => write!(self.out, "{}", n).unwrap(), Content::I64(n) => write!(self.out, "{}", n).unwrap(), Content::I128(n) => write!(self.out, "{}", n).unwrap(), Content::F32(f) => { if f.is_finite() { self.write_str(&format_float(f)); } else { self.write_str("null") } } Content::F64(f) => { if f.is_finite() { self.write_str(&format_float(f)); } else { self.write_str("null") } } Content::Char(c) => self.write_escaped_str(&(*c).to_string()), Content::String(s) => self.write_escaped_str(s), Content::Bytes(bytes) => { self.start_container('['); for (idx, byte) in bytes.iter().enumerate() { self.write_comma(idx == 0); self.write_str(&byte.to_string()); } self.end_container(']', bytes.is_empty()); } Content::None | Content::Unit | Content::UnitStruct(_) => self.write_str("null"), Content::Some(content) => self.serialize(content), Content::UnitVariant(_, _, variant) => self.write_escaped_str(variant), Content::NewtypeStruct(_, content) => self.serialize(content), Content::NewtypeVariant(_, _, variant, content) => { self.start_container('{'); self.write_comma(true); self.write_escaped_str(variant); self.write_colon(); self.serialize(content); self.end_container('}', false); } Content::Seq(seq) | Content::Tuple(seq) | Content::TupleStruct(_, seq) => { self.serialize_array(seq); } Content::TupleVariant(_, _, variant, seq) => { self.start_container('{'); self.write_comma(true); self.write_escaped_str(variant); self.write_colon(); self.serialize_array(seq); self.end_container('}', false); } Content::Map(map) => { self.start_container('{'); for (idx, (key, value)) in map.iter().enumerate() { self.write_comma(idx == 0); let real_key = key.resolve_inner(); if let Content::String(ref s) = real_key { 
self.write_escaped_str(s); } else if let Some(num) = real_key.as_i64() { self.write_escaped_str(&num.to_string()); } else if let Some(num) = real_key.as_i128() { self.write_escaped_str(&num.to_string()); } else { panic!("cannot serialize maps without string keys to JSON"); } self.write_colon(); self.serialize(value); } self.end_container('}', map.is_empty()); } Content::Struct(_, fields) => { self.serialize_object(fields); } Content::StructVariant(_, _, variant, fields) => { self.start_container('{'); self.write_comma(true); self.write_escaped_str(variant); self.write_colon(); self.serialize_object(fields); self.end_container('}', false); } } } fn write_str(&mut self, s: &str) { self.out.push_str(s); } fn write_char(&mut self, c: char) { self.out.push(c); } fn write_escaped_str(&mut self, value: &str) { self.write_char('"'); let bytes = value.as_bytes(); let mut start = 0; for (i, &byte) in bytes.iter().enumerate() { let escape = ESCAPE[byte as usize]; if escape == 0 { continue; } if start < i { self.write_str(&value[start..i]); } match escape { self::BB => self.write_str("\\b"), self::TT => self.write_str("\\t"), self::NN => self.write_str("\\n"), self::FF => self.write_str("\\f"), self::RR => self.write_str("\\r"), self::QU => self.write_str("\\\""), self::BS => self.write_str("\\\\"), self::U => { static HEX_DIGITS: [u8; 16] = *b"0123456789abcdef"; self.write_str("\\u00"); self.write_char(HEX_DIGITS[(byte >> 4) as usize] as char); self.write_char(HEX_DIGITS[(byte & 0xF) as usize] as char); } _ => unreachable!(), } start = i + 1; } if start != bytes.len() { self.write_str(&value[start..]); } self.write_char('"'); } } const BB: u8 = b'b'; // \x08 const TT: u8 = b't'; // \x09 const NN: u8 = b'n'; // \x0A const FF: u8 = b'f'; // \x0C const RR: u8 = b'r'; // \x0D const QU: u8 = b'"'; // \x22 const BS: u8 = b'\\'; // \x5C const U: u8 = b'u'; // \x00...\x1F except the ones above // Lookup table of escape sequences. 
A value of b'x' at index i means that byte // i is escaped as "\x" in JSON. A value of 0 means that byte i is not escaped. #[rustfmt::skip] static ESCAPE: [u8; 256] = [ // 1 2 3 4 5 6 7 8 9 A B C D E F U, U, U, U, U, U, U, U, BB, TT, NN, U, FF, RR, U, U, // 0 U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, // 1 0, 0, QU, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 2 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 4 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, BS, 0, 0, 0, // 5 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 6 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 7 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 8 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 9 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // A 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // B 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // C 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // D 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // E 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // F ]; /// Serializes a value to JSON. pub fn to_string(value: &Content) -> String { let mut ser = Serializer::new(); ser.serialize(value); ser.into_result() } /// Serializes a value to JSON in single-line format. #[allow(unused)] pub fn to_string_compact(value: &Content) -> String { let mut ser = Serializer::new(); ser.format = Format::SingleLine; ser.serialize(value); let rv = ser.into_result(); // this is pretty wasteful as we just format twice // but it's acceptable for the way this is used in // insta. 
if rv.chars().count() > COMPACT_MAX_CHARS { to_string_pretty(value) } else { rv } } /// Serializes a value to JSON pretty #[allow(unused)] pub fn to_string_pretty(value: &Content) -> String { let mut ser = Serializer::new(); ser.format = Format::Pretty; ser.serialize(value); ser.into_result() } #[test] fn test_to_string() { let json = to_string(&Content::Map(vec![ ( Content::from("environments"), Content::Seq(vec![ Content::from("development"), Content::from("production"), ]), ), (Content::from("cmdline"), Content::Seq(vec![])), (Content::from("extra"), Content::Map(vec![])), ])); crate::assert_snapshot!(&json, @r###"{"environments":["development","production"],"cmdline":[],"extra":{}}"###); } #[test] fn test_to_string_pretty() { let json = to_string_pretty(&Content::Map(vec![ ( Content::from("environments"), Content::Seq(vec![ Content::from("development"), Content::from("production"), ]), ), (Content::from("cmdline"), Content::Seq(vec![])), (Content::from("extra"), Content::Map(vec![])), ])); crate::assert_snapshot!(&json, @r###" { "environments": [ "development", "production" ], "cmdline": [], "extra": {} } "###); } #[test] fn test_to_string_num_keys() { let content = Content::Map(vec![ (Content::from(42u32), Content::from(true)), (Content::from(-23i32), Content::from(false)), ]); let json = to_string_pretty(&content); crate::assert_snapshot!(&json, @r###" { "42": true, "-23": false } "###); } #[test] fn test_to_string_pretty_complex() { let content = Content::Map(vec![ ( Content::from("is_alive"), Content::NewtypeStruct("Some", Content::from(true).into()), ), ( Content::from("newtype_variant"), Content::NewtypeVariant( "Foo", 0, "variant_a", Box::new(Content::Struct( "VariantA", vec![ ("field_a", Content::String("value_a".into())), ("field_b", 42u32.into()), ], )), ), ), ( Content::from("struct_variant"), Content::StructVariant( "Foo", 0, "variant_b", vec![ ("field_a", Content::String("value_a".into())), ("field_b", 42u32.into()), ], ), ), ( 
Content::from("tuple_variant"), Content::TupleVariant( "Foo", 0, "variant_c", vec![(Content::String("value_a".into())), (42u32.into())], ), ), (Content::from("empty_array"), Content::Seq(vec![])), (Content::from("empty_object"), Content::Map(vec![])), (Content::from("array"), Content::Seq(vec![true.into()])), ( Content::from("object"), Content::Map(vec![("foo".into(), true.into())]), ), ( Content::from("array_of_objects"), Content::Seq(vec![Content::Struct( "MyType", vec![ ("foo", Content::from("bar".to_string())), ("bar", Content::from("xxx".to_string())), ], )]), ), ( Content::from("unit_variant"), Content::UnitVariant("Stuff", 0, "value"), ), (Content::from("u8"), Content::U8(8)), (Content::from("u16"), Content::U16(16)), (Content::from("u32"), Content::U32(32)), (Content::from("u64"), Content::U64(64)), (Content::from("u128"), Content::U128(128)), (Content::from("i8"), Content::I8(8)), (Content::from("i16"), Content::I16(16)), (Content::from("i32"), Content::I32(32)), (Content::from("i64"), Content::I64(64)), (Content::from("i128"), Content::I128(128)), (Content::from("f32"), Content::F32(32.0)), (Content::from("f64"), Content::F64(64.0)), (Content::from("char"), Content::Char('A')), (Content::from("bytes"), Content::Bytes(b"hehe".to_vec())), (Content::from("null"), Content::None), (Content::from("unit"), Content::Unit), ( Content::from("crazy_string"), Content::String((0u8..=126).map(|x| x as char).collect()), ), ]); let json = to_string_pretty(&content); crate::assert_snapshot!(&json, @r###" { "is_alive": true, "newtype_variant": { "variant_a": { "field_a": "value_a", "field_b": 42 } }, "struct_variant": { "variant_b": { "field_a": "value_a", "field_b": 42 } }, "tuple_variant": { "variant_c": [ "value_a", 42 ] }, "empty_array": [], "empty_object": {}, "array": [ true ], "object": { "foo": true }, "array_of_objects": [ { "foo": "bar", "bar": "xxx" } ], "unit_variant": "value", "u8": 8, "u16": 16, "u32": 32, "u64": 64, "u128": 128, "i8": 8, "i16": 16, "i32": 
32, "i64": 64, "i128": 128, "f32": 32.0, "f64": 64.0, "char": "A", "bytes": [ 104, 101, 104, 101 ], "null": null, "unit": null, "crazy_string": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } "###); } insta-1.39.0/src/content/mod.rs000064400000000000000000000277201046102023000144320ustar 00000000000000//! This module implements a generic `Content` type that can hold //! runtime typed data. //! //! It's modelled after serde's data format but it's in fact possible to use //! this independently of serde. The `yaml` and `json` support implemented //! here works without serde. Only `yaml` has an implemented parser but since //! YAML is a superset of JSON insta instead currently parses JSON via the //! YAML implementation. pub mod json; #[cfg(feature = "serde")] mod serialization; pub mod yaml; #[cfg(feature = "serde")] pub use serialization::*; use std::fmt; /// An internal error type for content related errors. 
#[derive(Debug)] pub enum Error { FailedParsingYaml(std::path::PathBuf), UnexpectedDataType, #[cfg(feature = "_cargo_insta_internal")] MissingField, #[cfg(feature = "_cargo_insta_internal")] FileIo(std::io::Error, std::path::PathBuf), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Error::FailedParsingYaml(p) => { f.write_str(format!("Failed parsing the YAML from {:?}", p.display()).as_str()) } Error::UnexpectedDataType => { f.write_str("The present data type wasn't what was expected") } #[cfg(feature = "_cargo_insta_internal")] Error::MissingField => f.write_str("A required field was missing"), #[cfg(feature = "_cargo_insta_internal")] Error::FileIo(e, p) => { f.write_str(format!("File error for {:?}: {}", p.display(), e).as_str()) } } } } impl std::error::Error for Error {} /// Represents variable typed content. /// /// This is used for the serialization system to represent values /// before the actual snapshots are written and is also exposed to /// dynamic redaction functions. /// /// Some enum variants are intentionally not exposed to user code. /// It's generally recommended to construct content objects by /// using the [`From`](std::convert::From) trait and by using the /// accessor methods to assert on it. /// /// While matching on the content is possible in theory it is /// recommended against. The reason for this is that the content /// enum holds variants that can "wrap" values where it's not /// expected. For instance if a field holds an `Option` /// you cannot use pattern matching to extract the string as it /// will be contained in an internal `Some` variant that is not /// exposed. On the other hand the `as_str` method will /// automatically resolve such internal wrappers. /// /// If you do need to pattern match you should use the /// `resolve_inner` method to resolve such internal wrappers. 
#[derive(Debug, Clone, PartialEq, PartialOrd)]
pub enum Content {
    Bool(bool),
    U8(u8),
    U16(u16),
    U32(u32),
    U64(u64),
    U128(u128),
    I8(i8),
    I16(i16),
    I32(i32),
    I64(i64),
    I128(i128),
    F32(f32),
    F64(f64),
    Char(char),
    String(String),
    Bytes(Vec<u8>),
    #[doc(hidden)]
    None,
    #[doc(hidden)]
    Some(Box<Content>),
    #[doc(hidden)]
    Unit,
    #[doc(hidden)]
    UnitStruct(&'static str),
    #[doc(hidden)]
    UnitVariant(&'static str, u32, &'static str),
    #[doc(hidden)]
    NewtypeStruct(&'static str, Box<Content>),
    #[doc(hidden)]
    NewtypeVariant(&'static str, u32, &'static str, Box<Content>),
    Seq(Vec<Content>),
    #[doc(hidden)]
    Tuple(Vec<Content>),
    #[doc(hidden)]
    TupleStruct(&'static str, Vec<Content>),
    #[doc(hidden)]
    TupleVariant(&'static str, u32, &'static str, Vec<Content>),
    Map(Vec<(Content, Content)>),
    #[doc(hidden)]
    Struct(&'static str, Vec<(&'static str, Content)>),
    #[doc(hidden)]
    StructVariant(
        &'static str,
        u32,
        &'static str,
        Vec<(&'static str, Content)>,
    ),
}

// Generates the trivial `From<T> for Content` conversions for the
// primitive-carrying variants below.
macro_rules! content_from {
    ($src:ty => $variant:ident) => {
        impl From<$src> for Content {
            fn from(value: $src) -> Content {
                Content::$variant(value)
            }
        }
    };
}

content_from!(bool => Bool);
content_from!(u8 => U8);
content_from!(u16 => U16);
content_from!(u32 => U32);
content_from!(u64 => U64);
content_from!(u128 => U128);
content_from!(i8 => I8);
content_from!(i16 => I16);
content_from!(i32 => I32);
content_from!(i64 => I64);
content_from!(i128 => I128);
content_from!(f32 => F32);
content_from!(f64 => F64);
content_from!(char => Char);
content_from!(String => String);
content_from!(Vec<u8> => Bytes);

impl From<()> for Content {
    fn from(_value: ()) -> Content {
        Content::Unit
    }
}

impl<'a> From<&'a str> for Content {
    fn from(value: &'a str) -> Content {
        Content::String(value.to_string())
    }
}

impl<'a> From<&'a [u8]> for Content {
    fn from(value: &'a [u8]) -> Content {
        Content::Bytes(value.to_vec())
    }
}

impl Content {
    /// Resolves the innermost content in a chain of wrapped content.
    ///
    /// For instance if you encounter an `Option<Option<String>>` field
    /// the content will be wrapped twice in an internal option wrapper.
    /// If you need to pattern match you will need in some situations to
    /// first resolve the inner value before such matching can take place
    /// as there is no exposed way to match on these wrappers.
    ///
    /// This method does not need to be called for the `as_` methods
    /// which resolve automatically.
    pub fn resolve_inner(&self) -> &Content {
        // Recurse through every transparent wrapper until a plain value
        // (or a non-wrapping variant) is reached.
        match self {
            Content::Some(inner)
            | Content::NewtypeStruct(_, inner)
            | Content::NewtypeVariant(_, _, _, inner) => inner.resolve_inner(),
            other => other,
        }
    }

    /// Mutable version of [`resolve_inner`](Self::resolve_inner).
    pub fn resolve_inner_mut(&mut self) -> &mut Content {
        match self {
            Content::Some(inner)
            | Content::NewtypeStruct(_, inner)
            | Content::NewtypeVariant(_, _, _, inner) => inner.resolve_inner_mut(),
            other => other,
        }
    }

    /// Returns the value as string
    pub fn as_str(&self) -> Option<&str> {
        match self.resolve_inner() {
            Content::String(s) => Some(s.as_str()),
            _ => None,
        }
    }

    /// Returns the value as bytes
    pub fn as_bytes(&self) -> Option<&[u8]> {
        match self.resolve_inner() {
            Content::Bytes(b) => Some(b),
            _ => None,
        }
    }

    /// Returns the value as slice of content values.
    pub fn as_slice(&self) -> Option<&[Content]> {
        // All three sequence-like variants expose their elements the same way.
        match self.resolve_inner() {
            Content::Seq(v) | Content::Tuple(v) | Content::TupleVariant(_, _, _, v) => {
                Some(&v[..])
            }
            _ => None,
        }
    }

    /// Returns true if the value is nil.
pub fn is_nil(&self) -> bool { matches!(self.resolve_inner(), Content::None | Content::Unit) } /// Returns the value as bool pub fn as_bool(&self) -> Option { match *self.resolve_inner() { Content::Bool(val) => Some(val), _ => None, } } /// Returns the value as u64 pub fn as_u64(&self) -> Option { match *self.resolve_inner() { Content::U8(v) => Some(u64::from(v)), Content::U16(v) => Some(u64::from(v)), Content::U32(v) => Some(u64::from(v)), Content::U64(v) => Some(v), Content::U128(v) => { let rv = v as u64; if rv as u128 == v { Some(rv) } else { None } } Content::I8(v) if v >= 0 => Some(v as u64), Content::I16(v) if v >= 0 => Some(v as u64), Content::I32(v) if v >= 0 => Some(v as u64), Content::I64(v) if v >= 0 => Some(v as u64), Content::I128(v) => { let rv = v as u64; if rv as i128 == v { Some(rv) } else { None } } _ => None, } } /// Returns the value as u128 pub fn as_u128(&self) -> Option { match *self.resolve_inner() { Content::U128(v) => Some(v), Content::I128(v) if v >= 0 => Some(v as u128), _ => self.as_u64().map(u128::from), } } /// Returns the value as i64 pub fn as_i64(&self) -> Option { match *self.resolve_inner() { Content::U8(v) => Some(i64::from(v)), Content::U16(v) => Some(i64::from(v)), Content::U32(v) => Some(i64::from(v)), Content::U64(v) => { let rv = v as i64; if rv as u64 == v { Some(rv) } else { None } } Content::U128(v) => { let rv = v as i64; if rv as u128 == v { Some(rv) } else { None } } Content::I8(v) => Some(i64::from(v)), Content::I16(v) => Some(i64::from(v)), Content::I32(v) => Some(i64::from(v)), Content::I64(v) => Some(v), Content::I128(v) => { let rv = v as i64; if rv as i128 == v { Some(rv) } else { None } } _ => None, } } /// Returns the value as i128 pub fn as_i128(&self) -> Option { match *self.resolve_inner() { Content::U128(v) => { let rv = v as i128; if rv as u128 == v { Some(rv) } else { None } } Content::I128(v) => Some(v), _ => self.as_i64().map(i128::from), } } /// Returns the value as f64 pub fn as_f64(&self) -> Option 
{ match *self.resolve_inner() { Content::F32(v) => Some(f64::from(v)), Content::F64(v) => Some(v), _ => None, } } /// Recursively walks the content structure mutably. /// /// The callback is invoked for every content in the tree. pub fn walk bool>(&mut self, visit: &mut F) { if !visit(self) { return; } match *self { Content::Some(ref mut inner) => { Self::walk(&mut *inner, visit); } Content::NewtypeStruct(_, ref mut inner) => { Self::walk(&mut *inner, visit); } Content::NewtypeVariant(_, _, _, ref mut inner) => { Self::walk(&mut *inner, visit); } Content::Seq(ref mut vec) => { for inner in vec.iter_mut() { Self::walk(inner, visit); } } Content::Map(ref mut vec) => { for inner in vec.iter_mut() { Self::walk(&mut inner.0, visit); Self::walk(&mut inner.1, visit); } } Content::Struct(_, ref mut vec) => { for inner in vec.iter_mut() { Self::walk(&mut inner.1, visit); } } Content::StructVariant(_, _, _, ref mut vec) => { for inner in vec.iter_mut() { Self::walk(&mut inner.1, visit); } } Content::Tuple(ref mut vec) => { for inner in vec.iter_mut() { Self::walk(inner, visit); } } Content::TupleStruct(_, ref mut vec) => { for inner in vec.iter_mut() { Self::walk(inner, visit); } } Content::TupleVariant(_, _, _, ref mut vec) => { for inner in vec.iter_mut() { Self::walk(inner, visit); } } _ => {} } } } insta-1.39.0/src/content/serialization.rs000064400000000000000000000402661046102023000165300ustar 00000000000000use std::cmp::Ordering; use std::marker::PhantomData; use crate::content::Content; use serde::{ser, Serialize, Serializer}; #[derive(PartialEq, PartialOrd, Debug)] pub enum Key<'a> { Bool(bool), U64(u64), I64(i64), F64(f64), U128(u128), I128(i128), Str(&'a str), Bytes(&'a [u8]), Other, } impl<'a> Eq for Key<'a> {} impl<'a> Ord for Key<'a> { fn cmp(&self, other: &Self) -> Ordering { self.partial_cmp(other).unwrap_or(Ordering::Less) } } impl Content { pub(crate) fn as_key(&self) -> Key<'_> { match *self.resolve_inner() { Content::Bool(val) => Key::Bool(val), 
Content::Char(val) => Key::U64(val as u64), Content::U16(val) => Key::U64(val.into()), Content::U32(val) => Key::U64(val.into()), Content::U64(val) => Key::U64(val), Content::U128(val) => Key::U128(val), Content::I16(val) => Key::I64(val.into()), Content::I32(val) => Key::I64(val.into()), Content::I64(val) => Key::I64(val), Content::I128(val) => Key::I128(val), Content::F32(val) => Key::F64(val.into()), Content::F64(val) => Key::F64(val), Content::String(ref val) => Key::Str(val.as_str()), Content::Bytes(ref val) => Key::Bytes(&val[..]), _ => Key::Other, } } pub(crate) fn sort_maps(&mut self) { self.walk(&mut |content| { if let Content::Map(ref mut items) = content { // try to compare by key first, if that fails compare by the // object value. That way some values normalize, and if we // can't normalize we still have a stable order. items.sort_by(|a, b| match (a.0.as_key(), b.0.as_key()) { (Key::Other, _) | (_, Key::Other) => { a.0.partial_cmp(&b.0).unwrap_or(Ordering::Equal) } (ref a, ref b) => a.cmp(b), }) } true }) } } #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] impl Serialize for Content { fn serialize(&self, serializer: S) -> Result where S: Serializer, { match *self { Content::Bool(b) => serializer.serialize_bool(b), Content::U8(u) => serializer.serialize_u8(u), Content::U16(u) => serializer.serialize_u16(u), Content::U32(u) => serializer.serialize_u32(u), Content::U64(u) => serializer.serialize_u64(u), Content::U128(u) => serializer.serialize_u128(u), Content::I8(i) => serializer.serialize_i8(i), Content::I16(i) => serializer.serialize_i16(i), Content::I32(i) => serializer.serialize_i32(i), Content::I64(i) => serializer.serialize_i64(i), Content::I128(i) => serializer.serialize_i128(i), Content::F32(f) => serializer.serialize_f32(f), Content::F64(f) => serializer.serialize_f64(f), Content::Char(c) => serializer.serialize_char(c), Content::String(ref s) => serializer.serialize_str(s), Content::Bytes(ref b) => serializer.serialize_bytes(b), Content::None 
=> serializer.serialize_none(), Content::Some(ref c) => serializer.serialize_some(&**c), Content::Unit => serializer.serialize_unit(), Content::UnitStruct(n) => serializer.serialize_unit_struct(n), Content::UnitVariant(n, i, v) => serializer.serialize_unit_variant(n, i, v), Content::NewtypeStruct(n, ref c) => serializer.serialize_newtype_struct(n, &**c), Content::NewtypeVariant(n, i, v, ref c) => { serializer.serialize_newtype_variant(n, i, v, &**c) } Content::Seq(ref elements) => elements.serialize(serializer), Content::Tuple(ref elements) => { use serde::ser::SerializeTuple; let mut tuple = serializer.serialize_tuple(elements.len())?; for e in elements { tuple.serialize_element(e)?; } tuple.end() } Content::TupleStruct(n, ref fields) => { use serde::ser::SerializeTupleStruct; let mut ts = serializer.serialize_tuple_struct(n, fields.len())?; for f in fields { ts.serialize_field(f)?; } ts.end() } Content::TupleVariant(n, i, v, ref fields) => { use serde::ser::SerializeTupleVariant; let mut tv = serializer.serialize_tuple_variant(n, i, v, fields.len())?; for f in fields { tv.serialize_field(f)?; } tv.end() } Content::Map(ref entries) => { use serde::ser::SerializeMap; let mut map = serializer.serialize_map(Some(entries.len()))?; for (k, v) in entries { map.serialize_entry(k, v)?; } map.end() } Content::Struct(n, ref fields) => { use serde::ser::SerializeStruct; let mut s = serializer.serialize_struct(n, fields.len())?; for &(k, ref v) in fields { s.serialize_field(k, v)?; } s.end() } Content::StructVariant(n, i, v, ref fields) => { use serde::ser::SerializeStructVariant; let mut sv = serializer.serialize_struct_variant(n, i, v, fields.len())?; for &(k, ref v) in fields { sv.serialize_field(k, v)?; } sv.end() } } } } pub struct ContentSerializer { error: PhantomData, } impl ContentSerializer { pub fn new() -> Self { ContentSerializer { error: PhantomData } } } impl Serializer for ContentSerializer where E: ser::Error, { type Ok = Content; type Error = E; type 
SerializeSeq = SerializeSeq; type SerializeTuple = SerializeTuple; type SerializeTupleStruct = SerializeTupleStruct; type SerializeTupleVariant = SerializeTupleVariant; type SerializeMap = SerializeMap; type SerializeStruct = SerializeStruct; type SerializeStructVariant = SerializeStructVariant; fn serialize_bool(self, v: bool) -> Result { Ok(Content::Bool(v)) } fn serialize_i8(self, v: i8) -> Result { Ok(Content::I8(v)) } fn serialize_i16(self, v: i16) -> Result { Ok(Content::I16(v)) } fn serialize_i32(self, v: i32) -> Result { Ok(Content::I32(v)) } fn serialize_i64(self, v: i64) -> Result { Ok(Content::I64(v)) } fn serialize_i128(self, v: i128) -> Result { Ok(Content::I128(v)) } fn serialize_u8(self, v: u8) -> Result { Ok(Content::U8(v)) } fn serialize_u16(self, v: u16) -> Result { Ok(Content::U16(v)) } fn serialize_u32(self, v: u32) -> Result { Ok(Content::U32(v)) } fn serialize_u64(self, v: u64) -> Result { Ok(Content::U64(v)) } fn serialize_u128(self, v: u128) -> Result { Ok(Content::U128(v)) } fn serialize_f32(self, v: f32) -> Result { Ok(Content::F32(v)) } fn serialize_f64(self, v: f64) -> Result { Ok(Content::F64(v)) } fn serialize_char(self, v: char) -> Result { Ok(Content::Char(v)) } fn serialize_str(self, value: &str) -> Result { Ok(Content::String(value.to_owned())) } fn serialize_bytes(self, value: &[u8]) -> Result { Ok(Content::Bytes(value.to_owned())) } fn serialize_none(self) -> Result { Ok(Content::None) } fn serialize_some(self, value: &T) -> Result where T: Serialize + ?Sized, { Ok(Content::Some(Box::new(value.serialize(self)?))) } fn serialize_unit(self) -> Result { Ok(Content::Unit) } fn serialize_unit_struct(self, name: &'static str) -> Result { Ok(Content::UnitStruct(name)) } fn serialize_unit_variant( self, name: &'static str, variant_index: u32, variant: &'static str, ) -> Result { Ok(Content::UnitVariant(name, variant_index, variant)) } fn serialize_newtype_struct(self, name: &'static str, value: &T) -> Result where T: Serialize + ?Sized, 
{ Ok(Content::NewtypeStruct( name, Box::new(value.serialize(self)?), )) } fn serialize_newtype_variant( self, name: &'static str, variant_index: u32, variant: &'static str, value: &T, ) -> Result where T: Serialize + ?Sized, { Ok(Content::NewtypeVariant( name, variant_index, variant, Box::new(value.serialize(self)?), )) } fn serialize_seq(self, len: Option) -> Result { Ok(SerializeSeq { elements: Vec::with_capacity(len.unwrap_or(0)), error: PhantomData, }) } fn serialize_tuple(self, len: usize) -> Result { Ok(SerializeTuple { elements: Vec::with_capacity(len), error: PhantomData, }) } fn serialize_tuple_struct( self, name: &'static str, len: usize, ) -> Result { Ok(SerializeTupleStruct { name, fields: Vec::with_capacity(len), error: PhantomData, }) } fn serialize_tuple_variant( self, name: &'static str, variant_index: u32, variant: &'static str, len: usize, ) -> Result { Ok(SerializeTupleVariant { name, variant_index, variant, fields: Vec::with_capacity(len), error: PhantomData, }) } fn serialize_map(self, len: Option) -> Result { Ok(SerializeMap { entries: Vec::with_capacity(len.unwrap_or(0)), key: None, error: PhantomData, }) } fn serialize_struct(self, name: &'static str, len: usize) -> Result { Ok(SerializeStruct { name, fields: Vec::with_capacity(len), error: PhantomData, }) } fn serialize_struct_variant( self, name: &'static str, variant_index: u32, variant: &'static str, len: usize, ) -> Result { Ok(SerializeStructVariant { name, variant_index, variant, fields: Vec::with_capacity(len), error: PhantomData, }) } } pub struct SerializeSeq { elements: Vec, error: PhantomData, } impl ser::SerializeSeq for SerializeSeq where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_element(&mut self, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let value = value.serialize(ContentSerializer::::new())?; self.elements.push(value); Ok(()) } fn end(self) -> Result { Ok(Content::Seq(self.elements)) } } pub struct SerializeTuple { elements: Vec, 
error: PhantomData, } impl ser::SerializeTuple for SerializeTuple where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_element(&mut self, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let value = value.serialize(ContentSerializer::::new())?; self.elements.push(value); Ok(()) } fn end(self) -> Result { Ok(Content::Tuple(self.elements)) } } pub struct SerializeTupleStruct { name: &'static str, fields: Vec, error: PhantomData, } impl ser::SerializeTupleStruct for SerializeTupleStruct where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_field(&mut self, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let value = value.serialize(ContentSerializer::::new())?; self.fields.push(value); Ok(()) } fn end(self) -> Result { Ok(Content::TupleStruct(self.name, self.fields)) } } pub struct SerializeTupleVariant { name: &'static str, variant_index: u32, variant: &'static str, fields: Vec, error: PhantomData, } impl ser::SerializeTupleVariant for SerializeTupleVariant where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_field(&mut self, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let value = value.serialize(ContentSerializer::::new())?; self.fields.push(value); Ok(()) } fn end(self) -> Result { Ok(Content::TupleVariant( self.name, self.variant_index, self.variant, self.fields, )) } } pub struct SerializeMap { entries: Vec<(Content, Content)>, key: Option, error: PhantomData, } impl ser::SerializeMap for SerializeMap where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_key(&mut self, key: &T) -> Result<(), E> where T: Serialize + ?Sized, { let key = key.serialize(ContentSerializer::::new())?; self.key = Some(key); Ok(()) } fn serialize_value(&mut self, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let key = self .key .take() .expect("serialize_value called before serialize_key"); let value = value.serialize(ContentSerializer::::new())?; self.entries.push((key, 
value)); Ok(()) } fn end(self) -> Result { Ok(Content::Map(self.entries)) } fn serialize_entry(&mut self, key: &K, value: &V) -> Result<(), E> where K: Serialize + ?Sized, V: Serialize + ?Sized, { let key = key.serialize(ContentSerializer::::new())?; let value = value.serialize(ContentSerializer::::new())?; self.entries.push((key, value)); Ok(()) } } pub struct SerializeStruct { name: &'static str, fields: Vec<(&'static str, Content)>, error: PhantomData, } impl ser::SerializeStruct for SerializeStruct where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let value = value.serialize(ContentSerializer::::new())?; self.fields.push((key, value)); Ok(()) } fn end(self) -> Result { Ok(Content::Struct(self.name, self.fields)) } } pub struct SerializeStructVariant { name: &'static str, variant_index: u32, variant: &'static str, fields: Vec<(&'static str, Content)>, error: PhantomData, } impl ser::SerializeStructVariant for SerializeStructVariant where E: ser::Error, { type Ok = Content; type Error = E; fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), E> where T: Serialize + ?Sized, { let value = value.serialize(ContentSerializer::::new())?; self.fields.push((key, value)); Ok(()) } fn end(self) -> Result { Ok(Content::StructVariant( self.name, self.variant_index, self.variant, self.fields, )) } } insta-1.39.0/src/content/yaml/mod.rs000064400000000000000000000117511046102023000153710ustar 00000000000000pub mod vendored; use std::path::Path; use crate::content::{Content, Error}; use crate::content::yaml::vendored::{yaml::Hash as YamlObj, Yaml as YamlValue}; pub fn parse_str(s: &str, filename: &Path) -> Result { let mut blobs = crate::content::yaml::vendored::yaml::YamlLoader::load_from_str(s) .map_err(|_| Error::FailedParsingYaml(filename.to_path_buf()))?; match (blobs.pop(), blobs.pop()) { (Some(blob), None) => from_yaml_blob(blob, 
filename), _ => Err(Error::FailedParsingYaml(filename.to_path_buf())), } } fn from_yaml_blob(blob: YamlValue, filename: &Path) -> Result { match blob { YamlValue::Null => Ok(Content::None), YamlValue::Boolean(b) => Ok(Content::from(b)), YamlValue::Integer(num) => Ok(Content::from(num)), YamlValue::Real(real_str) => { let real: f64 = real_str.parse().unwrap(); Ok(Content::from(real)) } YamlValue::String(s) => Ok(Content::from(s)), YamlValue::Array(seq) => { let seq = seq .into_iter() .map(|x| from_yaml_blob(x, filename)) .collect::>()?; Ok(Content::Seq(seq)) } YamlValue::Hash(obj) => { let obj = obj .into_iter() .map(|(k, v)| Ok((from_yaml_blob(k, filename)?, from_yaml_blob(v, filename)?))) .collect::>()?; Ok(Content::Map(obj)) } YamlValue::BadValue => Err(Error::FailedParsingYaml(filename.to_path_buf())), } } pub fn to_string(content: &Content) -> String { let yaml_blob = to_yaml_value(content); let mut buf = String::new(); let mut emitter = crate::content::yaml::vendored::emitter::YamlEmitter::new(&mut buf); emitter.dump(&yaml_blob).unwrap(); if !buf.ends_with('\n') { buf.push('\n'); } buf } fn to_yaml_value(content: &Content) -> YamlValue { fn translate_seq(seq: &[Content]) -> YamlValue { let seq = seq.iter().map(to_yaml_value).collect(); YamlValue::Array(seq) } fn translate_fields(fields: &[(&str, Content)]) -> YamlValue { let fields = fields .iter() .map(|(k, v)| (YamlValue::String(k.to_string()), to_yaml_value(v))) .collect(); YamlValue::Hash(fields) } match content { Content::Bool(b) => YamlValue::Boolean(*b), Content::U8(n) => YamlValue::Integer(i64::from(*n)), Content::U16(n) => YamlValue::Integer(i64::from(*n)), Content::U32(n) => YamlValue::Integer(i64::from(*n)), Content::U64(n) => YamlValue::Real(n.to_string()), Content::U128(n) => YamlValue::Real(n.to_string()), Content::I8(n) => YamlValue::Integer(i64::from(*n)), Content::I16(n) => YamlValue::Integer(i64::from(*n)), Content::I32(n) => YamlValue::Integer(i64::from(*n)), Content::I64(n) => 
YamlValue::Integer(*n), Content::I128(n) => YamlValue::Real(n.to_string()), Content::F32(f) => YamlValue::Real(f.to_string()), Content::F64(f) => YamlValue::Real(f.to_string()), Content::Char(c) => YamlValue::String(c.to_string()), Content::String(s) => YamlValue::String(s.to_owned()), Content::Bytes(bytes) => { let bytes = bytes .iter() .map(|b| YamlValue::Integer(i64::from(*b))) .collect(); YamlValue::Array(bytes) } Content::None | Content::Unit | Content::UnitStruct(_) => YamlValue::Null, Content::Some(content) => to_yaml_value(content), Content::UnitVariant(_, _, variant) => YamlValue::String(variant.to_string()), Content::NewtypeStruct(_, content) => to_yaml_value(content), Content::NewtypeVariant(_, _, variant, content) => { let mut obj = YamlObj::new(); obj.insert( YamlValue::String(variant.to_string()), to_yaml_value(content), ); YamlValue::Hash(obj) } Content::Seq(seq) => translate_seq(seq), Content::Tuple(seq) => translate_seq(seq), Content::TupleStruct(_, seq) => translate_seq(seq), Content::TupleVariant(_, _, variant, seq) => { let mut obj = YamlObj::new(); obj.insert(YamlValue::String(variant.to_string()), translate_seq(seq)); YamlValue::Hash(obj) } Content::Map(map) => { let map = map .iter() .map(|(k, v)| (to_yaml_value(k), to_yaml_value(v))) .collect(); YamlValue::Hash(map) } Content::Struct(_name, fields) => translate_fields(fields), Content::StructVariant(_, _, variant, fields) => { let mut obj = YamlObj::new(); obj.insert( YamlValue::String(variant.to_string()), translate_fields(fields), ); YamlValue::Hash(obj) } } } insta-1.39.0/src/content/yaml/vendored/emitter.rs000064400000000000000000000357141046102023000200760ustar 00000000000000use crate::content::yaml::vendored::yaml::{Hash, Yaml}; use std::convert::From; use std::error::Error; use std::fmt::{self, Display}; #[derive(Copy, Clone, Debug)] pub enum EmitError { FmtError(fmt::Error), } impl Error for EmitError { fn cause(&self) -> Option<&dyn Error> { None } } impl Display for EmitError { fn 
fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { match *self { EmitError::FmtError(ref err) => Display::fmt(err, formatter), } } } impl From for EmitError { fn from(f: fmt::Error) -> Self { EmitError::FmtError(f) } } pub struct YamlEmitter<'a> { writer: &'a mut dyn fmt::Write, best_indent: usize, compact: bool, level: isize, } pub type EmitResult = Result<(), EmitError>; // from serialize::json fn escape_str(wr: &mut dyn fmt::Write, v: &str) -> Result<(), fmt::Error> { wr.write_str("\"")?; let mut start = 0; for (i, byte) in v.bytes().enumerate() { let escaped = match byte { b'"' => "\\\"", b'\\' => "\\\\", b'\x00' => "\\u0000", b'\x01' => "\\u0001", b'\x02' => "\\u0002", b'\x03' => "\\u0003", b'\x04' => "\\u0004", b'\x05' => "\\u0005", b'\x06' => "\\u0006", b'\x07' => "\\u0007", b'\x08' => "\\b", b'\t' => "\\t", b'\n' => "\\n", b'\x0b' => "\\u000b", b'\x0c' => "\\f", b'\r' => "\\r", b'\x0e' => "\\u000e", b'\x0f' => "\\u000f", b'\x10' => "\\u0010", b'\x11' => "\\u0011", b'\x12' => "\\u0012", b'\x13' => "\\u0013", b'\x14' => "\\u0014", b'\x15' => "\\u0015", b'\x16' => "\\u0016", b'\x17' => "\\u0017", b'\x18' => "\\u0018", b'\x19' => "\\u0019", b'\x1a' => "\\u001a", b'\x1b' => "\\u001b", b'\x1c' => "\\u001c", b'\x1d' => "\\u001d", b'\x1e' => "\\u001e", b'\x1f' => "\\u001f", b'\x7f' => "\\u007f", _ => continue, }; if start < i { wr.write_str(&v[start..i])?; } wr.write_str(escaped)?; start = i + 1; } if start != v.len() { wr.write_str(&v[start..])?; } wr.write_str("\"")?; Ok(()) } impl<'a> YamlEmitter<'a> { pub fn new(writer: &'a mut dyn fmt::Write) -> YamlEmitter { YamlEmitter { writer, best_indent: 2, compact: true, level: -1, } } pub fn dump(&mut self, doc: &Yaml) -> EmitResult { // write DocumentStart writeln!(self.writer, "---")?; self.level = -1; self.emit_node(doc) } fn write_indent(&mut self) -> EmitResult { if self.level <= 0 { return Ok(()); } for _ in 0..self.level { for _ in 0..self.best_indent { write!(self.writer, " ")?; } } Ok(()) } fn 
emit_node(&mut self, node: &Yaml) -> EmitResult { match *node { Yaml::Array(ref v) => self.emit_array(v), Yaml::Hash(ref h) => self.emit_hash(h), Yaml::String(ref v) => { if need_quotes(v) { escape_str(self.writer, v)?; } else { write!(self.writer, "{}", v)?; } Ok(()) } Yaml::Boolean(v) => { if v { self.writer.write_str("true")?; } else { self.writer.write_str("false")?; } Ok(()) } Yaml::Integer(v) => { write!(self.writer, "{}", v)?; Ok(()) } Yaml::Real(ref v) => { write!(self.writer, "{}", v)?; Ok(()) } Yaml::Null | Yaml::BadValue => { write!(self.writer, "~")?; Ok(()) } } } fn emit_array(&mut self, v: &[Yaml]) -> EmitResult { if v.is_empty() { write!(self.writer, "[]")?; } else { self.level += 1; for (cnt, x) in v.iter().enumerate() { if cnt > 0 { writeln!(self.writer)?; self.write_indent()?; } write!(self.writer, "-")?; self.emit_val(true, x)?; } self.level -= 1; } Ok(()) } fn emit_hash(&mut self, h: &Hash) -> EmitResult { if h.is_empty() { self.writer.write_str("{}")?; } else { self.level += 1; for (cnt, (k, v)) in h.iter().enumerate() { let complex_key = matches!(*k, Yaml::Hash(_) | Yaml::Array(_)); if cnt > 0 { writeln!(self.writer)?; self.write_indent()?; } if complex_key { write!(self.writer, "?")?; self.emit_val(true, k)?; writeln!(self.writer)?; self.write_indent()?; write!(self.writer, ":")?; self.emit_val(true, v)?; } else { self.emit_node(k)?; write!(self.writer, ":")?; self.emit_val(false, v)?; } } self.level -= 1; } Ok(()) } /// Emit a yaml as a hash or array value: i.e., which should appear /// following a ":" or "-", either after a space, or on a new line. /// If `inline` is true, then the preceding characters are distinct /// and short enough to respect the compact flag. 
fn emit_val(&mut self, inline: bool, val: &Yaml) -> EmitResult { match *val { Yaml::Array(ref v) => { if (inline && self.compact) || v.is_empty() { write!(self.writer, " ")?; } else { writeln!(self.writer)?; self.level += 1; self.write_indent()?; self.level -= 1; } self.emit_array(v) } Yaml::Hash(ref h) => { if (inline && self.compact) || h.is_empty() { write!(self.writer, " ")?; } else { writeln!(self.writer)?; self.level += 1; self.write_indent()?; self.level -= 1; } self.emit_hash(h) } _ => { write!(self.writer, " ")?; self.emit_node(val) } } } } /// Check if the string requires quoting. /// Strings starting with any of the following characters must be quoted. /// :, &, *, ?, |, -, <, >, =, !, %, @ /// Strings containing any of the following characters must be quoted. /// {, }, [, ], ,, #, ` /// /// If the string contains any of the following control characters, it must be escaped with double quotes: /// \0, \x01, \x02, \x03, \x04, \x05, \x06, \a, \b, \t, \n, \v, \f, \r, \x0e, \x0f, \x10, \x11, \x12, \x13, \x14, \x15, \x16, \x17, \x18, \x19, \x1a, \e, \x1c, \x1d, \x1e, \x1f, \N, \_, \L, \P /// /// Finally, there are other cases when the strings must be quoted, no matter if you're using single or double quotes: /// * When the string is true or false (otherwise, it would be treated as a boolean value); /// * When the string is null or ~ (otherwise, it would be considered as a null value); /// * When the string looks like a number, such as integers (e.g. 2, 14, etc.), floats (e.g. 2.6, 14.9) and exponential numbers (e.g. 12e7, etc.) (otherwise, it would be treated as a numeric value); /// * When the string looks like a date (e.g. 2014-12-31) (otherwise it would be automatically converted into a Unix timestamp). 
fn need_quotes(string: &str) -> bool { fn need_quotes_spaces(string: &str) -> bool { string.starts_with(' ') || string.ends_with(' ') } string.is_empty() || need_quotes_spaces(string) || string.starts_with(|character: char| { matches!( character, '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' ) }) || string.contains(|character: char| { matches!(character, ':' | '{' | '}' | '[' | ']' | ',' | '#' | '`' | '\"' | '\'' | '\\' | '\0'..='\x06' | '\t' | '\n' | '\r' | '\x0e'..='\x1a' | '\x1c'..='\x1f') }) || [ // http://yaml.org/type/bool.html // Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse // them as string, not booleans, although it is violating the YAML 1.1 specification. // See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088. "yes", "Yes", "YES", "no", "No", "NO", "True", "TRUE", "true", "False", "FALSE", "false", "on", "On", "ON", "off", "Off", "OFF", // http://yaml.org/type/null.html "null", "Null", "NULL", "~", ] .contains(&string) || string.starts_with('.') || string.starts_with("0x") || string.parse::().is_ok() || string.parse::().is_ok() } #[cfg(test)] mod test { use super::*; use crate::content::yaml::vendored::yaml::YamlLoader; #[test] fn test_emit_simple() { let s = " # comment a0 bb: val a1: b1: 4 b2: d a2: 4 # i'm comment a3: [1, 2, 3] a4: - [a1, a2] - 2 "; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } println!("original:\n{}", s); println!("emitted:\n{}", writer); let docs_new = match YamlLoader::load_from_str(&writer) { Ok(y) => y, Err(e) => panic!("{}", e), }; let doc_new = &docs_new[0]; assert_eq!(doc, doc_new); } #[test] fn test_emit_complex() { let s = r#" catalogue: product: &coffee { name: Coffee, price: 2.5 , unit: 1l } product: &cookies { name: Cookies!, price: 3.40 , unit: 400g} products: *coffee: amount: 4 *cookies: amount: 4 
[1,2,3,4]: array key 2.4: real key true: bool key {}: empty hash key "#; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } let docs_new = match YamlLoader::load_from_str(&writer) { Ok(y) => y, Err(e) => panic!("{}", e), }; let doc_new = &docs_new[0]; assert_eq!(doc, doc_new); } #[test] fn test_emit_avoid_quotes() { let s = r#"--- a7: 你好 boolean: "true" boolean2: "false" date: 2014-12-31 empty_string: "" empty_string1: " " empty_string2: " a" empty_string3: " a " exp: "12e7" field: ":" field2: "{" field3: "\\" field4: "\n" field5: "can't avoid quote" float: "2.6" int: "4" nullable: "null" nullable2: "~" products: "*coffee": amount: 4 "*cookies": amount: 4 ".milk": amount: 1 "2.4": real key "[1,2,3,4]": array key "true": bool key "{}": empty hash key x: test y: avoid quoting here z: string with spaces"#; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } assert_eq!(s, writer, "actual:\n\n{}\n", writer); } #[test] fn emit_quoted_bools() { let input = r#"--- string0: yes string1: no string2: "true" string3: "false" string4: "~" null0: ~ [true, false]: real_bools [True, TRUE, False, FALSE, y,Y,yes,Yes,YES,n,N,no,No,NO,on,On,ON,off,Off,OFF]: false_bools bool0: true bool1: false"#; let expected = r#"--- string0: "yes" string1: "no" string2: "true" string3: "false" string4: "~" null0: ~ ? - true - false : real_bools ? 
- "True" - "TRUE" - "False" - "FALSE" - y - Y - "yes" - "Yes" - "YES" - n - N - "no" - "No" - "NO" - "on" - "On" - "ON" - "off" - "Off" - "OFF" : false_bools bool0: true bool1: false"#; let docs = YamlLoader::load_from_str(input).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } assert_eq!( expected, writer, "expected:\n{}\nactual:\n{}\n", expected, writer ); } #[test] fn test_empty_and_nested_compact() { let s = r#"--- a: b: c: hello d: {} e: - f - g - h: []"#; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } assert_eq!(s, writer); } #[test] fn test_nested_arrays() { let s = r#"--- a: - b - - c - d - - e - f"#; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } println!("original:\n{}", s); println!("emitted:\n{}", writer); assert_eq!(s, writer); } #[test] fn test_deeply_nested_arrays() { let s = r#"--- a: - b - - c - d - - e - - f - - e"#; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } println!("original:\n{}", s); println!("emitted:\n{}", writer); assert_eq!(s, writer); } #[test] fn test_nested_hashes() { let s = r#"--- a: b: c: d: e: f"#; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } println!("original:\n{}", s); println!("emitted:\n{}", writer); assert_eq!(s, writer); } } insta-1.39.0/src/content/yaml/vendored/mod.rs000064400000000000000000000033731046102023000172000ustar 00000000000000//! Copyright 2015, Yuheng Chen. Apache 2 licensed. //! 
//! This vendored code used to be yaml-rust. It's intended to be replaced in //! the next major version with a yaml-rust2 which is an actively maintained //! version of this. Is it has different snapshot formats and different //! MSRV requirements, we vendor it temporarily. #![allow(unused)] pub mod emitter; pub mod parser; pub mod scanner; pub mod yaml; pub use self::yaml::Yaml; #[cfg(test)] mod tests { use super::*; use crate::content::yaml::vendored::emitter::YamlEmitter; use crate::content::yaml::vendored::scanner::ScanError; use crate::content::yaml::vendored::yaml::YamlLoader; #[test] fn test_api() { let s = " # from yaml-cpp example - name: Ogre position: [0, 5, 0] powers: - name: Club damage: 10 - name: Fist damage: 8 - name: Dragon position: [1, 0, 10] powers: - name: Fire Breath damage: 25 - name: Claws damage: 15 - name: Wizard position: [5, -3, 0] powers: - name: Acid Rain damage: 50 - name: Staff damage: 3 "; let docs = YamlLoader::load_from_str(s).unwrap(); let doc = &docs[0]; assert_eq!(doc[0]["name"].as_str().unwrap(), "Ogre"); let mut writer = String::new(); { let mut emitter = YamlEmitter::new(&mut writer); emitter.dump(doc).unwrap(); } assert!(!writer.is_empty()); } fn try_fail(s: &str) -> Result, ScanError> { let t = YamlLoader::load_from_str(s)?; Ok(t) } #[test] fn test_fail() { let s = " # syntax error scalar key: [1, 2]] key1:a2 "; assert!(YamlLoader::load_from_str(s).is_err()); assert!(try_fail(s).is_err()); } } insta-1.39.0/src/content/yaml/vendored/parser.rs000064400000000000000000000672421046102023000177220ustar 00000000000000use crate::content::yaml::vendored::scanner::*; use std::collections::HashMap; #[derive(Clone, Copy, PartialEq, Debug, Eq)] enum State { StreamStart, ImplicitDocumentStart, DocumentStart, DocumentContent, DocumentEnd, BlockNode, // BlockNodeOrIndentlessSequence, // FlowNode, BlockSequenceFirstEntry, BlockSequenceEntry, IndentlessSequenceEntry, BlockMappingFirstKey, BlockMappingKey, BlockMappingValue, 
FlowSequenceFirstEntry, FlowSequenceEntry, FlowSequenceEntryMappingKey, FlowSequenceEntryMappingValue, FlowSequenceEntryMappingEnd, FlowMappingFirstKey, FlowMappingKey, FlowMappingValue, FlowMappingEmptyValue, End, } /// `Event` is used with the low-level event base parsing API, /// see `EventReceiver` trait. #[derive(Clone, PartialEq, Debug, Eq)] pub enum Event { /// Reserved for internal use StreamStart, StreamEnd, DocumentStart, DocumentEnd, /// Refer to an anchor ID Alias(usize), /// Value, style, anchor_id, tag Scalar(String, TScalarStyle, usize, Option), /// Anchor ID SequenceStart(usize), SequenceEnd, /// Anchor ID MappingStart(usize), MappingEnd, } impl Event { fn empty_scalar() -> Event { // a null scalar Event::Scalar("~".to_owned(), TScalarStyle::Plain, 0, None) } fn empty_scalar_with_anchor(anchor: usize, tag: Option) -> Event { Event::Scalar("".to_owned(), TScalarStyle::Plain, anchor, tag) } } #[derive(Debug)] pub struct Parser { scanner: Scanner, states: Vec, state: State, token: Option, current: Option<(Event, Marker)>, anchors: HashMap, anchor_id: usize, } pub trait EventReceiver { fn on_event(&mut self, ev: Event); } pub trait MarkedEventReceiver { fn on_event(&mut self, ev: Event, _mark: Marker); } impl MarkedEventReceiver for R { fn on_event(&mut self, ev: Event, _mark: Marker) { self.on_event(ev) } } pub type ParseResult = Result<(Event, Marker), ScanError>; impl> Parser { pub fn new(src: T) -> Parser { Parser { scanner: Scanner::new(src), states: Vec::new(), state: State::StreamStart, token: None, current: None, anchors: HashMap::new(), // valid anchor_id starts from 1 anchor_id: 1, } } pub fn next(&mut self) -> ParseResult { match self.current { None => self.parse(), Some(_) => Ok(self.current.take().unwrap()), } } fn peek_token(&mut self) -> Result<&Token, ScanError> { match self.token { None => { self.token = Some(self.scan_next_token()?); Ok(self.token.as_ref().unwrap()) } Some(ref tok) => Ok(tok), } } fn scan_next_token(&mut self) -> 
Result { let token = self.scanner.next(); match token { None => match self.scanner.get_error() { None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")), Some(e) => Err(e), }, Some(tok) => Ok(tok), } } fn fetch_token(&mut self) -> Token { self.token .take() .expect("fetch_token needs to be preceded by peek_token") } fn skip(&mut self) { self.token = None; //self.peek_token(); } fn pop_state(&mut self) { self.state = self.states.pop().unwrap() } fn push_state(&mut self, state: State) { self.states.push(state); } fn parse(&mut self) -> ParseResult { if self.state == State::End { return Ok((Event::StreamEnd, self.scanner.mark())); } let (ev, mark) = self.state_machine()?; // println!("EV {:?}", ev); Ok((ev, mark)) } pub fn load( &mut self, recv: &mut R, multi: bool, ) -> Result<(), ScanError> { if !self.scanner.stream_started() { let (ev, mark) = self.next()?; assert_eq!(ev, Event::StreamStart); recv.on_event(ev, mark); } if self.scanner.stream_ended() { // XXX has parsed? recv.on_event(Event::StreamEnd, self.scanner.mark()); return Ok(()); } loop { let (ev, mark) = self.next()?; if ev == Event::StreamEnd { recv.on_event(ev, mark); return Ok(()); } // clear anchors before a new document self.anchors.clear(); self.load_document(ev, mark, recv)?; if !multi { break; } } Ok(()) } fn load_document( &mut self, first_ev: Event, mark: Marker, recv: &mut R, ) -> Result<(), ScanError> { assert_eq!(first_ev, Event::DocumentStart); recv.on_event(first_ev, mark); let (ev, mark) = self.next()?; self.load_node(ev, mark, recv)?; // DOCUMENT-END is expected. let (ev, mark) = self.next()?; assert_eq!(ev, Event::DocumentEnd); recv.on_event(ev, mark); Ok(()) } fn load_node( &mut self, first_ev: Event, mark: Marker, recv: &mut R, ) -> Result<(), ScanError> { match first_ev { Event::Alias(..) | Event::Scalar(..) 
=> { recv.on_event(first_ev, mark); Ok(()) } Event::SequenceStart(_) => { recv.on_event(first_ev, mark); self.load_sequence(recv) } Event::MappingStart(_) => { recv.on_event(first_ev, mark); self.load_mapping(recv) } _ => { println!("UNREACHABLE EVENT: {:?}", first_ev); unreachable!(); } } } fn load_mapping(&mut self, recv: &mut R) -> Result<(), ScanError> { let (mut key_ev, mut key_mark) = self.next()?; while key_ev != Event::MappingEnd { // key self.load_node(key_ev, key_mark, recv)?; // value let (ev, mark) = self.next()?; self.load_node(ev, mark, recv)?; // next event let (ev, mark) = self.next()?; key_ev = ev; key_mark = mark; } recv.on_event(key_ev, key_mark); Ok(()) } fn load_sequence(&mut self, recv: &mut R) -> Result<(), ScanError> { let (mut ev, mut mark) = self.next()?; while ev != Event::SequenceEnd { self.load_node(ev, mark, recv)?; // next event let (next_ev, next_mark) = self.next()?; ev = next_ev; mark = next_mark; } recv.on_event(ev, mark); Ok(()) } fn state_machine(&mut self) -> ParseResult { // let next_tok = self.peek_token()?; // println!("cur_state {:?}, next tok: {:?}", self.state, next_tok); match self.state { State::StreamStart => self.stream_start(), State::ImplicitDocumentStart => self.document_start(true), State::DocumentStart => self.document_start(false), State::DocumentContent => self.document_content(), State::DocumentEnd => self.document_end(), State::BlockNode => self.parse_node(true, false), // State::BlockNodeOrIndentlessSequence => self.parse_node(true, true), // State::FlowNode => self.parse_node(false, false), State::BlockMappingFirstKey => self.block_mapping_key(true), State::BlockMappingKey => self.block_mapping_key(false), State::BlockMappingValue => self.block_mapping_value(), State::BlockSequenceFirstEntry => self.block_sequence_entry(true), State::BlockSequenceEntry => self.block_sequence_entry(false), State::FlowSequenceFirstEntry => self.flow_sequence_entry(true), State::FlowSequenceEntry => 
self.flow_sequence_entry(false), State::FlowMappingFirstKey => self.flow_mapping_key(true), State::FlowMappingKey => self.flow_mapping_key(false), State::FlowMappingValue => self.flow_mapping_value(false), State::IndentlessSequenceEntry => self.indentless_sequence_entry(), State::FlowSequenceEntryMappingKey => self.flow_sequence_entry_mapping_key(), State::FlowSequenceEntryMappingValue => self.flow_sequence_entry_mapping_value(), State::FlowSequenceEntryMappingEnd => self.flow_sequence_entry_mapping_end(), State::FlowMappingEmptyValue => self.flow_mapping_value(true), /* impossible */ State::End => unreachable!(), } } fn stream_start(&mut self) -> ParseResult { match *self.peek_token()? { Token(mark, TokenType::StreamStart(_)) => { self.state = State::ImplicitDocumentStart; self.skip(); Ok((Event::StreamStart, mark)) } Token(mark, _) => Err(ScanError::new(mark, "did not find expected ")), } } fn document_start(&mut self, implicit: bool) -> ParseResult { if !implicit { while let TokenType::DocumentEnd = self.peek_token()?.1 { self.skip(); } } match *self.peek_token()? { Token(mark, TokenType::StreamEnd) => { self.state = State::End; self.skip(); Ok((Event::StreamEnd, mark)) } Token(_, TokenType::VersionDirective(..)) | Token(_, TokenType::TagDirective(..)) | Token(_, TokenType::DocumentStart) => { // explicit document self._explicit_document_start() } Token(mark, _) if implicit => { self.parser_process_directives()?; self.push_state(State::DocumentEnd); self.state = State::BlockNode; Ok((Event::DocumentStart, mark)) } _ => { // explicit document self._explicit_document_start() } } } fn parser_process_directives(&mut self) -> Result<(), ScanError> { loop { match self.peek_token()?.1 { TokenType::VersionDirective(_, _) => { // XXX parsing with warning according to spec //if major != 1 || minor > 2 { // return Err(ScanError::new(tok.0, // "found incompatible YAML document")); //} } TokenType::TagDirective(..) 
=> { // TODO add tag directive } _ => break, } self.skip(); } // TODO tag directive Ok(()) } fn _explicit_document_start(&mut self) -> ParseResult { self.parser_process_directives()?; match *self.peek_token()? { Token(mark, TokenType::DocumentStart) => { self.push_state(State::DocumentEnd); self.state = State::DocumentContent; self.skip(); Ok((Event::DocumentStart, mark)) } Token(mark, _) => Err(ScanError::new( mark, "did not find expected ", )), } } fn document_content(&mut self) -> ParseResult { match *self.peek_token()? { Token(mark, TokenType::VersionDirective(..)) | Token(mark, TokenType::TagDirective(..)) | Token(mark, TokenType::DocumentStart) | Token(mark, TokenType::DocumentEnd) | Token(mark, TokenType::StreamEnd) => { self.pop_state(); // empty scalar Ok((Event::empty_scalar(), mark)) } _ => self.parse_node(true, false), } } fn document_end(&mut self) -> ParseResult { let mut _implicit = true; let marker: Marker = match *self.peek_token()? { Token(mark, TokenType::DocumentEnd) => { self.skip(); _implicit = false; mark } Token(mark, _) => mark, }; // TODO tag handling self.state = State::DocumentStart; Ok((Event::DocumentEnd, marker)) } fn register_anchor(&mut self, name: String, _: &Marker) -> Result { // anchors can be overridden/reused // if self.anchors.contains_key(name) { // return Err(ScanError::new(*mark, // "while parsing anchor, found duplicated anchor")); // } let new_id = self.anchor_id; self.anchor_id += 1; self.anchors.insert(name, new_id); Ok(new_id) } fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult { let mut anchor_id = 0; let mut tag = None; match *self.peek_token()? 
{ Token(_, TokenType::Alias(_)) => { self.pop_state(); if let Token(mark, TokenType::Alias(name)) = self.fetch_token() { match self.anchors.get(&name) { None => { return Err(ScanError::new( mark, "while parsing node, found unknown anchor", )) } Some(id) => return Ok((Event::Alias(*id), mark)), } } else { unreachable!() } } Token(_, TokenType::Anchor(_)) => { if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() { anchor_id = self.register_anchor(name, &mark)?; if let TokenType::Tag(..) = self.peek_token()?.1 { if let tg @ TokenType::Tag(..) = self.fetch_token().1 { tag = Some(tg); } else { unreachable!() } } } else { unreachable!() } } Token(_, TokenType::Tag(..)) => { if let tg @ TokenType::Tag(..) = self.fetch_token().1 { tag = Some(tg); if let TokenType::Anchor(_) = self.peek_token()?.1 { if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() { anchor_id = self.register_anchor(name, &mark)?; } else { unreachable!() } } } else { unreachable!() } } _ => {} } match *self.peek_token()? 
{ Token(mark, TokenType::BlockEntry) if indentless_sequence => { self.state = State::IndentlessSequenceEntry; Ok((Event::SequenceStart(anchor_id), mark)) } Token(_, TokenType::Scalar(..)) => { self.pop_state(); if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() { Ok((Event::Scalar(v, style, anchor_id, tag), mark)) } else { unreachable!() } } Token(mark, TokenType::FlowSequenceStart) => { self.state = State::FlowSequenceFirstEntry; Ok((Event::SequenceStart(anchor_id), mark)) } Token(mark, TokenType::FlowMappingStart) => { self.state = State::FlowMappingFirstKey; Ok((Event::MappingStart(anchor_id), mark)) } Token(mark, TokenType::BlockSequenceStart) if block => { self.state = State::BlockSequenceFirstEntry; Ok((Event::SequenceStart(anchor_id), mark)) } Token(mark, TokenType::BlockMappingStart) if block => { self.state = State::BlockMappingFirstKey; Ok((Event::MappingStart(anchor_id), mark)) } // ex 7.2, an empty scalar can follow a secondary tag Token(mark, _) if tag.is_some() || anchor_id > 0 => { self.pop_state(); Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark)) } Token(mark, _) => Err(ScanError::new( mark, "while parsing a node, did not find expected node content", )), } } fn block_mapping_key(&mut self, first: bool) -> ParseResult { // skip BlockMappingStart if first { let _ = self.peek_token()?; //self.marks.push(tok.0); self.skip(); } match *self.peek_token()? { Token(_, TokenType::Key) => { self.skip(); match *self.peek_token()? 
{ Token(mark, TokenType::Key) | Token(mark, TokenType::Value) | Token(mark, TokenType::BlockEnd) => { self.state = State::BlockMappingValue; // empty scalar Ok((Event::empty_scalar(), mark)) } _ => { self.push_state(State::BlockMappingValue); self.parse_node(true, true) } } } // XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18 Token(mark, TokenType::Value) => { self.state = State::BlockMappingValue; Ok((Event::empty_scalar(), mark)) } Token(mark, TokenType::BlockEnd) => { self.pop_state(); self.skip(); Ok((Event::MappingEnd, mark)) } Token(mark, _) => Err(ScanError::new( mark, "while parsing a block mapping, did not find expected key", )), } } fn block_mapping_value(&mut self) -> ParseResult { match *self.peek_token()? { Token(_, TokenType::Value) => { self.skip(); match *self.peek_token()? { Token(mark, TokenType::Key) | Token(mark, TokenType::Value) | Token(mark, TokenType::BlockEnd) => { self.state = State::BlockMappingKey; // empty scalar Ok((Event::empty_scalar(), mark)) } _ => { self.push_state(State::BlockMappingKey); self.parse_node(true, true) } } } Token(mark, _) => { self.state = State::BlockMappingKey; // empty scalar Ok((Event::empty_scalar(), mark)) } } } fn flow_mapping_key(&mut self, first: bool) -> ParseResult { if first { let _ = self.peek_token()?; self.skip(); } let marker: Marker = { match *self.peek_token()? { Token(mark, TokenType::FlowMappingEnd) => mark, Token(mark, _) => { if !first { match *self.peek_token()? { Token(_, TokenType::FlowEntry) => self.skip(), Token(mark, _) => return Err(ScanError::new(mark, "while parsing a flow mapping, did not find expected ',' or '}'")) } } match *self.peek_token()? { Token(_, TokenType::Key) => { self.skip(); match *self.peek_token()? 
{ Token(mark, TokenType::Value) | Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowMappingEnd) => { self.state = State::FlowMappingValue; return Ok((Event::empty_scalar(), mark)); } _ => { self.push_state(State::FlowMappingValue); return self.parse_node(false, false); } } } Token(marker, TokenType::Value) => { self.state = State::FlowMappingValue; return Ok((Event::empty_scalar(), marker)); } Token(_, TokenType::FlowMappingEnd) => (), _ => { self.push_state(State::FlowMappingEmptyValue); return self.parse_node(false, false); } } mark } } }; self.pop_state(); self.skip(); Ok((Event::MappingEnd, marker)) } fn flow_mapping_value(&mut self, empty: bool) -> ParseResult { let mark: Marker = { if empty { let Token(mark, _) = *self.peek_token()?; self.state = State::FlowMappingKey; return Ok((Event::empty_scalar(), mark)); } else { match *self.peek_token()? { Token(marker, TokenType::Value) => { self.skip(); match self.peek_token()?.1 { TokenType::FlowEntry | TokenType::FlowMappingEnd => {} _ => { self.push_state(State::FlowMappingKey); return self.parse_node(false, false); } } marker } Token(marker, _) => marker, } } }; self.state = State::FlowMappingKey; Ok((Event::empty_scalar(), mark)) } fn flow_sequence_entry(&mut self, first: bool) -> ParseResult { // skip FlowMappingStart if first { let _ = self.peek_token()?; //self.marks.push(tok.0); self.skip(); } match *self.peek_token()? { Token(mark, TokenType::FlowSequenceEnd) => { self.pop_state(); self.skip(); return Ok((Event::SequenceEnd, mark)); } Token(_, TokenType::FlowEntry) if !first => { self.skip(); } Token(mark, _) if !first => { return Err(ScanError::new( mark, "while parsing a flow sequence, expected ',' or ']'", )); } _ => { /* next */ } } match *self.peek_token()? 
{ Token(mark, TokenType::FlowSequenceEnd) => { self.pop_state(); self.skip(); Ok((Event::SequenceEnd, mark)) } Token(mark, TokenType::Key) => { self.state = State::FlowSequenceEntryMappingKey; self.skip(); Ok((Event::MappingStart(0), mark)) } _ => { self.push_state(State::FlowSequenceEntry); self.parse_node(false, false) } } } fn indentless_sequence_entry(&mut self) -> ParseResult { match *self.peek_token()? { Token(_, TokenType::BlockEntry) => (), Token(mark, _) => { self.pop_state(); return Ok((Event::SequenceEnd, mark)); } } self.skip(); match *self.peek_token()? { Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::Key) | Token(mark, TokenType::Value) | Token(mark, TokenType::BlockEnd) => { self.state = State::IndentlessSequenceEntry; Ok((Event::empty_scalar(), mark)) } _ => { self.push_state(State::IndentlessSequenceEntry); self.parse_node(true, false) } } } fn block_sequence_entry(&mut self, first: bool) -> ParseResult { // BLOCK-SEQUENCE-START if first { let _ = self.peek_token()?; //self.marks.push(tok.0); self.skip(); } match *self.peek_token()? { Token(mark, TokenType::BlockEnd) => { self.pop_state(); self.skip(); Ok((Event::SequenceEnd, mark)) } Token(_, TokenType::BlockEntry) => { self.skip(); match *self.peek_token()? { Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => { self.state = State::BlockSequenceEntry; Ok((Event::empty_scalar(), mark)) } _ => { self.push_state(State::BlockSequenceEntry); self.parse_node(true, false) } } } Token(mark, _) => Err(ScanError::new( mark, "while parsing a block collection, did not find expected '-' indicator", )), } } fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult { match *self.peek_token()? 
{ Token(mark, TokenType::Value) | Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => { self.skip(); self.state = State::FlowSequenceEntryMappingValue; Ok((Event::empty_scalar(), mark)) } _ => { self.push_state(State::FlowSequenceEntryMappingValue); self.parse_node(false, false) } } } fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult { match *self.peek_token()? { Token(_, TokenType::Value) => { self.skip(); self.state = State::FlowSequenceEntryMappingValue; match *self.peek_token()? { Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => { self.state = State::FlowSequenceEntryMappingEnd; Ok((Event::empty_scalar(), mark)) } _ => { self.push_state(State::FlowSequenceEntryMappingEnd); self.parse_node(false, false) } } } Token(mark, _) => { self.state = State::FlowSequenceEntryMappingEnd; Ok((Event::empty_scalar(), mark)) } } } fn flow_sequence_entry_mapping_end(&mut self) -> ParseResult { self.state = State::FlowSequenceEntry; Ok((Event::MappingEnd, self.scanner.mark())) } } insta-1.39.0/src/content/yaml/vendored/scanner.rs000064400000000000000000001753521046102023000200610ustar 00000000000000use std::collections::VecDeque; use std::error::Error; use std::{char, fmt}; #[derive(Clone, Copy, PartialEq, Debug, Eq)] pub enum TEncoding { Utf8, } #[derive(Clone, Copy, PartialEq, Debug, Eq)] pub enum TScalarStyle { Plain, SingleQuoted, DoubleQuoted, Literal, Foled, } #[derive(Clone, Copy, PartialEq, Debug, Eq)] pub struct Marker { index: usize, line: usize, col: usize, } impl Marker { fn new(index: usize, line: usize, col: usize) -> Marker { Marker { index, line, col } } } #[derive(Clone, PartialEq, Debug, Eq)] pub struct ScanError { mark: Marker, info: String, } impl ScanError { pub fn new(loc: Marker, info: &str) -> ScanError { ScanError { mark: loc, info: info.to_owned(), } } } impl Error for ScanError { fn description(&self) -> &str { self.info.as_ref() } fn cause(&self) -> Option<&dyn Error> { None } } 
impl fmt::Display for ScanError { // col starts from 0 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "{} at line {} column {}", self.info, self.mark.line, self.mark.col + 1 ) } } #[derive(Clone, PartialEq, Debug, Eq)] pub enum TokenType { StreamStart(TEncoding), StreamEnd, /// major, minor VersionDirective(u32, u32), /// handle, prefix TagDirective(String, String), DocumentStart, DocumentEnd, BlockSequenceStart, BlockMappingStart, BlockEnd, FlowSequenceStart, FlowSequenceEnd, FlowMappingStart, FlowMappingEnd, BlockEntry, FlowEntry, Key, Value, Alias(String), Anchor(String), /// handle, suffix Tag(String, String), Scalar(TScalarStyle, String), } #[derive(Clone, PartialEq, Debug, Eq)] pub struct Token(pub Marker, pub TokenType); #[derive(Clone, PartialEq, Debug, Eq)] struct SimpleKey { possible: bool, required: bool, token_number: usize, mark: Marker, } impl SimpleKey { fn new(mark: Marker) -> SimpleKey { SimpleKey { possible: false, required: false, token_number: 0, mark, } } } #[derive(Debug)] pub struct Scanner { rdr: T, mark: Marker, tokens: VecDeque, buffer: VecDeque, error: Option, stream_start_produced: bool, stream_end_produced: bool, adjacent_value_allowed_at: usize, simple_key_allowed: bool, simple_keys: Vec, indent: isize, indents: Vec, flow_level: u8, tokens_parsed: usize, token_available: bool, } impl> Iterator for Scanner { type Item = Token; fn next(&mut self) -> Option { if self.error.is_some() { return None; } match self.next_token() { Ok(tok) => tok, Err(e) => { self.error = Some(e); None } } } } #[inline] fn is_z(c: char) -> bool { c == '\0' } #[inline] fn is_break(c: char) -> bool { c == '\n' || c == '\r' } #[inline] fn is_breakz(c: char) -> bool { is_break(c) || is_z(c) } #[inline] fn is_blank(c: char) -> bool { c == ' ' || c == '\t' } #[inline] fn is_blankz(c: char) -> bool { is_blank(c) || is_breakz(c) } #[inline] fn is_digit(c: char) -> bool { c.is_ascii_digit() } #[inline] fn is_alpha(c: char) -> bool { 
matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z' | '_' | '-') } #[inline] fn is_hex(c: char) -> bool { c.is_ascii_digit() || ('a'..='f').contains(&c) || ('A'..='F').contains(&c) } #[inline] fn as_hex(c: char) -> u32 { match c { '0'..='9' => (c as u32) - ('0' as u32), 'a'..='f' => (c as u32) - ('a' as u32) + 10, 'A'..='F' => (c as u32) - ('A' as u32) + 10, _ => unreachable!(), } } #[inline] fn is_flow(c: char) -> bool { matches!(c, ',' | '[' | ']' | '{' | '}') } pub type ScanResult = Result<(), ScanError>; impl> Scanner { /// Creates the YAML tokenizer. pub fn new(rdr: T) -> Scanner { Scanner { rdr, buffer: VecDeque::new(), mark: Marker::new(0, 1, 0), tokens: VecDeque::new(), error: None, stream_start_produced: false, stream_end_produced: false, adjacent_value_allowed_at: 0, simple_key_allowed: true, simple_keys: Vec::new(), indent: -1, indents: Vec::new(), flow_level: 0, tokens_parsed: 0, token_available: false, } } #[inline] pub fn get_error(&self) -> Option { self.error.clone() } #[inline] fn lookahead(&mut self, count: usize) { if self.buffer.len() >= count { return; } for _ in 0..(count - self.buffer.len()) { self.buffer.push_back(self.rdr.next().unwrap_or('\0')); } } #[inline] fn skip(&mut self) { let c = self.buffer.pop_front().unwrap(); self.mark.index += 1; if c == '\n' { self.mark.line += 1; self.mark.col = 0; } else { self.mark.col += 1; } } #[inline] fn skip_line(&mut self) { if self.buffer[0] == '\r' && self.buffer[1] == '\n' { self.skip(); self.skip(); } else if is_break(self.buffer[0]) { self.skip(); } } #[inline] fn ch(&self) -> char { self.buffer[0] } #[inline] fn ch_is(&self, c: char) -> bool { self.buffer[0] == c } #[allow(dead_code)] #[inline] fn eof(&self) -> bool { self.ch_is('\0') } #[inline] pub fn stream_started(&self) -> bool { self.stream_start_produced } #[inline] pub fn stream_ended(&self) -> bool { self.stream_end_produced } #[inline] pub fn mark(&self) -> Marker { self.mark } #[inline] fn read_break(&mut self, s: &mut String) { if 
self.buffer[0] == '\r' && self.buffer[1] == '\n' { s.push('\n'); self.skip(); self.skip(); } else if self.buffer[0] == '\r' || self.buffer[0] == '\n' { s.push('\n'); self.skip(); } else { unreachable!(); } } fn insert_token(&mut self, pos: usize, tok: Token) { let old_len = self.tokens.len(); assert!(pos <= old_len); self.tokens.push_back(tok); for i in 0..old_len - pos { self.tokens.swap(old_len - i, old_len - i - 1); } } fn allow_simple_key(&mut self) { self.simple_key_allowed = true; } fn disallow_simple_key(&mut self) { self.simple_key_allowed = false; } pub fn fetch_next_token(&mut self) -> ScanResult { self.lookahead(1); // println!("--> fetch_next_token Cur {:?} {:?}", self.mark, self.ch()); if !self.stream_start_produced { self.fetch_stream_start(); return Ok(()); } self.skip_to_next_token(); self.stale_simple_keys()?; let mark = self.mark; self.unroll_indent(mark.col as isize); self.lookahead(4); if is_z(self.ch()) { self.fetch_stream_end()?; return Ok(()); } // Is it a directive? if self.mark.col == 0 && self.ch_is('%') { return self.fetch_directive(); } if self.mark.col == 0 && self.buffer[0] == '-' && self.buffer[1] == '-' && self.buffer[2] == '-' && is_blankz(self.buffer[3]) { self.fetch_document_indicator(TokenType::DocumentStart)?; return Ok(()); } if self.mark.col == 0 && self.buffer[0] == '.' && self.buffer[1] == '.' && self.buffer[2] == '.' && is_blankz(self.buffer[3]) { self.fetch_document_indicator(TokenType::DocumentEnd)?; return Ok(()); } let c = self.buffer[0]; let nc = self.buffer[1]; match c { '[' => self.fetch_flow_collection_start(TokenType::FlowSequenceStart), '{' => self.fetch_flow_collection_start(TokenType::FlowMappingStart), ']' => self.fetch_flow_collection_end(TokenType::FlowSequenceEnd), '}' => self.fetch_flow_collection_end(TokenType::FlowMappingEnd), ',' => self.fetch_flow_entry(), '-' if is_blankz(nc) => self.fetch_block_entry(), '?' 
if is_blankz(nc) => self.fetch_key(), ':' if is_blankz(nc) || (self.flow_level > 0 && (is_flow(nc) || self.mark.index == self.adjacent_value_allowed_at)) => { self.fetch_value() } // Is it an alias? '*' => self.fetch_anchor(true), // Is it an anchor? '&' => self.fetch_anchor(false), '!' => self.fetch_tag(), // Is it a literal scalar? '|' if self.flow_level == 0 => self.fetch_block_scalar(true), // Is it a folded scalar? '>' if self.flow_level == 0 => self.fetch_block_scalar(false), '\'' => self.fetch_flow_scalar(true), '"' => self.fetch_flow_scalar(false), // plain scalar '-' if !is_blankz(nc) => self.fetch_plain_scalar(), ':' | '?' if !is_blankz(nc) && self.flow_level == 0 => self.fetch_plain_scalar(), '%' | '@' | '`' => Err(ScanError::new( self.mark, &format!("unexpected character: `{}'", c), )), _ => self.fetch_plain_scalar(), } } pub fn next_token(&mut self) -> Result, ScanError> { if self.stream_end_produced { return Ok(None); } if !self.token_available { self.fetch_more_tokens()?; } let t = self.tokens.pop_front().unwrap(); self.token_available = false; self.tokens_parsed += 1; if let TokenType::StreamEnd = t.1 { self.stream_end_produced = true; } Ok(Some(t)) } pub fn fetch_more_tokens(&mut self) -> ScanResult { let mut need_more; loop { need_more = false; if self.tokens.is_empty() { need_more = true; } else { self.stale_simple_keys()?; for sk in &self.simple_keys { if sk.possible && sk.token_number == self.tokens_parsed { need_more = true; break; } } } if !need_more { break; } self.fetch_next_token()?; } self.token_available = true; Ok(()) } fn stale_simple_keys(&mut self) -> ScanResult { for sk in &mut self.simple_keys { if sk.possible && (sk.mark.line < self.mark.line || sk.mark.index + 1024 < self.mark.index) { if sk.required { return Err(ScanError::new(self.mark, "simple key expect ':'")); } sk.possible = false; } } Ok(()) } fn skip_to_next_token(&mut self) { loop { self.lookahead(1); // TODO(chenyh) BOM match self.ch() { ' ' => self.skip(), '\t' if 
self.flow_level > 0 || !self.simple_key_allowed => self.skip(), '\n' | '\r' => { self.lookahead(2); self.skip_line(); if self.flow_level == 0 { self.allow_simple_key(); } } '#' => { while !is_breakz(self.ch()) { self.skip(); self.lookahead(1); } } _ => break, } } } fn fetch_stream_start(&mut self) { let mark = self.mark; self.indent = -1; self.stream_start_produced = true; self.allow_simple_key(); self.tokens .push_back(Token(mark, TokenType::StreamStart(TEncoding::Utf8))); self.simple_keys.push(SimpleKey::new(Marker::new(0, 0, 0))); } fn fetch_stream_end(&mut self) -> ScanResult { // force new line if self.mark.col != 0 { self.mark.col = 0; self.mark.line += 1; } self.unroll_indent(-1); self.remove_simple_key()?; self.disallow_simple_key(); self.tokens .push_back(Token(self.mark, TokenType::StreamEnd)); Ok(()) } fn fetch_directive(&mut self) -> ScanResult { self.unroll_indent(-1); self.remove_simple_key()?; self.disallow_simple_key(); let tok = self.scan_directive()?; self.tokens.push_back(tok); Ok(()) } fn scan_directive(&mut self) -> Result { let start_mark = self.mark; self.skip(); let name = self.scan_directive_name()?; let tok = match name.as_ref() { "YAML" => self.scan_version_directive_value(&start_mark)?, "TAG" => self.scan_tag_directive_value(&start_mark)?, // XXX This should be a warning instead of an error _ => { // skip current line self.lookahead(1); while !is_breakz(self.ch()) { self.skip(); self.lookahead(1); } // XXX return an empty TagDirective token Token( start_mark, TokenType::TagDirective(String::new(), String::new()), ) // return Err(ScanError::new(start_mark, // "while scanning a directive, found unknown directive name")) } }; self.lookahead(1); while is_blank(self.ch()) { self.skip(); self.lookahead(1); } if self.ch() == '#' { while !is_breakz(self.ch()) { self.skip(); self.lookahead(1); } } if !is_breakz(self.ch()) { return Err(ScanError::new( start_mark, "while scanning a directive, did not find expected comment or line break", )); } // 
Eat a line break if is_break(self.ch()) { self.lookahead(2); self.skip_line(); } Ok(tok) } fn scan_version_directive_value(&mut self, mark: &Marker) -> Result { self.lookahead(1); while is_blank(self.ch()) { self.skip(); self.lookahead(1); } let major = self.scan_version_directive_number(mark)?; if self.ch() != '.' { return Err(ScanError::new( *mark, "while scanning a YAML directive, did not find expected digit or '.' character", )); } self.skip(); let minor = self.scan_version_directive_number(mark)?; Ok(Token(*mark, TokenType::VersionDirective(major, minor))) } fn scan_directive_name(&mut self) -> Result { let start_mark = self.mark; let mut string = String::new(); self.lookahead(1); while is_alpha(self.ch()) { string.push(self.ch()); self.skip(); self.lookahead(1); } if string.is_empty() { return Err(ScanError::new( start_mark, "while scanning a directive, could not find expected directive name", )); } if !is_blankz(self.ch()) { return Err(ScanError::new( start_mark, "while scanning a directive, found unexpected non-alphabetical character", )); } Ok(string) } fn scan_version_directive_number(&mut self, mark: &Marker) -> Result { let mut val = 0u32; let mut length = 0usize; self.lookahead(1); while is_digit(self.ch()) { if length + 1 > 9 { return Err(ScanError::new( *mark, "while scanning a YAML directive, found extremely long version number", )); } length += 1; val = val * 10 + ((self.ch() as u32) - ('0' as u32)); self.skip(); self.lookahead(1); } if length == 0 { return Err(ScanError::new( *mark, "while scanning a YAML directive, did not find expected version number", )); } Ok(val) } fn scan_tag_directive_value(&mut self, mark: &Marker) -> Result { self.lookahead(1); /* Eat whitespaces. */ while is_blank(self.ch()) { self.skip(); self.lookahead(1); } let handle = self.scan_tag_handle(true, mark)?; self.lookahead(1); /* Eat whitespaces. 
*/ while is_blank(self.ch()) { self.skip(); self.lookahead(1); } let is_secondary = handle == "!!"; let prefix = self.scan_tag_uri(true, is_secondary, "", mark)?; self.lookahead(1); if is_blankz(self.ch()) { Ok(Token(*mark, TokenType::TagDirective(handle, prefix))) } else { Err(ScanError::new( *mark, "while scanning TAG, did not find expected whitespace or line break", )) } } fn fetch_tag(&mut self) -> ScanResult { self.save_simple_key()?; self.disallow_simple_key(); let tok = self.scan_tag()?; self.tokens.push_back(tok); Ok(()) } fn scan_tag(&mut self) -> Result { let start_mark = self.mark; let mut handle = String::new(); let mut suffix; let mut secondary = false; // Check if the tag is in the canonical form (verbatim). self.lookahead(2); if self.buffer[1] == '<' { // Eat '!<' self.skip(); self.skip(); suffix = self.scan_tag_uri(false, false, "", &start_mark)?; if self.ch() != '>' { return Err(ScanError::new( start_mark, "while scanning a tag, did not find the expected '>'", )); } self.skip(); } else { // The tag has either the '!suffix' or the '!handle!suffix' handle = self.scan_tag_handle(false, &start_mark)?; // Check if it is, indeed, handle. if handle.len() >= 2 && handle.starts_with('!') && handle.ends_with('!') { if handle == "!!" { secondary = true; } suffix = self.scan_tag_uri(false, secondary, "", &start_mark)?; } else { suffix = self.scan_tag_uri(false, false, &handle, &start_mark)?; handle = "!".to_string(); // A special case: the '!' tag. Set the handle to '' and the // suffix to '!'. 
if suffix.is_empty() { handle.clear(); suffix = "!".to_owned(); } } } self.lookahead(1); if is_blankz(self.ch()) { // XXX: ex 7.2, an empty scalar can follow a secondary tag Ok(Token(start_mark, TokenType::Tag(handle, suffix))) } else { Err(ScanError::new( start_mark, "while scanning a tag, did not find expected whitespace or line break", )) } } fn scan_tag_handle(&mut self, directive: bool, mark: &Marker) -> Result { let mut string = String::new(); self.lookahead(1); if self.ch() != '!' { return Err(ScanError::new( *mark, "while scanning a tag, did not find expected '!'", )); } string.push(self.ch()); self.skip(); self.lookahead(1); while is_alpha(self.ch()) { string.push(self.ch()); self.skip(); self.lookahead(1); } // Check if the trailing character is '!' and copy it. if self.ch() == '!' { string.push(self.ch()); self.skip(); } else if directive && string != "!" { // It's either the '!' tag or not really a tag handle. If it's a %TAG // directive, it's an error. If it's a tag token, it must be a part of // URI. return Err(ScanError::new( *mark, "while parsing a tag directive, did not find expected '!'", )); } Ok(string) } fn scan_tag_uri( &mut self, directive: bool, _is_secondary: bool, head: &str, mark: &Marker, ) -> Result { let mut length = head.len(); let mut string = String::new(); // Copy the head if needed. // Note that we don't copy the leading '!' character. if length > 1 { string.extend(head.chars().skip(1)); } self.lookahead(1); /* * The set of characters that may appear in URI is as follows: * * '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', * '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', * '%'. */ while match self.ch() { ';' | '/' | '?' | ':' | '@' | '&' => true, '=' | '+' | '$' | ',' | '.' | '!' | '~' | '*' | '\'' | '(' | ')' | '[' | ']' => true, '%' => true, c if is_alpha(c) => true, _ => false, } { // Check if it is a URI-escape sequence. 
if self.ch() == '%' {
    string.push(self.scan_uri_escapes(directive, mark)?);
} else {
    string.push(self.ch());
    self.skip();
}

length += 1;
self.lookahead(1);
}

if length == 0 {
    return Err(ScanError::new(
        *mark,
        "while parsing a tag, did not find expected tag URI",
    ));
}

Ok(string)
}

/// Decode one %-escaped UTF-8 sequence (one to four `%XX` octets) from the
/// input and return the decoded character.
///
/// The lead octet's prefix bits determine the sequence length. Only the
/// payload bits of the lead byte are kept (the length-dependent mask below),
/// and each continuation byte (`10xxxxxx`) contributes six payload bits,
/// per RFC 3629. The previous code kept the prefix bits and shifted by 8,
/// which produced wrong codepoints for every multi-byte escape
/// (e.g. `%C3%A9` decoded to U+C3A9 instead of U+00E9).
fn scan_uri_escapes(&mut self, _directive: bool, mark: &Marker) -> Result<char, ScanError> {
    let mut width = 0usize;
    let mut code = 0u32;
    loop {
        self.lookahead(3);

        if !(self.ch() == '%' && is_hex(self.buffer[1]) && is_hex(self.buffer[2])) {
            return Err(ScanError::new(
                *mark,
                "while parsing a tag, did not find URI escaped octet",
            ));
        }

        let octet = (as_hex(self.buffer[1]) << 4) + as_hex(self.buffer[2]);

        if width == 0 {
            width = match octet {
                _ if octet & 0x80 == 0x00 => 1,
                _ if octet & 0xE0 == 0xC0 => 2,
                _ if octet & 0xF0 == 0xE0 => 3,
                _ if octet & 0xF8 == 0xF0 => 4,
                _ => {
                    return Err(ScanError::new(
                        *mark,
                        "while parsing a tag, found an incorrect leading UTF-8 octet",
                    ));
                }
            };
            // Keep only the payload bits of the lead byte; the mask depends
            // on the sequence length.
            code = match width {
                2 => octet & 0x1F,
                3 => octet & 0x0F,
                4 => octet & 0x07,
                // 1-byte (ASCII) sequence: the whole octet is payload.
                _ => octet,
            };
        } else {
            if octet & 0xC0 != 0x80 {
                return Err(ScanError::new(
                    *mark,
                    "while parsing a tag, found an incorrect trailing UTF-8 octet",
                ));
            }
            // Each continuation byte contributes its low six bits.
            code = (code << 6) + (octet & 0x3F);
        }

        // Consume '%' plus the two hex digits.
        self.skip();
        self.skip();
        self.skip();

        width -= 1;
        if width == 0 {
            break;
        }
    }

    match char::from_u32(code) {
        Some(ch) => Ok(ch),
        None => Err(ScanError::new(
            *mark,
            "while parsing a tag, found an invalid UTF-8 codepoint",
        )),
    }
}

/// Fetch an anchor ('&') or alias ('*') token into the queue.
fn fetch_anchor(&mut self, alias: bool) -> ScanResult {
    self.save_simple_key()?;
    self.disallow_simple_key();

    let tok = self.scan_anchor(alias)?;

    self.tokens.push_back(tok);

    Ok(())
}

/// Scan the name following an '&' (anchor) or '*' (alias) indicator.
fn scan_anchor(&mut self, alias: bool) -> Result<Token, ScanError> {
    let mut string = String::new();
    let start_mark = self.mark;

    self.skip();
    self.lookahead(1);
    while is_alpha(self.ch()) {
        string.push(self.ch());
        self.skip();
        self.lookahead(1);
    }

    if string.is_empty()
        || match self.ch() {
            c if is_blankz(c) => false,
            '?'
| ':' | ',' | ']' | '}' | '%' | '@' | '`' => false,
        _ => true,
    } {
        // The name must be non-empty and be terminated by a blank/break/EOF
        // or one of the flow indicators matched above.
        return Err(ScanError::new(start_mark, "while scanning an anchor or alias, did not find expected alphabetic or numeric character"));
    }

    // '*' yields an Alias token, '&' an Anchor; both share the same lexical form.
    if alias {
        Ok(Token(start_mark, TokenType::Alias(string)))
    } else {
        Ok(Token(start_mark, TokenType::Anchor(string)))
    }
}

/// Fetch a flow-collection start token ('[' or '{').
fn fetch_flow_collection_start(&mut self, tok: TokenType) -> ScanResult {
    // The indicators '[' and '{' may start a simple key.
    self.save_simple_key()?;

    self.increase_flow_level()?;

    // A simple key may follow the indicators '[' and '{'.
    self.allow_simple_key();

    let start_mark = self.mark;
    self.skip();

    self.tokens.push_back(Token(start_mark, tok));
    Ok(())
}

/// Fetch a flow-collection end token (']' or '}').
fn fetch_flow_collection_end(&mut self, tok: TokenType) -> ScanResult {
    self.remove_simple_key()?;
    self.decrease_flow_level();

    // No simple keys after the indicators ']' and '}'.
    self.disallow_simple_key();

    let start_mark = self.mark;
    self.skip();

    self.tokens.push_back(Token(start_mark, tok));
    Ok(())
}

/// Fetch a flow entry token (',' inside a flow collection).
fn fetch_flow_entry(&mut self) -> ScanResult {
    self.remove_simple_key()?;
    // A ',' re-enables a simple key for the next entry.
    self.allow_simple_key();

    let start_mark = self.mark;
    self.skip();

    self.tokens
        .push_back(Token(start_mark, TokenType::FlowEntry));
    Ok(())
}

fn increase_flow_level(&mut self) -> ScanResult {
    // Each flow level carries its own simple-key slot.
    self.simple_keys.push(SimpleKey::new(Marker::new(0, 0, 0)));
    // checked_add guards against pathological nesting depth overflowing.
    self.flow_level = self
        .flow_level
        .checked_add(1)
        .ok_or_else(|| ScanError::new(self.mark, "recursion limit exceeded"))?;
    Ok(())
}

fn decrease_flow_level(&mut self) {
    if self.flow_level > 0 {
        self.flow_level -= 1;
        // Drop the simple-key slot pushed by increase_flow_level.
        self.simple_keys.pop().unwrap();
    }
}

/// Fetch a block entry token ('-' starting a block sequence item).
fn fetch_block_entry(&mut self) -> ScanResult {
    if self.flow_level == 0 {
        // Check if we are allowed to start a new entry.
if !self.simple_key_allowed { return Err(ScanError::new( self.mark, "block sequence entries are not allowed in this context", )); } let mark = self.mark; // generate BLOCK-SEQUENCE-START if indented self.roll_indent(mark.col, None, TokenType::BlockSequenceStart, mark); } else { // - * only allowed in block return Err(ScanError::new( self.mark, r#""-" is only valid inside a block"#, )); } self.remove_simple_key()?; self.allow_simple_key(); let start_mark = self.mark; self.skip(); self.tokens .push_back(Token(start_mark, TokenType::BlockEntry)); Ok(()) } fn fetch_document_indicator(&mut self, t: TokenType) -> ScanResult { self.unroll_indent(-1); self.remove_simple_key()?; self.disallow_simple_key(); let mark = self.mark; self.skip(); self.skip(); self.skip(); self.tokens.push_back(Token(mark, t)); Ok(()) } fn fetch_block_scalar(&mut self, literal: bool) -> ScanResult { self.save_simple_key()?; self.allow_simple_key(); let tok = self.scan_block_scalar(literal)?; self.tokens.push_back(tok); Ok(()) } fn scan_block_scalar(&mut self, literal: bool) -> Result { let start_mark = self.mark; let mut chomping: i32 = 0; let mut increment: usize = 0; let mut indent: usize = 0; let mut trailing_blank: bool; let mut leading_blank: bool = false; let mut string = String::new(); let mut leading_break = String::new(); let mut trailing_breaks = String::new(); // skip '|' or '>' self.skip(); self.lookahead(1); if self.ch() == '+' || self.ch() == '-' { if self.ch() == '+' { chomping = 1; } else { chomping = -1; } self.skip(); self.lookahead(1); if is_digit(self.ch()) { if self.ch() == '0' { return Err(ScanError::new( start_mark, "while scanning a block scalar, found an indentation indicator equal to 0", )); } increment = (self.ch() as usize) - ('0' as usize); self.skip(); } } else if is_digit(self.ch()) { if self.ch() == '0' { return Err(ScanError::new( start_mark, "while scanning a block scalar, found an indentation indicator equal to 0", )); } increment = (self.ch() as usize) - ('0' as 
usize); self.skip(); self.lookahead(1); if self.ch() == '+' || self.ch() == '-' { if self.ch() == '+' { chomping = 1; } else { chomping = -1; } self.skip(); } } // Eat whitespaces and comments to the end of the line. self.lookahead(1); while is_blank(self.ch()) { self.skip(); self.lookahead(1); } if self.ch() == '#' { while !is_breakz(self.ch()) { self.skip(); self.lookahead(1); } } // Check if we are at the end of the line. if !is_breakz(self.ch()) { return Err(ScanError::new( start_mark, "while scanning a block scalar, did not find expected comment or line break", )); } if is_break(self.ch()) { self.lookahead(2); self.skip_line(); } if increment > 0 { indent = if self.indent >= 0 { (self.indent + increment as isize) as usize } else { increment } } // Scan the leading line breaks and determine the indentation level if needed. self.block_scalar_breaks(&mut indent, &mut trailing_breaks)?; self.lookahead(1); let start_mark = self.mark; while self.mark.col == indent && !is_z(self.ch()) { // We are at the beginning of a non-empty line. trailing_blank = is_blank(self.ch()); if !literal && !leading_break.is_empty() && !leading_blank && !trailing_blank { if trailing_breaks.is_empty() { string.push(' '); } leading_break.clear(); } else { string.push_str(&leading_break); leading_break.clear(); } string.push_str(&trailing_breaks); trailing_breaks.clear(); leading_blank = is_blank(self.ch()); while !is_breakz(self.ch()) { string.push(self.ch()); self.skip(); self.lookahead(1); } // break on EOF if is_z(self.ch()) { break; } self.lookahead(2); self.read_break(&mut leading_break); // Eat the following indentation spaces and line breaks. self.block_scalar_breaks(&mut indent, &mut trailing_breaks)?; } // Chomp the tail. 
if chomping != -1 { string.push_str(&leading_break); } if chomping == 1 { string.push_str(&trailing_breaks); } if literal { Ok(Token( start_mark, TokenType::Scalar(TScalarStyle::Literal, string), )) } else { Ok(Token( start_mark, TokenType::Scalar(TScalarStyle::Foled, string), )) } } fn block_scalar_breaks(&mut self, indent: &mut usize, breaks: &mut String) -> ScanResult { let mut max_indent = 0; loop { self.lookahead(1); while (*indent == 0 || self.mark.col < *indent) && self.buffer[0] == ' ' { self.skip(); self.lookahead(1); } if self.mark.col > max_indent { max_indent = self.mark.col; } // Check for a tab character messing the indentation. if (*indent == 0 || self.mark.col < *indent) && self.buffer[0] == '\t' { return Err(ScanError::new(self.mark, "while scanning a block scalar, found a tab character where an indentation space is expected")); } if !is_break(self.ch()) { break; } self.lookahead(2); // Consume the line break. self.read_break(breaks); } if *indent == 0 { *indent = max_indent; if *indent < (self.indent + 1) as usize { *indent = (self.indent + 1) as usize; } if *indent < 1 { *indent = 1; } } Ok(()) } fn fetch_flow_scalar(&mut self, single: bool) -> ScanResult { self.save_simple_key()?; self.disallow_simple_key(); let tok = self.scan_flow_scalar(single)?; // From spec: To ensure JSON compatibility, if a key inside a flow mapping is JSON-like, // YAML allows the following value to be specified adjacent to the “:”. self.adjacent_value_allowed_at = self.mark.index; self.tokens.push_back(tok); Ok(()) } fn scan_flow_scalar(&mut self, single: bool) -> Result { let start_mark = self.mark; let mut string = String::new(); let mut leading_break = String::new(); let mut trailing_breaks = String::new(); let mut whitespaces = String::new(); let mut leading_blanks; /* Eat the left quote. */ self.skip(); loop { /* Check for a document indicator. 
*/ self.lookahead(4); if self.mark.col == 0 && (((self.buffer[0] == '-') && (self.buffer[1] == '-') && (self.buffer[2] == '-')) || ((self.buffer[0] == '.') && (self.buffer[1] == '.') && (self.buffer[2] == '.'))) && is_blankz(self.buffer[3]) { return Err(ScanError::new( start_mark, "while scanning a quoted scalar, found unexpected document indicator", )); } if is_z(self.ch()) { return Err(ScanError::new( start_mark, "while scanning a quoted scalar, found unexpected end of stream", )); } self.lookahead(2); leading_blanks = false; // Consume non-blank characters. while !is_blankz(self.ch()) { match self.ch() { // Check for an escaped single quote. '\'' if self.buffer[1] == '\'' && single => { string.push('\''); self.skip(); self.skip(); } // Check for the right quote. '\'' if single => break, '"' if !single => break, // Check for an escaped line break. '\\' if !single && is_break(self.buffer[1]) => { self.lookahead(3); self.skip(); self.skip_line(); leading_blanks = true; break; } // Check for an escape sequence. '\\' if !single => { let mut code_length = 0usize; match self.buffer[1] { '0' => string.push('\0'), 'a' => string.push('\x07'), 'b' => string.push('\x08'), 't' | '\t' => string.push('\t'), 'n' => string.push('\n'), 'v' => string.push('\x0b'), 'f' => string.push('\x0c'), 'r' => string.push('\x0d'), 'e' => string.push('\x1b'), ' ' => string.push('\x20'), '"' => string.push('"'), '\'' => string.push('\''), '\\' => string.push('\\'), // NEL (#x85) 'N' => string.push(char::from_u32(0x85).unwrap()), // #xA0 '_' => string.push(char::from_u32(0xA0).unwrap()), // LS (#x2028) 'L' => string.push(char::from_u32(0x2028).unwrap()), // PS (#x2029) 'P' => string.push(char::from_u32(0x2029).unwrap()), 'x' => code_length = 2, 'u' => code_length = 4, 'U' => code_length = 8, _ => { return Err(ScanError::new( start_mark, "while parsing a quoted scalar, found unknown escape character", )) } } self.skip(); self.skip(); // Consume an arbitrary escape code. 
if code_length > 0 { self.lookahead(code_length); let mut value = 0u32; for i in 0..code_length { if !is_hex(self.buffer[i]) { return Err(ScanError::new(start_mark, "while parsing a quoted scalar, did not find expected hexadecimal number")); } value = (value << 4) + as_hex(self.buffer[i]); } let ch = match char::from_u32(value) { Some(v) => v, None => { return Err(ScanError::new(start_mark, "while parsing a quoted scalar, found invalid Unicode character escape code")); } }; string.push(ch); for _ in 0..code_length { self.skip(); } } } c => { string.push(c); self.skip(); } } self.lookahead(2); } self.lookahead(1); match self.ch() { '\'' if single => break, '"' if !single => break, _ => {} } // Consume blank characters. while is_blank(self.ch()) || is_break(self.ch()) { if is_blank(self.ch()) { // Consume a space or a tab character. if leading_blanks { self.skip(); } else { whitespaces.push(self.ch()); self.skip(); } } else { self.lookahead(2); // Check if it is a first line break. if leading_blanks { self.read_break(&mut trailing_breaks); } else { whitespaces.clear(); self.read_break(&mut leading_break); leading_blanks = true; } } self.lookahead(1); } // Join the whitespaces or fold line breaks. if leading_blanks { if leading_break.is_empty() { string.push_str(&leading_break); string.push_str(&trailing_breaks); trailing_breaks.clear(); leading_break.clear(); } else { if trailing_breaks.is_empty() { string.push(' '); } else { string.push_str(&trailing_breaks); trailing_breaks.clear(); } leading_break.clear(); } } else { string.push_str(&whitespaces); whitespaces.clear(); } } // loop // Eat the right quote. 
self.skip(); if single { Ok(Token( start_mark, TokenType::Scalar(TScalarStyle::SingleQuoted, string), )) } else { Ok(Token( start_mark, TokenType::Scalar(TScalarStyle::DoubleQuoted, string), )) } } fn fetch_plain_scalar(&mut self) -> ScanResult { self.save_simple_key()?; self.disallow_simple_key(); let tok = self.scan_plain_scalar()?; self.tokens.push_back(tok); Ok(()) } fn scan_plain_scalar(&mut self) -> Result { let indent = self.indent + 1; let start_mark = self.mark; let mut string = String::new(); let mut leading_break = String::new(); let mut trailing_breaks = String::new(); let mut whitespaces = String::new(); let mut leading_blanks = false; loop { /* Check for a document indicator. */ self.lookahead(4); if self.mark.col == 0 && (((self.buffer[0] == '-') && (self.buffer[1] == '-') && (self.buffer[2] == '-')) || ((self.buffer[0] == '.') && (self.buffer[1] == '.') && (self.buffer[2] == '.'))) && is_blankz(self.buffer[3]) { break; } if self.ch() == '#' { break; } while !is_blankz(self.ch()) { // indicators can end a plain scalar, see 7.3.3. Plain Style match self.ch() { ':' if is_blankz(self.buffer[1]) || (self.flow_level > 0 && is_flow(self.buffer[1])) => { break; } ',' | '[' | ']' | '{' | '}' if self.flow_level > 0 => break, _ => {} } if leading_blanks || !whitespaces.is_empty() { if leading_blanks { if leading_break.is_empty() { string.push_str(&leading_break); string.push_str(&trailing_breaks); trailing_breaks.clear(); leading_break.clear(); } else { if trailing_breaks.is_empty() { string.push(' '); } else { string.push_str(&trailing_breaks); trailing_breaks.clear(); } leading_break.clear(); } leading_blanks = false; } else { string.push_str(&whitespaces); whitespaces.clear(); } } string.push(self.ch()); self.skip(); self.lookahead(2); } // is the end? 
if !(is_blank(self.ch()) || is_break(self.ch())) { break; } self.lookahead(1); while is_blank(self.ch()) || is_break(self.ch()) { if is_blank(self.ch()) { if leading_blanks && (self.mark.col as isize) < indent && self.ch() == '\t' { return Err(ScanError::new( start_mark, "while scanning a plain scalar, found a tab", )); } if leading_blanks { self.skip(); } else { whitespaces.push(self.ch()); self.skip(); } } else { self.lookahead(2); // Check if it is a first line break if leading_blanks { self.read_break(&mut trailing_breaks); } else { whitespaces.clear(); self.read_break(&mut leading_break); leading_blanks = true; } } self.lookahead(1); } // check indentation level if self.flow_level == 0 && (self.mark.col as isize) < indent { break; } } if leading_blanks { self.allow_simple_key(); } Ok(Token( start_mark, TokenType::Scalar(TScalarStyle::Plain, string), )) } fn fetch_key(&mut self) -> ScanResult { let start_mark = self.mark; if self.flow_level == 0 { // Check if we are allowed to start a new key (not necessarily simple). if !self.simple_key_allowed { return Err(ScanError::new( self.mark, "mapping keys are not allowed in this context", )); } self.roll_indent( start_mark.col, None, TokenType::BlockMappingStart, start_mark, ); } self.remove_simple_key()?; if self.flow_level == 0 { self.allow_simple_key(); } else { self.disallow_simple_key(); } self.skip(); self.tokens.push_back(Token(start_mark, TokenType::Key)); Ok(()) } fn fetch_value(&mut self) -> ScanResult { let sk = self.simple_keys.last().unwrap().clone(); let start_mark = self.mark; if sk.possible { // insert simple key let tok = Token(sk.mark, TokenType::Key); let tokens_parsed = self.tokens_parsed; self.insert_token(sk.token_number - tokens_parsed, tok); // Add the BLOCK-MAPPING-START token if needed. 
self.roll_indent(
    sk.mark.col,
    Some(sk.token_number),
    TokenType::BlockMappingStart,
    start_mark,
);
// The simple key has been consumed; a value may not start another one.
self.simple_keys.last_mut().unwrap().possible = false;
self.disallow_simple_key();
} else {
    // The ':' indicator follows a complex key.
    if self.flow_level == 0 {
        if !self.simple_key_allowed {
            return Err(ScanError::new(
                start_mark,
                "mapping values are not allowed in this context",
            ));
        }

        self.roll_indent(
            start_mark.col,
            None,
            TokenType::BlockMappingStart,
            start_mark,
        );
    }

    // In block context a new simple key may follow the ':'; in flow
    // context it may not.
    if self.flow_level == 0 {
        self.allow_simple_key();
    } else {
        self.disallow_simple_key();
    }
}
self.skip();
self.tokens.push_back(Token(start_mark, TokenType::Value));

Ok(())
}

/// If `col` is deeper than the current indent, push the current indent and
/// emit `tok`. When `number` is `Some`, the token is inserted before an
/// already-queued simple key instead of appended. No-op in flow context,
/// where indentation is not significant.
fn roll_indent(&mut self, col: usize, number: Option<usize>, tok: TokenType, mark: Marker) {
    if self.flow_level > 0 {
        return;
    }

    if self.indent < col as isize {
        self.indents.push(self.indent);
        self.indent = col as isize;
        let tokens_parsed = self.tokens_parsed;
        match number {
            // Position is relative to the queue: tokens already handed to
            // the consumer are accounted for via tokens_parsed.
            Some(n) => self.insert_token(n - tokens_parsed, Token(mark, tok)),
            None => self.tokens.push_back(Token(mark, tok)),
        }
    }
}

/// Pop indentation levels down to `col`, emitting one BlockEnd token per
/// closed level. No-op in flow context.
fn unroll_indent(&mut self, col: isize) {
    if self.flow_level > 0 {
        return;
    }
    while self.indent > col {
        self.tokens.push_back(Token(self.mark, TokenType::BlockEnd));
        self.indent = self.indents.pop().unwrap();
    }
}

/// Record the current position as a potential simple key (a mapping key
/// without an explicit '?'), replacing the candidate of the current level.
fn save_simple_key(&mut self) -> Result<(), ScanError> {
    let required = self.flow_level > 0 && self.indent == (self.mark.col as isize);
    if self.simple_key_allowed {
        let mut sk = SimpleKey::new(self.mark);
        sk.possible = true;
        sk.required = required;
        // Remember where the KEY token would have to be inserted later.
        sk.token_number = self.tokens_parsed + self.tokens.len();

        self.remove_simple_key()?;

        self.simple_keys.pop();
        self.simple_keys.push(sk);
    }
    Ok(())
}

/// Invalidate the current simple-key candidate; error out if it was marked
/// required.
fn remove_simple_key(&mut self) -> ScanResult {
    let last = self.simple_keys.last_mut().unwrap();
    if last.possible && last.required {
        return Err(ScanError::new(self.mark, "simple key expected"));
    }

    last.possible = false;
    Ok(())
}
}

#[cfg(test)]
mod test {
    use super::TokenType::*;
    use super::*;

    macro_rules!
next { ($p:ident, $tk:pat) => {{ let tok = $p.next().unwrap(); match tok.1 { $tk => {} _ => panic!("unexpected token: {:?}", tok), } }}; } macro_rules! next_scalar { ($p:ident, $tk:expr, $v:expr) => {{ let tok = $p.next().unwrap(); match tok.1 { Scalar(style, ref v) => { assert_eq!(style, $tk); assert_eq!(v, $v); } _ => panic!("unexpected token: {:?}", tok), } }}; } macro_rules! end { ($p:ident) => {{ assert_eq!($p.next(), None); }}; } /// test cases in libyaml scanner.c #[test] fn test_empty() { let s = ""; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, StreamEnd); end!(p); } #[test] fn test_scalar() { let s = "a scalar"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, Scalar(TScalarStyle::Plain, _)); next!(p, StreamEnd); end!(p); } #[test] fn test_explicit_scalar() { let s = "--- 'a scalar' ... "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, DocumentStart); next!(p, Scalar(TScalarStyle::SingleQuoted, _)); next!(p, DocumentEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_multiple_documents() { let s = " 'a scalar' --- 'a scalar' --- 'a scalar' "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, Scalar(TScalarStyle::SingleQuoted, _)); next!(p, DocumentStart); next!(p, Scalar(TScalarStyle::SingleQuoted, _)); next!(p, DocumentStart); next!(p, Scalar(TScalarStyle::SingleQuoted, _)); next!(p, StreamEnd); end!(p); } #[test] fn test_a_flow_sequence() { let s = "[item 1, item 2, item 3]"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, FlowSequenceStart); next_scalar!(p, TScalarStyle::Plain, "item 1"); next!(p, FlowEntry); next!(p, Scalar(TScalarStyle::Plain, _)); next!(p, FlowEntry); next!(p, Scalar(TScalarStyle::Plain, _)); next!(p, FlowSequenceEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_a_flow_mapping() { let s = " { a simple key: a value, # Note that the KEY token is produced. ? 
a complex key: another value, } "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, FlowMappingStart); next!(p, Key); next!(p, Scalar(TScalarStyle::Plain, _)); next!(p, Value); next!(p, Scalar(TScalarStyle::Plain, _)); next!(p, FlowEntry); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a complex key"); next!(p, Value); next!(p, Scalar(TScalarStyle::Plain, _)); next!(p, FlowEntry); next!(p, FlowMappingEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_block_sequences() { let s = " - item 1 - item 2 - - item 3.1 - item 3.2 - key 1: value 1 key 2: value 2 "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, BlockSequenceStart); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); next!(p, BlockEntry); next!(p, BlockSequenceStart); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 3.1"); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 3.2"); next!(p, BlockEnd); next!(p, BlockEntry); next!(p, BlockMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 1"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 1"); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 2"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 2"); next!(p, BlockEnd); next!(p, BlockEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_block_mappings() { let s = " a simple key: a value # The KEY token is produced here. ? 
a complex key : another value a mapping: key 1: value 1 key 2: value 2 a sequence: - item 1 - item 2 "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, BlockMappingStart); next!(p, Key); next!(p, Scalar(_, _)); next!(p, Value); next!(p, Scalar(_, _)); next!(p, Key); next!(p, Scalar(_, _)); next!(p, Value); next!(p, Scalar(_, _)); next!(p, Key); next!(p, Scalar(_, _)); next!(p, Value); // libyaml comment seems to be wrong next!(p, BlockMappingStart); next!(p, Key); next!(p, Scalar(_, _)); next!(p, Value); next!(p, Scalar(_, _)); next!(p, Key); next!(p, Scalar(_, _)); next!(p, Value); next!(p, Scalar(_, _)); next!(p, BlockEnd); next!(p, Key); next!(p, Scalar(_, _)); next!(p, Value); next!(p, BlockSequenceStart); next!(p, BlockEntry); next!(p, Scalar(_, _)); next!(p, BlockEntry); next!(p, Scalar(_, _)); next!(p, BlockEnd); next!(p, BlockEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_no_block_sequence_start() { let s = " key: - item 1 - item 2 "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, BlockMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key"); next!(p, Value); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); next!(p, BlockEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_collections_in_sequence() { let s = " - - item 1 - item 2 - key 1: value 1 key 2: value 2 - ? 
complex key : complex value "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, BlockSequenceStart); next!(p, BlockEntry); next!(p, BlockSequenceStart); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); next!(p, BlockEnd); next!(p, BlockEntry); next!(p, BlockMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 1"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 1"); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 2"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 2"); next!(p, BlockEnd); next!(p, BlockEntry); next!(p, BlockMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "complex key"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "complex value"); next!(p, BlockEnd); next!(p, BlockEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_collections_in_mapping() { let s = " ? a sequence : - item 1 - item 2 ? a mapping : key 1: value 1 key 2: value 2 "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, BlockMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a sequence"); next!(p, Value); next!(p, BlockSequenceStart); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); next!(p, BlockEnd); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a mapping"); next!(p, Value); next!(p, BlockMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 1"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 1"); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 2"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 2"); next!(p, BlockEnd); next!(p, BlockEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_spec_ex7_3() { let s = " { ? 
foo :, : bar, } "; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, FlowMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "foo"); next!(p, Value); next!(p, FlowEntry); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "bar"); next!(p, FlowEntry); next!(p, FlowMappingEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_plain_scalar_starting_with_indicators_in_flow() { // "Plain scalars must not begin with most indicators, as this would cause ambiguity with // other YAML constructs. However, the “:”, “?” and “-” indicators may be used as the first // character if followed by a non-space “safe” character, as this causes no ambiguity." let s = "{a: :b}"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, FlowMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, ":b"); next!(p, FlowMappingEnd); next!(p, StreamEnd); end!(p); let s = "{a: ?b}"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, FlowMappingStart); next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a"); next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "?b"); next!(p, FlowMappingEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_plain_scalar_starting_with_indicators_in_block() { let s = ":a"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next_scalar!(p, TScalarStyle::Plain, ":a"); next!(p, StreamEnd); end!(p); let s = "?a"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next_scalar!(p, TScalarStyle::Plain, "?a"); next!(p, StreamEnd); end!(p); } #[test] fn test_plain_scalar_containing_indicators_in_block() { let s = "a:,b"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next_scalar!(p, TScalarStyle::Plain, "a:,b"); next!(p, StreamEnd); end!(p); let s = ":,b"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next_scalar!(p, TScalarStyle::Plain, ":,b"); next!(p, StreamEnd); 
end!(p); } #[test] fn test_scanner_cr() { let s = "---\r\n- tok1\r\n- tok2"; let mut p = Scanner::new(s.chars()); next!(p, StreamStart(..)); next!(p, DocumentStart); next!(p, BlockSequenceStart); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "tok1"); next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "tok2"); next!(p, BlockEnd); next!(p, StreamEnd); end!(p); } #[test] fn test_uri() { // TODO } #[test] fn test_uri_escapes() { // TODO } } insta-1.39.0/src/content/yaml/vendored/yaml.rs000064400000000000000000000461241046102023000173640ustar 00000000000000use crate::content::yaml::vendored::parser::*; use crate::content::yaml::vendored::scanner::{Marker, ScanError, TScalarStyle, TokenType}; use linked_hash_map::LinkedHashMap; use std::collections::BTreeMap; use std::f64; use std::i64; use std::mem; use std::ops::Index; use std::string; use std::vec; /// A YAML node is stored as this `Yaml` enumeration, which provides an easy way to /// access your YAML document. #[derive(Clone, PartialEq, PartialOrd, Debug, Eq, Ord, Hash)] pub enum Yaml { /// Float types are stored as String and parsed on demand. /// Note that f64 does NOT implement Eq trait and can NOT be stored in BTreeMap. Real(string::String), /// YAML int is stored as i64. Integer(i64), /// YAML scalar. String(string::String), /// YAML bool, e.g. `true` or `false`. Boolean(bool), /// YAML array, can be accessed as a `Vec`. Array(self::Array), /// YAML hash, can be accessed as a `LinkedHashMap`. /// /// Insertion order will match the order of insertion into the map. Hash(self::Hash), /// YAML null, e.g. `null` or `~`. Null, /// Accessing a nonexistent node via the Index trait returns `BadValue`. This /// simplifies error handling in the calling code. Invalid type conversion also /// returns `BadValue`. 
BadValue, } pub type Array = Vec; pub type Hash = LinkedHashMap; // parse f64 as Core schema // See: https://github.com/chyh1990/yaml-rust/issues/51 fn parse_f64(v: &str) -> Option { match v { ".inf" | ".Inf" | ".INF" | "+.inf" | "+.Inf" | "+.INF" => Some(f64::INFINITY), "-.inf" | "-.Inf" | "-.INF" => Some(f64::NEG_INFINITY), ".nan" | "NaN" | ".NAN" => Some(f64::NAN), _ => v.parse::().ok(), } } pub struct YamlLoader { docs: Vec, // states // (current node, anchor_id) tuple doc_stack: Vec<(Yaml, usize)>, key_stack: Vec, anchor_map: BTreeMap, } impl MarkedEventReceiver for YamlLoader { fn on_event(&mut self, ev: Event, _: Marker) { // println!("EV {:?}", ev); match ev { Event::DocumentStart => { // do nothing } Event::DocumentEnd => { match self.doc_stack.len() { // empty document 0 => self.docs.push(Yaml::BadValue), 1 => self.docs.push(self.doc_stack.pop().unwrap().0), _ => unreachable!(), } } Event::SequenceStart(aid) => { self.doc_stack.push((Yaml::Array(Vec::new()), aid)); } Event::SequenceEnd => { let node = self.doc_stack.pop().unwrap(); self.insert_new_node(node); } Event::MappingStart(aid) => { self.doc_stack.push((Yaml::Hash(Hash::new()), aid)); self.key_stack.push(Yaml::BadValue); } Event::MappingEnd => { self.key_stack.pop().unwrap(); let node = self.doc_stack.pop().unwrap(); self.insert_new_node(node); } Event::Scalar(v, style, aid, tag) => { let node = if style != TScalarStyle::Plain { Yaml::String(v) } else if let Some(TokenType::Tag(ref handle, ref suffix)) = tag { // XXX tag:yaml.org,2002: if handle == "!!" 
{ match suffix.as_ref() { "bool" => { // "true" or "false" match v.parse::() { Err(_) => Yaml::BadValue, Ok(v) => Yaml::Boolean(v), } } "int" => match v.parse::() { Err(_) => Yaml::BadValue, Ok(v) => Yaml::Integer(v), }, "float" => match parse_f64(&v) { Some(_) => Yaml::Real(v), None => Yaml::BadValue, }, "null" => match v.as_ref() { "~" | "null" => Yaml::Null, _ => Yaml::BadValue, }, _ => Yaml::String(v), } } else { Yaml::String(v) } } else { // Datatype is not specified, or unrecognized Yaml::from_str(&v) }; self.insert_new_node((node, aid)); } _ => { /* ignore */ } } // println!("DOC {:?}", self.doc_stack); } } impl YamlLoader { fn insert_new_node(&mut self, node: (Yaml, usize)) { // valid anchor id starts from 1 if node.1 > 0 { self.anchor_map.insert(node.1, node.0.clone()); } if self.doc_stack.is_empty() { self.doc_stack.push(node); } else { let parent = self.doc_stack.last_mut().unwrap(); match *parent { (Yaml::Array(ref mut v), _) => v.push(node.0), (Yaml::Hash(ref mut h), _) => { let cur_key = self.key_stack.last_mut().unwrap(); // current node is a key if cur_key.is_badvalue() { *cur_key = node.0; // current node is a value } else { let mut newkey = Yaml::BadValue; mem::swap(&mut newkey, cur_key); h.insert(newkey, node.0); } } _ => unreachable!(), } } } pub fn load_from_str(source: &str) -> Result, ScanError> { let mut loader = YamlLoader { docs: Vec::new(), doc_stack: Vec::new(), key_stack: Vec::new(), anchor_map: BTreeMap::new(), }; let mut parser = Parser::new(source.chars()); parser.load(&mut loader, true)?; Ok(loader.docs) } } macro_rules! define_as ( ($name:ident, $t:ident, $yt:ident) => ( pub fn $name(&self) -> Option<$t> { match *self { Yaml::$yt(v) => Some(v), _ => None } } ); ); macro_rules! define_as_ref ( ($name:ident, $t:ty, $yt:ident) => ( pub fn $name(&self) -> Option<$t> { match *self { Yaml::$yt(ref v) => Some(v), _ => None } } ); ); macro_rules! 
define_into ( ($name:ident, $t:ty, $yt:ident) => ( pub fn $name(self) -> Option<$t> { match self { Yaml::$yt(v) => Some(v), _ => None } } ); ); impl Yaml { define_as!(as_bool, bool, Boolean); define_as!(as_i64, i64, Integer); define_as_ref!(as_str, &str, String); define_as_ref!(as_hash, &Hash, Hash); define_as_ref!(as_vec, &Array, Array); define_into!(into_bool, bool, Boolean); define_into!(into_i64, i64, Integer); define_into!(into_string, String, String); define_into!(into_hash, Hash, Hash); define_into!(into_vec, Array, Array); pub fn is_null(&self) -> bool { matches!(*self, Yaml::Null) } pub fn is_badvalue(&self) -> bool { matches!(*self, Yaml::BadValue) } pub fn is_array(&self) -> bool { matches!(*self, Yaml::Array(_)) } pub fn as_f64(&self) -> Option { match *self { Yaml::Real(ref v) => parse_f64(v), _ => None, } } pub fn into_f64(self) -> Option { match self { Yaml::Real(ref v) => parse_f64(v), _ => None, } } } impl Yaml { // Not implementing FromStr because there is no possibility of Error. // This function falls back to Yaml::String if nothing else matches. 
pub fn from_str(v: &str) -> Yaml { if let Some(rest) = v.strip_prefix("0x") { if let Ok(i) = i64::from_str_radix(rest, 16) { return Yaml::Integer(i); } } if let Some(rest) = v.strip_prefix("0o") { if let Ok(i) = i64::from_str_radix(rest, 8) { return Yaml::Integer(i); } } if let Some(rest) = v.strip_prefix('+') { if let Ok(i) = rest.parse::() { return Yaml::Integer(i); } } match v { "~" | "null" => Yaml::Null, "true" => Yaml::Boolean(true), "false" => Yaml::Boolean(false), _ if v.parse::().is_ok() => Yaml::Integer(v.parse::().unwrap()), // try parsing as f64 _ if parse_f64(v).is_some() => Yaml::Real(v.to_owned()), _ => Yaml::String(v.to_owned()), } } } static BAD_VALUE: Yaml = Yaml::BadValue; impl<'a> Index<&'a str> for Yaml { type Output = Yaml; fn index(&self, idx: &'a str) -> &Yaml { let key = Yaml::String(idx.to_owned()); match self.as_hash() { Some(h) => h.get(&key).unwrap_or(&BAD_VALUE), None => &BAD_VALUE, } } } impl Index for Yaml { type Output = Yaml; fn index(&self, idx: usize) -> &Yaml { if let Some(v) = self.as_vec() { v.get(idx).unwrap_or(&BAD_VALUE) } else if let Some(v) = self.as_hash() { let key = Yaml::Integer(idx as i64); v.get(&key).unwrap_or(&BAD_VALUE) } else { &BAD_VALUE } } } impl IntoIterator for Yaml { type Item = Yaml; type IntoIter = YamlIter; fn into_iter(self) -> Self::IntoIter { YamlIter { yaml: self.into_vec().unwrap_or_default().into_iter(), } } } pub struct YamlIter { yaml: vec::IntoIter, } impl Iterator for YamlIter { type Item = Yaml; fn next(&mut self) -> Option { self.yaml.next() } } #[cfg(test)] mod test { use crate::content::yaml::vendored::yaml::*; use std::f64; #[test] fn test_coerce() { let s = "--- a: 1 b: 2.2 c: [1, 2] "; let out = YamlLoader::load_from_str(s).unwrap(); let doc = &out[0]; assert_eq!(doc["a"].as_i64().unwrap(), 1i64); assert_eq!(doc["b"].as_f64().unwrap(), 2.2f64); assert_eq!(doc["c"][1].as_i64().unwrap(), 2i64); assert!(doc["d"][0].is_badvalue()); } #[test] fn test_empty_doc() { let s: String = 
"".to_owned(); YamlLoader::load_from_str(&s).unwrap(); let s: String = "---".to_owned(); assert_eq!(YamlLoader::load_from_str(&s).unwrap()[0], Yaml::Null); } #[test] fn test_parser() { let s: String = " # comment a0 bb: val a1: b1: 4 b2: d a2: 4 # i'm comment a3: [1, 2, 3] a4: - - a1 - a2 - 2 a5: 'single_quoted' a6: \"double_quoted\" a7: 你好 " .to_owned(); let out = YamlLoader::load_from_str(&s).unwrap(); let doc = &out[0]; assert_eq!(doc["a7"].as_str().unwrap(), "你好"); } #[test] fn test_multi_doc() { let s = " 'a scalar' --- 'a scalar' --- 'a scalar' "; let out = YamlLoader::load_from_str(s).unwrap(); assert_eq!(out.len(), 3); } #[test] fn test_bad_anchor() { let s = " a1: &DEFAULT b1: 4 b2: *DEFAULT "; let out = YamlLoader::load_from_str(s).unwrap(); let doc = &out[0]; assert_eq!(doc["a1"]["b2"], Yaml::BadValue); } #[test] fn test_github_27() { // https://github.com/chyh1990/yaml-rust/issues/27 let s = "&a"; let out = YamlLoader::load_from_str(s).unwrap(); let doc = &out[0]; assert_eq!(doc.as_str().unwrap(), ""); } #[test] fn test_plain_datatype() { let s = " - 'string' - \"string\" - string - 123 - -321 - 1.23 - -1e4 - ~ - null - true - false - !!str 0 - !!int 100 - !!float 2 - !!null ~ - !!bool true - !!bool false - 0xFF # bad values - !!int string - !!float string - !!bool null - !!null val - 0o77 - [ 0xF, 0xF ] - +12345 - [ true, false ] "; let out = YamlLoader::load_from_str(s).unwrap(); let doc = &out[0]; assert_eq!(doc[0].as_str().unwrap(), "string"); assert_eq!(doc[1].as_str().unwrap(), "string"); assert_eq!(doc[2].as_str().unwrap(), "string"); assert_eq!(doc[3].as_i64().unwrap(), 123); assert_eq!(doc[4].as_i64().unwrap(), -321); assert_eq!(doc[5].as_f64().unwrap(), 1.23); assert_eq!(doc[6].as_f64().unwrap(), -1e4); assert!(doc[7].is_null()); assert!(doc[8].is_null()); assert!(doc[9].as_bool().unwrap()); assert!(!doc[10].as_bool().unwrap()); assert_eq!(doc[11].as_str().unwrap(), "0"); assert_eq!(doc[12].as_i64().unwrap(), 100); 
assert_eq!(doc[13].as_f64().unwrap(), 2.0); assert!(doc[14].is_null()); assert!(doc[15].as_bool().unwrap()); assert!(!doc[16].as_bool().unwrap()); assert_eq!(doc[17].as_i64().unwrap(), 255); assert!(doc[18].is_badvalue()); assert!(doc[19].is_badvalue()); assert!(doc[20].is_badvalue()); assert!(doc[21].is_badvalue()); assert_eq!(doc[22].as_i64().unwrap(), 63); assert_eq!(doc[23][0].as_i64().unwrap(), 15); assert_eq!(doc[23][1].as_i64().unwrap(), 15); assert_eq!(doc[24].as_i64().unwrap(), 12345); assert!(doc[25][0].as_bool().unwrap()); assert!(!doc[25][1].as_bool().unwrap()); } #[test] fn test_bad_hyphen() { // See: https://github.com/chyh1990/yaml-rust/issues/23 let s = "{-"; assert!(YamlLoader::load_from_str(s).is_err()); } #[test] fn test_issue_65() { // See: https://github.com/chyh1990/yaml-rust/issues/65 let b = "\n\"ll\\\"ll\\\r\n\"ll\\\"ll\\\r\r\r\rU\r\r\rU"; assert!(YamlLoader::load_from_str(b).is_err()); } #[test] fn test_bad_docstart() { assert!(YamlLoader::load_from_str("---This used to cause an infinite loop").is_ok()); assert_eq!( YamlLoader::load_from_str("----"), Ok(vec![Yaml::String(String::from("----"))]) ); assert_eq!( YamlLoader::load_from_str("--- #here goes a comment"), Ok(vec![Yaml::Null]) ); assert_eq!( YamlLoader::load_from_str("---- #here goes a comment"), Ok(vec![Yaml::String(String::from("----"))]) ); } #[test] fn test_plain_datatype_with_into_methods() { let s = " - 'string' - \"string\" - string - 123 - -321 - 1.23 - -1e4 - true - false - !!str 0 - !!int 100 - !!float 2 - !!bool true - !!bool false - 0xFF - 0o77 - +12345 - -.INF - .NAN - !!float .INF "; let mut out = YamlLoader::load_from_str(s).unwrap().into_iter(); let mut doc = out.next().unwrap().into_iter(); assert_eq!(doc.next().unwrap().into_string().unwrap(), "string"); assert_eq!(doc.next().unwrap().into_string().unwrap(), "string"); assert_eq!(doc.next().unwrap().into_string().unwrap(), "string"); assert_eq!(doc.next().unwrap().into_i64().unwrap(), 123); 
assert_eq!(doc.next().unwrap().into_i64().unwrap(), -321); assert_eq!(doc.next().unwrap().into_f64().unwrap(), 1.23); assert_eq!(doc.next().unwrap().into_f64().unwrap(), -1e4); assert!(doc.next().unwrap().into_bool().unwrap()); assert!(!doc.next().unwrap().into_bool().unwrap()); assert_eq!(doc.next().unwrap().into_string().unwrap(), "0"); assert_eq!(doc.next().unwrap().into_i64().unwrap(), 100); assert_eq!(doc.next().unwrap().into_f64().unwrap(), 2.0); assert!(doc.next().unwrap().into_bool().unwrap()); assert!(!doc.next().unwrap().into_bool().unwrap()); assert_eq!(doc.next().unwrap().into_i64().unwrap(), 255); assert_eq!(doc.next().unwrap().into_i64().unwrap(), 63); assert_eq!(doc.next().unwrap().into_i64().unwrap(), 12345); assert_eq!(doc.next().unwrap().into_f64().unwrap(), f64::NEG_INFINITY); assert!(doc.next().unwrap().into_f64().is_some()); assert_eq!(doc.next().unwrap().into_f64().unwrap(), f64::INFINITY); } #[test] fn test_hash_order() { let s = "--- b: ~ a: ~ c: ~ "; let out = YamlLoader::load_from_str(s).unwrap(); let first = out.into_iter().next().unwrap(); let mut iter = first.into_hash().unwrap().into_iter(); assert_eq!( Some((Yaml::String("b".to_owned()), Yaml::Null)), iter.next() ); assert_eq!( Some((Yaml::String("a".to_owned()), Yaml::Null)), iter.next() ); assert_eq!( Some((Yaml::String("c".to_owned()), Yaml::Null)), iter.next() ); assert_eq!(None, iter.next()); } #[test] fn test_integer_key() { let s = " 0: important: true 1: important: false "; let out = YamlLoader::load_from_str(s).unwrap(); let first = out.into_iter().next().unwrap(); assert!(first[0]["important"].as_bool().unwrap()); } #[test] fn test_indentation_equality() { let four_spaces = YamlLoader::load_from_str( r#" hash: with: indentations "#, ) .unwrap() .into_iter() .next() .unwrap(); let two_spaces = YamlLoader::load_from_str( r#" hash: with: indentations "#, ) .unwrap() .into_iter() .next() .unwrap(); let one_space = YamlLoader::load_from_str( r#" hash: with: indentations "#, ) 
.unwrap() .into_iter() .next() .unwrap(); let mixed_spaces = YamlLoader::load_from_str( r#" hash: with: indentations "#, ) .unwrap() .into_iter() .next() .unwrap(); assert_eq!(four_spaces, two_spaces); assert_eq!(two_spaces, one_space); assert_eq!(four_spaces, mixed_spaces); } #[test] fn test_two_space_indentations() { // https://github.com/kbknapp/clap-rs/issues/965 let s = r#" subcommands: - server: about: server related commands subcommands2: - server: about: server related commands subcommands3: - server: about: server related commands "#; let out = YamlLoader::load_from_str(s).unwrap(); let doc = &out.into_iter().next().unwrap(); println!("{:#?}", doc); assert_eq!(doc["subcommands"][0]["server"], Yaml::Null); assert!(doc["subcommands2"][0]["server"].as_hash().is_some()); assert!(doc["subcommands3"][0]["server"].as_hash().is_some()); } #[test] fn test_recursion_depth_check_objects() { let s = "{a:".repeat(10_000) + &"}".repeat(10_000); assert!(YamlLoader::load_from_str(&s).is_err()); } #[test] fn test_recursion_depth_check_arrays() { let s = "[".repeat(10_000) + &"]".repeat(10_000); assert!(YamlLoader::load_from_str(&s).is_err()); } } insta-1.39.0/src/env.rs000064400000000000000000000403051046102023000127630ustar 00000000000000use std::collections::BTreeMap; use std::io::Write; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; use std::{env, fmt, fs}; use crate::content::{yaml, Content}; use crate::utils::is_ci; lazy_static::lazy_static! { static ref WORKSPACES: Mutex>> = Mutex::new(BTreeMap::new()); static ref TOOL_CONFIGS: Mutex>> = Mutex::new(BTreeMap::new()); } pub fn get_tool_config(manifest_dir: &str) -> Arc { let mut configs = TOOL_CONFIGS.lock().unwrap(); if let Some(rv) = configs.get(manifest_dir) { return rv.clone(); } let config = Arc::new(ToolConfig::from_manifest_dir(manifest_dir).expect("failed to load tool config")); configs.insert(manifest_dir.to_string(), config.clone()); config } /// The test runner to use. 
#[cfg(feature = "_cargo_insta_internal")] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum TestRunner { Auto, CargoTest, Nextest, } /// Controls how information is supposed to be displayed. #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum OutputBehavior { /// Diff only Diff, /// Short summary Summary, /// The most minimal output Minimal, /// No output at all Nothing, } /// Unreferenced snapshots flag #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[cfg(feature = "_cargo_insta_internal")] pub enum UnreferencedSnapshots { Auto, Reject, Delete, Warn, Ignore, } /// Snapshot update flag #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum SnapshotUpdate { Always, Auto, Unseen, New, No, } #[derive(Debug)] pub enum Error { Deserialize(crate::content::Error), Env(&'static str), #[allow(unused)] Config(&'static str), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Error::Deserialize(_) => write!(f, "failed to deserialize tool config"), Error::Env(var) => write!(f, "invalid value for env var '{}'", var), Error::Config(var) => write!(f, "invalid value for config '{}'", var), } } } impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { Error::Deserialize(ref err) => Some(err), _ => None, } } } /// Represents a tool configuration. 
#[derive(Debug)] pub struct ToolConfig { force_update_snapshots: bool, force_pass: bool, require_full_match: bool, output: OutputBehavior, snapshot_update: SnapshotUpdate, #[cfg(feature = "glob")] glob_fail_fast: bool, #[cfg(feature = "_cargo_insta_internal")] test_runner: TestRunner, #[cfg(feature = "_cargo_insta_internal")] test_unreferenced: UnreferencedSnapshots, #[cfg(feature = "_cargo_insta_internal")] auto_review: bool, #[cfg(feature = "_cargo_insta_internal")] auto_accept_unseen: bool, #[cfg(feature = "_cargo_insta_internal")] review_include_ignored: bool, #[cfg(feature = "_cargo_insta_internal")] review_include_hidden: bool, #[cfg(feature = "_cargo_insta_internal")] review_warn_undiscovered: bool, } impl ToolConfig { /// Loads the tool config for a specific manifest. pub fn from_manifest_dir(manifest_dir: &str) -> Result { ToolConfig::from_workspace(&get_cargo_workspace(manifest_dir)) } /// Loads the tool config from a cargo workspace. pub fn from_workspace(workspace_dir: &Path) -> Result { let mut cfg = None; for choice in &[".config/insta.yaml", "insta.yaml", ".insta.yaml"] { let path = workspace_dir.join(choice); match fs::read_to_string(&path) { Ok(s) => { cfg = Some(yaml::parse_str(&s, &path).map_err(Error::Deserialize)?); break; } // ideally we would not swallow all errors here but unfortunately there are // some cases where we cannot detect the error properly. // Eg we can see NotADirectory here as kind, but on stable rust it cannot // be matched on. Err(_) => continue, } } let cfg = cfg.unwrap_or_else(|| Content::Map(Default::default())); // support for the deprecated environment variable. This is implemented in a way that // cargo-insta can support older and newer insta versions alike. It will set both // variables. However if only `INSTA_FORCE_UPDATE_SNAPSHOTS` is set, we will emit // a deprecation warning. 
if env::var("INSTA_FORCE_UPDATE").is_err() { if let Ok("1") = env::var("INSTA_FORCE_UPDATE_SNAPSHOTS").as_deref() { eprintln!("INSTA_FORCE_UPDATE_SNAPSHOTS is deprecated, use INSTA_FORCE_UPDATE"); env::set_var("INSTA_FORCE_UPDATE", "1"); } } Ok(ToolConfig { force_update_snapshots: match env::var("INSTA_FORCE_UPDATE").as_deref() { Err(_) | Ok("") => resolve(&cfg, &["behavior", "force_update"]) .and_then(|x| x.as_bool()) .unwrap_or(false), Ok("0") => false, Ok("1") => true, _ => return Err(Error::Env("INSTA_FORCE_UPDATE")), }, require_full_match: match env::var("INSTA_REQUIRE_FULL_MATCH").as_deref() { Err(_) | Ok("") => resolve(&cfg, &["behavior", "require_full_match"]) .and_then(|x| x.as_bool()) .unwrap_or(false), Ok("0") => false, Ok("1") => true, _ => return Err(Error::Env("INSTA_REQUIRE_FULL_MATCH")), }, force_pass: match env::var("INSTA_FORCE_PASS").as_deref() { Err(_) | Ok("") => resolve(&cfg, &["behavior", "force_pass"]) .and_then(|x| x.as_bool()) .unwrap_or(false), Ok("0") => false, Ok("1") => true, _ => return Err(Error::Env("INSTA_FORCE_PASS")), }, output: { let env_var = env::var("INSTA_OUTPUT"); let val = match env_var.as_deref() { Err(_) | Ok("") => resolve(&cfg, &["behavior", "output"]) .and_then(|x| x.as_str()) .unwrap_or("diff"), Ok(val) => val, }; match val { "diff" => OutputBehavior::Diff, "summary" => OutputBehavior::Summary, "minimal" => OutputBehavior::Minimal, "none" => OutputBehavior::Nothing, _ => return Err(Error::Env("INSTA_OUTPUT")), } }, snapshot_update: { let env_var = env::var("INSTA_UPDATE"); let val = match env_var.as_deref() { Err(_) | Ok("") => resolve(&cfg, &["behavior", "update"]) .and_then(|x| x.as_str()) .unwrap_or("auto"), Ok(val) => val, }; match val { "auto" => SnapshotUpdate::Auto, "always" | "1" => SnapshotUpdate::Always, "new" => SnapshotUpdate::New, "unseen" => SnapshotUpdate::Unseen, "no" => SnapshotUpdate::No, _ => return Err(Error::Env("INSTA_UPDATE")), } }, #[cfg(feature = "glob")] glob_fail_fast: match 
env::var("INSTA_GLOB_FAIL_FAST").as_deref() { Err(_) | Ok("") => resolve(&cfg, &["behavior", "glob_fail_fast"]) .and_then(|x| x.as_bool()) .unwrap_or(false), Ok("1") => true, Ok("0") => false, _ => return Err(Error::Env("INSTA_GLOB_FAIL_FAST")), }, #[cfg(feature = "_cargo_insta_internal")] test_runner: { let env_var = env::var("INSTA_TEST_RUNNER"); match env_var.as_deref() { Err(_) | Ok("") => resolve(&cfg, &["test", "runner"]) .and_then(|x| x.as_str()) .unwrap_or("auto"), Ok(val) => val, } .parse::() .map_err(|_| Error::Env("INSTA_TEST_RUNNER"))? }, #[cfg(feature = "_cargo_insta_internal")] test_unreferenced: { resolve(&cfg, &["test", "unreferenced"]) .and_then(|x| x.as_str()) .unwrap_or("ignore") .parse::() .map_err(|_| Error::Config("unreferenced"))? }, #[cfg(feature = "_cargo_insta_internal")] auto_review: resolve(&cfg, &["test", "auto_review"]) .and_then(|x| x.as_bool()) .unwrap_or(false), #[cfg(feature = "_cargo_insta_internal")] auto_accept_unseen: resolve(&cfg, &["test", "auto_accept_unseen"]) .and_then(|x| x.as_bool()) .unwrap_or(false), #[cfg(feature = "_cargo_insta_internal")] review_include_hidden: resolve(&cfg, &["review", "include_hidden"]) .and_then(|x| x.as_bool()) .unwrap_or(false), #[cfg(feature = "_cargo_insta_internal")] review_include_ignored: resolve(&cfg, &["review", "include_ignored"]) .and_then(|x| x.as_bool()) .unwrap_or(false), #[cfg(feature = "_cargo_insta_internal")] review_warn_undiscovered: resolve(&cfg, &["review", "warn_undiscovered"]) .and_then(|x| x.as_bool()) .unwrap_or(true), }) } /// Is insta told to force update snapshots? pub fn force_update_snapshots(&self) -> bool { self.force_update_snapshots } /// Should we fail if metadata doesn't match? pub fn require_full_match(&self) -> bool { self.require_full_match } /// Is insta instructed to fail in tests? pub fn force_pass(&self) -> bool { self.force_pass } /// Returns the intended output behavior for insta. 
pub fn output_behavior(&self) -> OutputBehavior { self.output } /// Returns the intended snapshot update behavior. pub fn snapshot_update(&self) -> SnapshotUpdate { self.snapshot_update } /// Returns the value of glob_fail_fast #[cfg(feature = "glob")] pub fn glob_fail_fast(&self) -> bool { self.glob_fail_fast } } #[cfg(feature = "_cargo_insta_internal")] impl ToolConfig { /// Returns the intended test runner pub fn test_runner(&self) -> TestRunner { self.test_runner } pub fn test_unreferenced(&self) -> UnreferencedSnapshots { self.test_unreferenced } /// Returns the auto review flag. pub fn auto_review(&self) -> bool { self.auto_review } /// Returns the auto accept unseen flag. pub fn auto_accept_unseen(&self) -> bool { self.auto_accept_unseen } pub fn review_include_hidden(&self) -> bool { self.review_include_hidden } pub fn review_include_ignored(&self) -> bool { self.review_include_ignored } pub fn review_warn_undiscovered(&self) -> bool { self.review_warn_undiscovered } } /// How snapshots are supposed to be updated #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum SnapshotUpdateBehavior { /// Snapshots are updated in-place InPlace, /// Snapshots are placed in a new file with a .new suffix NewFile, /// Snapshots are not updated at all. NoUpdate, } /// Returns the intended snapshot update behavior. 
pub fn snapshot_update_behavior(tool_config: &ToolConfig, unseen: bool) -> SnapshotUpdateBehavior { match tool_config.snapshot_update() { SnapshotUpdate::Always => SnapshotUpdateBehavior::InPlace, SnapshotUpdate::Auto => { if is_ci() { SnapshotUpdateBehavior::NoUpdate } else { SnapshotUpdateBehavior::NewFile } } SnapshotUpdate::Unseen => { if unseen { SnapshotUpdateBehavior::NewFile } else { SnapshotUpdateBehavior::InPlace } } SnapshotUpdate::New => SnapshotUpdateBehavior::NewFile, SnapshotUpdate::No => SnapshotUpdateBehavior::NoUpdate, } } /// Returns the cargo workspace for a manifest pub fn get_cargo_workspace(manifest_dir: &str) -> Arc { // we really do not care about poisoning here. let mut workspaces = WORKSPACES.lock().unwrap_or_else(|x| x.into_inner()); if let Some(rv) = workspaces.get(manifest_dir) { rv.clone() } else { // If INSTA_WORKSPACE_ROOT environment variable is set, use the value // as-is. This is useful for those users where the compiled in // CARGO_MANIFEST_DIR points to some transient location. This can easily // happen if the user builds the test in one directory but then tries to // run it in another: even if sources are available in the new // directory, in the past we would always go with the compiled-in value. // The compiled-in directory may not even exist anymore. 
let path = if let Ok(workspace_root) = std::env::var("INSTA_WORKSPACE_ROOT") { Arc::new(PathBuf::from(workspace_root)) } else { let output = std::process::Command::new( env::var("CARGO") .ok() .unwrap_or_else(|| "cargo".to_string()), ) .arg("metadata") .arg("--format-version=1") .arg("--no-deps") .current_dir(manifest_dir) .output() .unwrap(); let docs = crate::content::yaml::vendored::yaml::YamlLoader::load_from_str( std::str::from_utf8(&output.stdout).unwrap(), ) .unwrap(); let manifest = docs.first().expect("Unable to parse cargo manifest"); let workspace_root = PathBuf::from(manifest["workspace_root"].as_str().unwrap()); Arc::new(workspace_root) }; workspaces.insert(manifest_dir.to_string(), path.clone()); path } } #[cfg(feature = "_cargo_insta_internal")] impl std::str::FromStr for TestRunner { type Err = (); fn from_str(value: &str) -> Result { match value { "auto" => Ok(TestRunner::Auto), "cargo-test" => Ok(TestRunner::CargoTest), "nextest" => Ok(TestRunner::Nextest), _ => Err(()), } } } #[cfg(feature = "_cargo_insta_internal")] impl std::str::FromStr for UnreferencedSnapshots { type Err = (); fn from_str(value: &str) -> Result { match value { "auto" => Ok(UnreferencedSnapshots::Auto), "reject" | "error" => Ok(UnreferencedSnapshots::Reject), "delete" => Ok(UnreferencedSnapshots::Delete), "warn" => Ok(UnreferencedSnapshots::Warn), "ignore" => Ok(UnreferencedSnapshots::Ignore), _ => Err(()), } } } /// Memoizes a snapshot file in the reference file. 
pub fn memoize_snapshot_file(snapshot_file: &Path) { if let Ok(path) = env::var("INSTA_SNAPSHOT_REFERENCES_FILE") { let mut f = fs::OpenOptions::new() .append(true) .create(true) .open(path) .unwrap(); f.write_all(format!("{}\n", snapshot_file.display()).as_bytes()) .unwrap(); } } fn resolve<'a>(value: &'a Content, path: &[&str]) -> Option<&'a Content> { path.iter() .try_fold(value, |node, segment| match node.resolve_inner() { Content::Map(fields) => fields .iter() .find(|x| x.0.as_str() == Some(segment)) .map(|x| &x.1), Content::Struct(_, fields) | Content::StructVariant(_, _, _, fields) => { fields.iter().find(|x| x.0 == *segment).map(|x| &x.1) } _ => None, }) } insta-1.39.0/src/filters.rs000064400000000000000000000040231046102023000136400ustar 00000000000000use std::borrow::Cow; use std::iter::FromIterator; use regex::Regex; /// Represents stored filters. #[derive(Debug, Default, Clone)] #[cfg_attr(docsrs, doc(cfg(feature = "filters")))] pub struct Filters { rules: Vec<(Regex, String)>, } impl<'a> From> for Filters { fn from(value: Vec<(&'a str, &'a str)>) -> Self { Self::from_iter(value) } } impl<'a> FromIterator<(&'a str, &'a str)> for Filters { fn from_iter>(iter: I) -> Self { let mut rv = Filters::default(); for (regex, replacement) in iter { rv.add(regex, replacement); } rv } } impl Filters { /// Adds a simple regex with a replacement. pub(crate) fn add>(&mut self, regex: &str, replacement: S) { self.rules.push(( Regex::new(regex).expect("invalid regex for snapshot filter rule"), replacement.into(), )); } /// Clears all filters. pub(crate) fn clear(&mut self) { self.rules.clear(); } /// Applies all filters to the given snapshot. 
pub(crate) fn apply_to<'s>(&self, s: &'s str) -> Cow<'s, str> { let mut rv = Cow::Borrowed(s); for (regex, replacement) in &self.rules { match regex.replace_all(&rv, replacement) { Cow::Borrowed(_) => continue, Cow::Owned(value) => rv = Cow::Owned(value), }; } rv } } #[test] fn test_filters() { let mut filters = Filters::default(); filters.add("\\bhello\\b", "[NAME]"); filters.add("(a)", "[$1]"); assert_eq!( filters.apply_to("hellohello hello abc"), "hellohello [NAME] [a]bc" ); } #[test] fn test_static_str_array_conversion() { let arr: [(&'static str, &'static str); 2] = [("a1", "b1"), ("a2", "b2")]; let _ = Filters::from_iter(arr); } #[test] fn test_vec_str_conversion() { let vec: Vec<(&str, &str)> = Vec::from([("a1", "b1"), ("a2", "b2")]); let _ = Filters::from(vec); } insta-1.39.0/src/glob.rs000064400000000000000000000112771046102023000131240ustar 00000000000000use std::env; use std::path::{Path, PathBuf}; use std::sync::Mutex; use globset::{GlobBuilder, GlobMatcher}; use walkdir::WalkDir; use crate::env::get_tool_config; use crate::settings::Settings; use crate::utils::style; pub(crate) struct GlobCollector { pub(crate) fail_fast: bool, pub(crate) failed: usize, pub(crate) show_insta_hint: bool, } // the glob stack holds failure count + an indication if cargo insta review // should be run. lazy_static::lazy_static! { pub(crate) static ref GLOB_STACK: Mutex> = Mutex::default(); } lazy_static::lazy_static! { static ref GLOB_FILTER: Vec = { env::var("INSTA_GLOB_FILTER") .unwrap_or_default() .split(';') .filter(|x| !x.is_empty()) .filter_map(|filter| { GlobBuilder::new(filter) .case_insensitive(true) .build() .ok() .map(|x| x.compile_matcher()) }) .collect() }; } pub fn glob_exec(manifest_dir: &str, base: &Path, pattern: &str, mut f: F) { // If settings.allow_empty_glob() == true and `base` doesn't exist, skip // everything. This is necessary as `base` is user-controlled via `glob!/3` // and may not exist. 
let mut settings = Settings::clone_current(); if settings.allow_empty_glob() && !base.exists() { return; } let glob = GlobBuilder::new(pattern) .case_insensitive(true) .literal_separator(true) .build() .unwrap() .compile_matcher(); let walker = WalkDir::new(base).follow_links(true); let mut glob_found_matches = false; GLOB_STACK.lock().unwrap().push(GlobCollector { failed: 0, show_insta_hint: false, fail_fast: get_tool_config(manifest_dir).glob_fail_fast(), }); // step 1: collect all matching files let mut matching_files = vec![]; for file in walker { let file = file.unwrap(); let path = file.path(); let stripped_path = path.strip_prefix(base).unwrap_or(path); if !glob.is_match(stripped_path) { continue; } glob_found_matches = true; // if there is a glob filter, skip if it does not match this path if !GLOB_FILTER.is_empty() && !GLOB_FILTER.iter().any(|x| x.is_match(stripped_path)) { eprintln!("Skipping {} due to glob filter", stripped_path.display()); continue; } matching_files.push(path.to_path_buf()); } // step 2: sort, determine common prefix and run assertions matching_files.sort(); let common_prefix = find_common_prefix(&matching_files); for path in &matching_files { settings.set_input_file(path); // if there is a common prefix, use that stirp down the input file. That way we // can ensure that a glob like inputs/*/*.txt with a/file.txt and b/file.txt // does not create two identical snapshot suffixes. Instead of file.txt for both // it would end up as a/file.txt and b/file.txt. let snapshot_suffix = if let Some(prefix) = common_prefix { path.strip_prefix(prefix).unwrap().as_os_str() } else { path.file_name().unwrap() }; settings.set_snapshot_suffix(snapshot_suffix.to_str().unwrap()); settings.bind(|| { f(path); }); } let top = GLOB_STACK.lock().unwrap().pop().unwrap(); if !glob_found_matches && !settings.allow_empty_glob() { panic!("the glob! 
macro did not match any files."); } if top.failed > 0 { if top.show_insta_hint { println!( "{hint}", hint = style("To update snapshots run `cargo insta review`").dim(), ); } if top.failed > 1 { println!( "{hint}", hint = style("To enable fast failing for glob! export INSTA_GLOB_FAIL_FAST=1 as environment variable.").dim() ); } panic!( "glob! resulted in {} snapshot assertion failure{}", top.failed, if top.failed == 1 { "" } else { "s" }, ); } } fn find_common_prefix(sorted_paths: &[PathBuf]) -> Option<&Path> { let first = sorted_paths.first()?; let last = sorted_paths.last()?; let prefix_len = first .components() .zip(last.components()) .take_while(|(a, b)| a == b) .count(); if prefix_len == 0 { None } else { let mut prefix = first.components(); for _ in 0..first.components().count() - prefix_len { prefix.next_back(); } Some(prefix.as_path()) } } insta-1.39.0/src/lib.rs000064400000000000000000000306541046102023000127470ustar 00000000000000//!
//! //!

insta: a snapshot testing library for Rust

//!
//! //! # What are snapshot tests //! //! Snapshots tests (also sometimes called approval tests) are tests that //! assert values against a reference value (the snapshot). This is similar //! to how `assert_eq!` lets you compare a value against a reference value but //! unlike simple string assertions, snapshot tests let you test against complex //! values and come with comprehensive tools to review changes. //! //! Snapshot tests are particularly useful if your reference values are very //! large or change often. //! //! # What it looks like: //! //! ```no_run //! #[test] //! fn test_hello_world() { //! insta::assert_debug_snapshot!(vec![1, 2, 3]); //! } //! ``` //! //! Where are the snapshots stored? Right next to your test in a folder //! called `snapshots` as individual [`.snap` files](https://insta.rs/docs/snapshot-files/). //! //! Got curious? //! //! * [Read the introduction](https://insta.rs/docs/quickstart/) //! * [Read the main documentation](https://insta.rs/docs/) which does not just //! cover the API of the crate but also many of the details of how it works. //! * There is a screencast that shows the entire workflow: [watch the insta //! introduction screencast](https://www.youtube.com/watch?v=rCHrMqE4JOY&feature=youtu.be). //! //! # Writing Tests //! //! ``` //! use insta::assert_debug_snapshot; //! //! #[test] //! fn test_snapshots() { //! assert_debug_snapshot!(vec![1, 2, 3]); //! } //! ``` //! //! The recommended flow is to run the tests once, have them fail and check //! if the result is okay. By default the new snapshots are stored next //! to the old ones with the extra `.new` extension. Once you are satisfied //! move the new files over. To simplify this workflow you can use //! `cargo insta review` (requires //! [`cargo-insta`](https://crates.io/crates/cargo-insta)) which will let you //! interactively review them: //! //! ```text //! $ cargo test //! $ cargo insta review //! ``` //! //! # Use Without `cargo-insta` //! //! 
Note that `cargo-insta` is entirely optional. You can also just use insta //! directly from `cargo test` and control it via the `INSTA_UPDATE` environment //! variable. The default is `auto` which will write all new snapshots into //! `.snap.new` files if no CI is detected so that `cargo-insta` can pick them //! up. The following other modes are possible: //! //! - `auto`: the default. `no` for CI environments or `new` otherwise //! - `always`: overwrites old snapshot files with new ones unasked //! - `unseen`: behaves like `always` for new snapshots and `new` for others //! - `new`: write new snapshots into `.snap.new` files //! - `no`: does not update snapshot files at all (just runs tests) //! //! You can for instance first run the tests and not write and new snapshots, and //! if you like them run the tests again and update them: //! //! ```text //! INSTA_UPDATE=no cargo test //! INSTA_UPDATE=always cargo test //! ``` //! //! # Assertion Macros //! //! This crate exports multiple macros for snapshot testing: //! //! - [`assert_snapshot!`] for comparing basic string snapshots. //! - [`assert_debug_snapshot!`] for comparing [`Debug`] outputs of values. //! - [`assert_display_snapshot!`] for comparing [`Display`](std::fmt::Display) outputs of values. //! //! The following macros require the use of serde's [`Serialize`](serde::Serialize): //! #![cfg_attr( feature = "csv", doc = "- [`assert_csv_snapshot!`] for comparing CSV serialized output. (requires the `csv` feature)" )] #![cfg_attr( feature = "toml", doc = "- [`assert_toml_snapshot!`] for comparing TOML serialized output. (requires the `toml` feature)" )] #![cfg_attr( feature = "yaml", doc = "- [`assert_yaml_snapshot!`] for comparing YAML serialized output. (requires the `yaml` feature)" )] #![cfg_attr( feature = "ron", doc = "- [`assert_ron_snapshot!`] for comparing RON serialized output. 
(requires the `ron` feature)" )] #![cfg_attr( feature = "json", doc = "- [`assert_json_snapshot!`] for comparing JSON serialized output. (requires the `json` feature)" )] #![cfg_attr( feature = "json", doc = "- [`assert_compact_json_snapshot!`] for comparing JSON serialized output while preferring single-line formatting. (requires the `json` feature)" )] //! //! For macros that work with [`serde`] this crate also permits redacting of //! partial values. See [redactions in the //! documentation](https://insta.rs/docs/redactions/) for more information. //! //! # Snapshot updating //! //! During test runs snapshots will be updated according to the `INSTA_UPDATE` //! environment variable. The default is `auto` which will write snapshots for //! any failing tests into `.snap.new` files (if no CI is detected) so that //! [`cargo-insta`](https://crates.io/crates/cargo-insta) can pick them up for //! review. Normally you don't have to change this variable. //! //! `INSTA_UPDATE` modes: //! //! - `auto`: the default. `no` for CI environments or `new` otherwise //! - `new`: writes snapshots for any failing tests into `.snap.new` files, //! pending review //! - `always`: writes snapshots for any failing tests into `.snap` files, //! bypassing review //! - `unseen`: `always` for previously unseen snapshots or `new` for existing //! snapshots //! - `no`: does not write to snapshot files at all; just runs tests //! //! When `new`, `auto` or `unseen` is used, the //! [`cargo-insta`](https://crates.io/crates/cargo-insta) command can be used to //! review the snapshots conveniently: //! //! ```text //! $ cargo insta review //! ``` //! //! "enter" or "a" accepts a new snapshot, "escape" or "r" rejects, "space" or //! "s" skips the snapshot for now. //! //! For more information [read the cargo insta //! docs](https://insta.rs/docs/cli/). //! //! # Inline Snapshots //! //! Additionally snapshots can also be stored inline. In that case the format //! 
for the snapshot macros is `assert_snapshot!(reference_value, @"snapshot")`. //! The leading at sign (`@`) indicates that the following string is the //! reference value. On review, `cargo-insta` will update the string with the //! new value. //! //! Example: //! //! ```no_run //! # use insta::assert_snapshot; //! assert_snapshot!(2 + 2, @""); //! ``` //! //! Like with normal snapshots, an initial test failure will write the proposed //! value into a draft file (note that inline snapshots use `.pending-snap` //! files rather than `.snap.new` files). Running `cargo insta review` will //! review the proposed changes and update the source files on acceptance //! automatically. //! //! # Features //! //! The following features exist: //! //! * `csv`: enables CSV support (via serde) //! * `json`: enables JSON support (via serde) //! * `ron`: enables RON support (via serde) //! * `toml`: enables TOML support (via serde) //! * `yaml`: enables YAML support (via serde) //! * `redactions`: enables support for redactions //! * `filters`: enables support for filters //! * `glob`: enables support for globbing ([`glob!`]) //! * `colors`: enables color output (enabled by default) //! //! For legacy reasons the `json` and `yaml` features are enabled by default in //! limited capacity. You will receive a deprecation warning if you are not //! opting into them but for now the macros will continue to function. //! //! Enabling any of the serde based formats enables the hidden `serde` feature //! which gates some serde specific APIs such as [`Settings::set_info`]. //! //! # Dependencies //! //! `insta` tries to be light in dependencies but this is tricky to accomplish //! given what it tries to do. By default it currently depends on `serde` for //! the [`assert_toml_snapshot!`] and [`assert_yaml_snapshot!`] macros. In the //! future this default dependencies will be removed. To already benefit from //! this optimization you can disable the default features and manually opt into //! 
what you want. //! //! # Settings //! //! There are some settings that can be changed on a per-thread (and thus //! per-test) basis. For more information see [Settings]. //! //! Additionally Insta will load a YAML config file with settings that change //! the behavior of insta between runs. It's loaded from any of the following //! locations: `.config/insta.yaml`, `insta.yaml` and `.insta.yaml` from the //! workspace root. The following config options exist: //! //! ```yaml //! behavior: //! # also set by INSTA_FORCE_UPDATE //! force_update: true/false //! # also set by INSTA_REQUIRE_FULL_MATCH //! require_full_match: true/false //! # also set by INSTA_FORCE_PASS //! force_pass: true/false //! # also set by INSTA_OUTPUT //! output: "diff" | "summary" | "minimal" | "none" //! # also set by INSTA_UPDATE //! update: "auto" | "always" | "new" | "unseen" | "no" //! # also set by INSTA_GLOB_FAIL_FAST //! glob_fail_fast: true/false //! //! # these are used by cargo insta test //! test: //! # also set by INSTA_TEST_RUNNER //! runner: "auto" | "cargo-test" | "nextest" //! # automatically assume --review was passed to cargo insta test //! auto_review: true/false //! # automatically assume --accept-unseen was passed to cargo insta test //! auto_accept_unseen: true/false //! //! # these are used by cargo insta review //! review: //! # also look for snapshots in ignored folders //! include_ignored: true / false //! # also look for snapshots in hidden folders //! include_hidden: true / false //! # show a warning if undiscovered (ignored or hidden) snapshots are found. //! # defaults to true but creates a performance hit. //! warn_undiscovered: true / false //! ``` //! //! # Optional: Faster Runs //! //! Insta benefits from being compiled in release mode, even as dev dependency. //! It will compile slightly slower once, but use less memory, have faster diffs //! and just generally be more fun to use. To achieve that, opt `insta` and //! 
`similar` (the diffing library) into higher optimization in your //! `Cargo.toml`: //! //! ```yaml //! [profile.dev.package.insta] //! opt-level = 3 //! //! [profile.dev.package.similar] //! opt-level = 3 //! ``` //! //! You can also disable the default features of `insta` which will cut down on //! the compile time a bit by removing some quality of life features. #![cfg_attr(docsrs, feature(doc_cfg))] #[macro_use] mod macros; mod content; mod env; mod output; mod runtime; #[cfg(feature = "serde")] mod serialization; mod settings; mod snapshot; mod utils; #[cfg(feature = "redactions")] mod redaction; #[cfg(feature = "filters")] mod filters; #[cfg(feature = "glob")] mod glob; #[cfg(test)] mod test; pub use crate::settings::Settings; pub use crate::snapshot::{MetaData, Snapshot}; /// Exposes some library internals. /// /// You're unlikely to want to work with these objects but they /// are exposed for documentation primarily. pub mod internals { pub use crate::content::Content; #[cfg(feature = "filters")] pub use crate::filters::Filters; pub use crate::runtime::AutoName; pub use crate::settings::SettingsBindDropGuard; pub use crate::snapshot::{MetaData, SnapshotContents}; #[cfg(feature = "redactions")] pub use crate::{ redaction::{ContentPath, Redaction}, settings::Redactions, }; } // exported for cargo-insta only #[doc(hidden)] #[cfg(feature = "_cargo_insta_internal")] pub mod _cargo_insta_support { pub use crate::{ content::Error as ContentError, env::{ Error as ToolConfigError, OutputBehavior, SnapshotUpdate, TestRunner, ToolConfig, UnreferencedSnapshots, }, output::SnapshotPrinter, snapshot::PendingInlineSnapshot, snapshot::SnapshotContents, utils::is_ci, }; } // useful for redactions #[cfg(feature = "redactions")] pub use crate::redaction::{dynamic_redaction, rounded_redaction, sorted_redaction}; // these are here to make the macros work #[doc(hidden)] pub mod _macro_support { pub use crate::content::Content; pub use crate::env::get_cargo_workspace; pub use 
crate::runtime::{assert_snapshot, with_allow_duplicates, AutoName, ReferenceValue}; #[cfg(feature = "serde")] pub use crate::serialization::{serialize_value, SerializationFormat, SnapshotLocation}; #[cfg(feature = "glob")] pub use crate::glob::glob_exec; #[cfg(feature = "redactions")] pub use crate::{ redaction::Redaction, redaction::Selector, serialization::serialize_value_redacted, }; } insta-1.39.0/src/macros.rs000064400000000000000000000457141046102023000134700ustar 00000000000000/// Utility macro to return the name of the current function. #[doc(hidden)] #[macro_export] macro_rules! _function_name { () => {{ fn f() {} fn type_name_of_val(_: T) -> &'static str { std::any::type_name::() } let mut name = type_name_of_val(f).strip_suffix("::f").unwrap_or(""); while let Some(rest) = name.strip_suffix("::{{closure}}") { name = rest; } name }}; } /// Asserts a `Serialize` snapshot in CSV format. /// /// **Feature:** `csv` (disabled by default) /// /// This works exactly like [`crate::assert_yaml_snapshot!`] /// but serializes in [CSV](https://github.com/burntsushi/rust-csv) format instead of /// YAML. /// /// Example: /// /// ```no_run /// insta::assert_csv_snapshot!(vec![1, 2, 3]); /// ``` /// /// The third argument to the macro can be an object expression for redaction. /// It's in the form `{ selector => replacement }` or `match .. { selector => replacement }`. /// For more information about redactions refer to the [redactions feature in /// the guide](https://insta.rs/docs/redactions/). /// /// The snapshot name is optional but can be provided as first argument. #[cfg(feature = "csv")] #[cfg_attr(docsrs, doc(cfg(feature = "csv")))] #[macro_export] macro_rules! assert_csv_snapshot { ($($arg:tt)*) => { $crate::_assert_serialized_snapshot!(format=Csv, $($arg)*); }; } /// Asserts a `Serialize` snapshot in TOML format. 
/// /// **Feature:** `toml` (disabled by default) /// /// This works exactly like [`crate::assert_yaml_snapshot!`] /// but serializes in [TOML](https://github.com/alexcrichton/toml-rs) format instead of /// YAML. Note that TOML cannot represent all values due to limitations in the /// format. /// /// Example: /// /// ```no_run /// insta::assert_toml_snapshot!(vec![1, 2, 3]); /// ``` /// /// The third argument to the macro can be an object expression for redaction. /// It's in the form `{ selector => replacement }` or `match .. { selector => replacement }`. /// For more information about redactions refer to the [redactions feature in /// the guide](https://insta.rs/docs/redactions/). /// /// The snapshot name is optional but can be provided as first argument. #[cfg(feature = "toml")] #[cfg_attr(docsrs, doc(cfg(feature = "toml")))] #[macro_export] macro_rules! assert_toml_snapshot { ($($arg:tt)*) => { $crate::_assert_serialized_snapshot!(format=Toml, $($arg)*); }; } /// Asserts a `Serialize` snapshot in YAML format. /// /// **Feature:** `yaml` /// /// The value needs to implement the `serde::Serialize` trait and the snapshot /// will be serialized in YAML format. This does mean that unlike the debug /// snapshot variant the type of the value does not appear in the output. /// You can however use the `assert_ron_snapshot!` macro to dump out /// the value in [RON](https://github.com/ron-rs/ron/) format which retains some /// type information for more accurate comparisons. /// /// Example: /// /// ```no_run /// # use insta::*; /// assert_yaml_snapshot!(vec![1, 2, 3]); /// ``` /// /// Unlike the [`crate::assert_debug_snapshot!`] /// macro, this one has a secondary mode where redactions can be defined. /// /// The third argument to the macro can be an object expression for redaction. /// It's in the form `{ selector => replacement }` or `match .. { selector => replacement }`. 
/// For more information about redactions refer to the [redactions feature in /// the guide](https://insta.rs/docs/redactions/). /// /// Example: /// #[cfg_attr(feature = "redactions", doc = " ```no_run")] #[cfg_attr(not(feature = "redactions"), doc = " ```ignore")] /// # use insta::*; use serde::Serialize; /// # #[derive(Serialize)] struct Value; let value = Value; /// assert_yaml_snapshot!(value, { /// ".key.to.redact" => "[replacement value]", /// ".another.key.*.to.redact" => 42 /// }); /// ``` /// /// The replacement value can be a string, integer or any other primitive value. /// /// For inline usage the format is `(expression, @reference_value)` where the /// reference value must be a string literal. If you make the initial snapshot /// just use an empty string (`@""`). /// /// The snapshot name is optional but can be provided as first argument. #[cfg(feature = "yaml")] #[cfg_attr(docsrs, doc(cfg(feature = "yaml")))] #[macro_export] macro_rules! assert_yaml_snapshot { ($($arg:tt)*) => { $crate::_assert_serialized_snapshot!(format=Yaml, $($arg)*); }; } /// Asserts a `Serialize` snapshot in RON format. /// /// **Feature:** `ron` (disabled by default) /// /// This works exactly like [`assert_yaml_snapshot!`] /// but serializes in [RON](https://github.com/ron-rs/ron/) format instead of /// YAML which retains some type information for more accurate comparisons. /// /// Example: /// /// ```no_run /// # use insta::*; /// assert_ron_snapshot!(vec![1, 2, 3]); /// ``` /// /// The third argument to the macro can be an object expression for redaction. /// It's in the form `{ selector => replacement }` or `match .. { selector => replacement }`. /// For more information about redactions refer to the [redactions feature in /// the guide](https://insta.rs/docs/redactions/). /// /// The snapshot name is optional but can be provided as first argument. #[cfg(feature = "ron")] #[cfg_attr(docsrs, doc(cfg(feature = "ron")))] #[macro_export] macro_rules! 
assert_ron_snapshot { ($($arg:tt)*) => { $crate::_assert_serialized_snapshot!(format=Ron, $($arg)*); }; } /// Asserts a `Serialize` snapshot in JSON format. /// /// **Feature:** `json` /// /// This works exactly like [`assert_yaml_snapshot!`] but serializes in JSON format. /// This is normally not recommended because it makes diffs less reliable, but it can /// be useful for certain specialized situations. /// /// Example: /// /// ```no_run /// # use insta::*; /// assert_json_snapshot!(vec![1, 2, 3]); /// ``` /// /// The third argument to the macro can be an object expression for redaction. /// It's in the form `{ selector => replacement }` or `match .. { selector => replacement }`. /// For more information about redactions refer to the [redactions feature in /// the guide](https://insta.rs/docs/redactions/). /// /// The snapshot name is optional but can be provided as first argument. #[cfg(feature = "json")] #[cfg_attr(docsrs, doc(cfg(feature = "json")))] #[macro_export] macro_rules! assert_json_snapshot { ($($arg:tt)*) => { $crate::_assert_serialized_snapshot!(format=Json, $($arg)*); }; } /// Asserts a `Serialize` snapshot in compact JSON format. /// /// **Feature:** `json` /// /// This works exactly like [`assert_json_snapshot!`] but serializes into a single /// line for as long as the output is less than 120 characters. This can be useful /// in cases where you are working with small result outputs but comes at the cost /// of slightly worse diffing behavior. /// /// Example: /// /// ```no_run /// # use insta::*; /// assert_compact_json_snapshot!(vec![1, 2, 3]); /// ``` /// /// The third argument to the macro can be an object expression for redaction. /// It's in the form `{ selector => replacement }` or `match .. { selector => replacement }`. /// For more information about redactions refer to the [redactions feature in /// the guide](https://insta.rs/docs/redactions/). /// /// The snapshot name is optional but can be provided as first argument. 
#[cfg(feature = "json")] #[cfg_attr(docsrs, doc(cfg(feature = "json")))] #[macro_export] macro_rules! assert_compact_json_snapshot { ($($arg:tt)*) => { $crate::_assert_serialized_snapshot!(format=JsonCompact, $($arg)*); }; } // This macro handles optional trailing commas. #[doc(hidden)] #[macro_export] macro_rules! _assert_serialized_snapshot { // If there are redaction expressions and an inline snapshot, capture // the redactions expressions and pass to `_assert_snapshot_base` // // Note that if we could unify the Inline & File representations of snapshots // redactions we could unify some of these branches. (format=$format:ident, $value:expr, $(match ..)? {$($k:expr => $v:expr),* $(,)?}, @$snapshot:literal $(,)?) => {{ let transform = |value| { let (_, value) = $crate::_prepare_snapshot_for_redaction!(value, {$($k => $v),*}, $format, Inline); value }; $crate::_assert_snapshot_base!(transform=transform, $value, @$snapshot); }}; // If there are redaction expressions and no name, add a auto-generated name, call self (format=$format:ident, $value:expr, $(match ..)? {$($k:expr => $v:expr),* $(,)?} $(,)?) => {{ $crate::_assert_serialized_snapshot!(format=$format, $crate::_macro_support::AutoName, $value, {$($k => $v),*}); }}; // If there are redaction expressions, capture and pass to `_assert_snapshot_base` (format=$format:ident, $name:expr, $value:expr, $(match ..)? {$($k:expr => $v:expr),* $(,)?} $(,)?) => {{ let transform = |value| { let (_, value) = $crate::_prepare_snapshot_for_redaction!(value, {$($k => $v),*}, $format, File); value }; $crate::_assert_snapshot_base!(transform=transform, $name, $value); }}; // If there's an inline snapshot, capture serialization function and pass to // `_assert_snapshot_base`, specifying `Inline` (format=$format:ident, $($arg:expr),*, @$snapshot:literal $(,)?) 
=> {{ let transform = |value| {$crate::_macro_support::serialize_value( &value, $crate::_macro_support::SerializationFormat::$format, $crate::_macro_support::SnapshotLocation::Inline )}; $crate::_assert_snapshot_base!(transform = transform, $($arg),*, @$snapshot); }}; // Capture serialization function and pass to `_assert_snapshot_base`, // specifying `File` (format=$format:ident, $($arg:expr),* $(,)?) => {{ let transform = |value| {$crate::_macro_support::serialize_value( &value, $crate::_macro_support::SerializationFormat::$format, $crate::_macro_support::SnapshotLocation::File )}; $crate::_assert_snapshot_base!(transform = transform, $($arg),*); }}; } #[cfg(feature = "redactions")] #[doc(hidden)] #[macro_export] macro_rules! _prepare_snapshot_for_redaction { ($value:expr, {$($k:expr => $v:expr),*}, $format:ident, $location:ident) => { { let vec = std::vec![ $(( $crate::_macro_support::Selector::parse($k).unwrap(), $crate::_macro_support::Redaction::from($v) ),)* ]; let value = $crate::_macro_support::serialize_value_redacted( &$value, &vec, $crate::_macro_support::SerializationFormat::$format, $crate::_macro_support::SnapshotLocation::$location ); (vec, value) } } } #[cfg(not(feature = "redactions"))] #[doc(hidden)] #[macro_export] macro_rules! _prepare_snapshot_for_redaction { ($value:expr, {$($k:expr => $v:expr),*}, $format:ident, $location:ident) => { compile_error!("insta was compiled without redaction support."); }; } /// Asserts a `Debug` snapshot. /// /// The value needs to implement the `fmt::Debug` trait. This is useful for /// simple values that do not implement the `Serialize` trait, but does not /// permit redactions. /// /// Debug is called with `"{:#?}"`, which means this uses pretty-print. #[macro_export] macro_rules! assert_debug_snapshot { ($($arg:tt)*) => { $crate::_assert_snapshot_base!(transform=|v| std::format!("{:#?}", v), $($arg)*) }; } // A helper macro which takes a closure as `transform`, and runs the closure on // the value. 
This allows us to implement other macros with a small wrapper. All // snapshot macros eventually call this macro. // // This macro handles optional trailing commas. #[doc(hidden)] #[macro_export] macro_rules! _assert_snapshot_base { // If there's an inline literal value, wrap the literal in a // `ReferenceValue::Inline`, call self. (transform=$transform:expr, $($arg:expr),*, @$snapshot:literal $(,)?) => { $crate::_assert_snapshot_base!( transform = $transform, #[allow(clippy::needless_raw_string_hashes)] $crate::_macro_support::ReferenceValue::Inline($snapshot), $($arg),* ) }; // If there's no debug_expr, use the stringified value, call self. (transform=$transform:expr, $name:expr, $value:expr $(,)?) => { $crate::_assert_snapshot_base!(transform = $transform, $name, $value, stringify!($value)) }; // If there's no name (and necessarily no debug expr), auto generate the // name, call self. (transform=$transform:expr, $value:expr $(,)?) => { $crate::_assert_snapshot_base!( transform = $transform, $crate::_macro_support::AutoName, $value ) }; // The main macro body — every call to this macro should end up here. (transform=$transform:expr, $name:expr, $value:expr, $debug_expr:expr $(,)?) => { $crate::_macro_support::assert_snapshot( $name.into(), #[allow(clippy::redundant_closure_call)] &$transform(&$value), env!("CARGO_MANIFEST_DIR"), $crate::_function_name!(), module_path!(), file!(), line!(), $debug_expr, ) .unwrap() }; } /// Asserts a `Display` snapshot. /// /// This is now deprecated, replaced by the more generic `assert_snapshot!()` #[macro_export] #[deprecated = "use assert_snapshot!() instead"] macro_rules! assert_display_snapshot { ($($arg:tt)*) => { $crate::assert_snapshot!($($arg)*) }; } /// Asserts a string snapshot. /// /// This is the simplest of all assertion methods. It accepts any value that /// implements `fmt::Display`. 
/// /// ```no_run /// # use insta::*; /// // implicitly named /// assert_snapshot!("reference value to snapshot"); /// // named /// assert_snapshot!("snapshot_name", "reference value to snapshot"); /// // inline /// assert_snapshot!("reference value", @"reference value"); /// ``` /// /// Optionally a third argument can be given as an expression to be stringified /// as the debug expression. For more information on this, check out /// . #[macro_export] macro_rules! assert_snapshot { ($($arg:tt)*) => { $crate::_assert_snapshot_base!(transform=|v| std::format!("{}", v), $($arg)*) }; } /// Settings configuration macro. /// /// This macro lets you bind some [`Settings`](crate::Settings) temporarily. The first argument /// takes key value pairs that should be set, the second is the block to /// execute. All settings can be set (`sort_maps => value` maps to `set_sort_maps(value)`). /// The exception are redactions which can only be set to a vector this way. /// /// This example: /// /// ```rust /// insta::with_settings!({sort_maps => true}, { /// // run snapshot test here /// }); /// ``` /// /// Is equivalent to the following: /// /// ```rust /// # use insta::Settings; /// let mut settings = Settings::clone_current(); /// settings.set_sort_maps(true); /// settings.bind(|| { /// // run snapshot test here /// }); /// ``` /// /// Note: before insta 0.17 this macro used /// [`Settings::new`](crate::Settings::new) which meant that original settings /// were always reset rather than extended. #[macro_export] macro_rules! with_settings { ({$($k:ident => $v:expr),*$(,)?}, $body:block) => {{ let mut settings = $crate::Settings::clone_current(); $( settings._private_inner_mut().$k($v); )* settings.bind(|| $body) }} } /// Executes a closure for all input files matching a glob. /// /// The closure is passed the path to the file. You can use [`std::fs::read_to_string`] /// or similar functions to load the file and process it. 
/// /// ``` /// # use insta::{assert_snapshot, glob, Settings}; /// # let mut settings = Settings::clone_current(); /// # settings.set_allow_empty_glob(true); /// # let _dropguard = settings.bind_to_scope(); /// use std::fs; /// /// glob!("inputs/*.txt", |path| { /// let input = fs::read_to_string(path).unwrap(); /// assert_snapshot!(input.to_uppercase()); /// }); /// ``` /// /// The `INSTA_GLOB_FILTER` environment variable can be set to only execute certain files. /// The format of the filter is a semicolon separated filter. For instance by setting /// `INSTA_GLOB_FILTER` to `foo-*txt;bar-*.txt` only files starting with `foo-` or `bar-` /// end ending in `.txt` will be executed. When using `cargo-insta` the `--glob-filter` /// option can be used instead. /// /// Another effect of the globbing system is that snapshot failures within the glob macro /// are deferred until the end of of it. In other words this means that each snapshot /// assertion within the `glob!` block are reported. It can be disabled by setting /// `INSTA_GLOB_FAIL_FAST` environment variable to `1`. /// /// A three-argument version of this macro allows specifying a base directory /// for the glob to start in. This allows globbing in arbitrary directories, /// including parent directories: /// /// ``` /// # use insta::{assert_snapshot, glob, Settings}; /// # let mut settings = Settings::clone_current(); /// # settings.set_allow_empty_glob(true); /// # let _dropguard = settings.bind_to_scope(); /// use std::fs; /// /// glob!("../test_data", "inputs/*.txt", |path| { /// let input = fs::read_to_string(path).unwrap(); /// assert_snapshot!(input.to_uppercase()); /// }); /// ``` #[cfg(feature = "glob")] #[cfg_attr(docsrs, doc(cfg(feature = "glob")))] #[macro_export] macro_rules! 
glob { ($base_path:expr, $glob:expr, $closure:expr) => {{ use std::path::Path; let base = $crate::_macro_support::get_cargo_workspace(env!("CARGO_MANIFEST_DIR")) .join(Path::new(file!()).parent().unwrap()) .join($base_path) .to_path_buf(); // we try to canonicalize but on some platforms (eg: wasm) that might not work, so // we instead silently fall back. let base = base.canonicalize().unwrap_or_else(|_| base); $crate::_macro_support::glob_exec(env!("CARGO_MANIFEST_DIR"), &base, $glob, $closure); }}; ($glob:expr, $closure:expr) => {{ insta::glob!(".", $glob, $closure) }}; } /// Utility macro to permit a multi-snapshot run where all snapshots match. /// /// Within this block, insta will allow an assertion to be run more than once /// (even inline) without generating another snapshot. Instead it will assert /// that snapshot expressions visited more than once are matching. /// /// ```rust /// insta::allow_duplicates! { /// for x in (0..10).step_by(2) { /// let is_even = x % 2 == 0; /// insta::assert_debug_snapshot!(is_even, @"true"); /// } /// } /// ``` /// /// The first snapshot assertion will be used as a gold master and every further /// assertion will be checked against it. If they don't match the assertion will /// fail. #[macro_export] macro_rules! allow_duplicates { ($($x:tt)*) => { $crate::_macro_support::with_allow_duplicates(|| { $($x)* }) } } insta-1.39.0/src/output.rs000064400000000000000000000261661046102023000135440ustar 00000000000000use std::borrow::Cow; use std::{path::Path, time::Duration}; use similar::{Algorithm, ChangeTag, TextDiff}; use crate::content::yaml; use crate::snapshot::{MetaData, Snapshot}; use crate::utils::{format_rust_expression, style, term_width}; /// Snapshot printer utility. 
pub struct SnapshotPrinter<'a> { workspace_root: &'a Path, old_snapshot: Option<&'a Snapshot>, new_snapshot: &'a Snapshot, old_snapshot_hint: &'a str, new_snapshot_hint: &'a str, show_info: bool, show_diff: bool, title: Option<&'a str>, line: Option, snapshot_file: Option<&'a Path>, } impl<'a> SnapshotPrinter<'a> { pub fn new( workspace_root: &'a Path, old_snapshot: Option<&'a Snapshot>, new_snapshot: &'a Snapshot, ) -> SnapshotPrinter<'a> { SnapshotPrinter { workspace_root, old_snapshot, new_snapshot, old_snapshot_hint: "old snapshot", new_snapshot_hint: "new results", show_info: false, show_diff: false, title: None, line: None, snapshot_file: None, } } pub fn set_snapshot_hints(&mut self, old: &'a str, new: &'a str) { self.old_snapshot_hint = old; self.new_snapshot_hint = new; } pub fn set_show_info(&mut self, yes: bool) { self.show_info = yes; } pub fn set_show_diff(&mut self, yes: bool) { self.show_diff = yes; } pub fn set_title(&mut self, title: Option<&'a str>) { self.title = title; } pub fn set_line(&mut self, line: Option) { self.line = line; } pub fn set_snapshot_file(&mut self, file: Option<&'a Path>) { self.snapshot_file = file; } pub fn print(&self) { if let Some(title) = self.title { let width = term_width(); println!( "{title:━^width$}", title = style(format!(" {} ", title)).bold(), width = width ); } self.print_snapshot_diff(); } fn print_snapshot_diff(&self) { self.print_snapshot_summary(); if self.show_diff { self.print_changeset(); } else { self.print_snapshot(); } } fn print_snapshot_summary(&self) { print_snapshot_summary( self.workspace_root, self.new_snapshot, self.snapshot_file, self.line, ); } fn print_info(&self) { print_info(self.new_snapshot.metadata()); } fn print_snapshot(&self) { print_line(term_width()); let new_contents = self.new_snapshot.contents_str(); let width = term_width(); if self.show_info { self.print_info(); } println!("Snapshot Contents:"); println!("──────┬{:─^1$}", "", width.saturating_sub(7)); for (idx, line) in 
new_contents.lines().enumerate() { println!("{:>5} │ {}", style(idx + 1).cyan().dim().bold(), line); } println!("──────┴{:─^1$}", "", width.saturating_sub(7)); } fn print_changeset(&self) { let old = self.old_snapshot.as_ref().map_or("", |x| x.contents_str()); let new = self.new_snapshot.contents_str(); let newlines_matter = newlines_matter(old, new); let width = term_width(); let diff = TextDiff::configure() .algorithm(Algorithm::Patience) .timeout(Duration::from_millis(500)) .diff_lines(old, new); print_line(width); if self.show_info { self.print_info(); } if !old.is_empty() { println!( "{}", style(format_args!("-{}", self.old_snapshot_hint)).red() ); } println!( "{}", style(format_args!("+{}", self.new_snapshot_hint)).green() ); println!("────────────┬{:─^1$}", "", width.saturating_sub(13)); let mut has_changes = false; for (idx, group) in diff.grouped_ops(4).iter().enumerate() { if idx > 0 { println!("┈┈┈┈┈┈┈┈┈┈┈┈┼{:┈^1$}", "", width.saturating_sub(13)); } for op in group { for change in diff.iter_inline_changes(op) { match change.tag() { ChangeTag::Insert => { has_changes = true; print!( "{:>5} {:>5} │{}", "", style(change.new_index().unwrap()).cyan().dim().bold(), style("+").green(), ); for &(emphasized, change) in change.values() { let change = render_invisible(change, newlines_matter); if emphasized { print!("{}", style(change).green().underlined()); } else { print!("{}", style(change).green()); } } } ChangeTag::Delete => { has_changes = true; print!( "{:>5} {:>5} │{}", style(change.old_index().unwrap()).cyan().dim(), "", style("-").red(), ); for &(emphasized, change) in change.values() { let change = render_invisible(change, newlines_matter); if emphasized { print!("{}", style(change).red().underlined()); } else { print!("{}", style(change).red()); } } } ChangeTag::Equal => { print!( "{:>5} {:>5} │ ", style(change.old_index().unwrap()).cyan().dim(), style(change.new_index().unwrap()).cyan().dim().bold(), ); for &(_, change) in change.values() { let change 
= render_invisible(change, newlines_matter); print!("{}", style(change).dim()); } } } if change.missing_newline() { println!(); } } } } if !has_changes { println!( "{:>5} {:>5} │{}", "", style("-").dim(), style(" snapshots are matching").cyan(), ); } println!("────────────┴{:─^1$}", "", width.saturating_sub(13)); } } /// Prints the summary of a snapshot pub fn print_snapshot_summary( workspace_root: &Path, snapshot: &Snapshot, snapshot_file: Option<&Path>, mut line: Option, ) { // default to old assertion line from snapshot. if line.is_none() { line = snapshot.metadata().assertion_line(); } if let Some(snapshot_file) = snapshot_file { let snapshot_file = workspace_root .join(snapshot_file) .strip_prefix(workspace_root) .ok() .map(|x| x.to_path_buf()) .unwrap_or_else(|| snapshot_file.to_path_buf()); println!( "Snapshot file: {}", style(snapshot_file.display()).cyan().underlined() ); } if let Some(name) = snapshot.snapshot_name() { println!("Snapshot: {}", style(name).yellow()); } else { println!("Snapshot: {}", style("").dim()); } if let Some(ref value) = snapshot.metadata().get_relative_source(workspace_root) { println!( "Source: {}{}", style(value.display()).cyan(), if let Some(line) = line { format!(":{}", style(line).bold()) } else { "".to_string() } ); } if let Some(ref value) = snapshot.metadata().input_file() { println!("Input file: {}", style(value).cyan()); } } fn print_line(width: usize) { println!("{:─^1$}", "", width); } fn trailing_newline(s: &str) -> &str { if s.ends_with("\r\n") { "\r\n" } else if s.ends_with('\r') { "\r" } else if s.ends_with('\n') { "\n" } else { "" } } fn detect_newlines(s: &str) -> (bool, bool, bool) { let mut last_char = None; let mut detected_crlf = false; let mut detected_cr = false; let mut detected_lf = false; for c in s.chars() { if c == '\n' { if last_char.take() == Some('\r') { detected_crlf = true; } else { detected_lf = true; } } if last_char == Some('\r') { detected_cr = true; } last_char = Some(c); } if last_char == 
Some('\r') { detected_cr = true; } (detected_cr, detected_crlf, detected_lf) } fn newlines_matter(left: &str, right: &str) -> bool { if trailing_newline(left) != trailing_newline(right) { return true; } let (cr1, crlf1, lf1) = detect_newlines(left); let (cr2, crlf2, lf2) = detect_newlines(right); !matches!( (cr1 || cr2, crlf1 || crlf2, lf1 || lf2), (false, false, false) | (true, false, false) | (false, true, false) | (false, false, true) ) } fn render_invisible(s: &str, newlines_matter: bool) -> Cow<'_, str> { if newlines_matter || s.find(&['\x1b', '\x07', '\x08', '\x7f'][..]).is_some() { Cow::Owned( s.replace('\r', "␍\r") .replace('\n', "␊\n") .replace("␍\r␊\n", "␍␊\r\n") .replace('\x07', "␇") .replace('\x08', "␈") .replace('\x1b', "␛") .replace('\x7f', "␡"), ) } else { Cow::Borrowed(s) } } fn print_info(metadata: &MetaData) { let width = term_width(); if let Some(expr) = metadata.expression() { println!("Expression: {}", style(format_rust_expression(expr))); print_line(width); } if let Some(descr) = metadata.description() { println!("{}", descr); print_line(width); } if let Some(info) = metadata.private_info() { let out = yaml::to_string(info); // TODO: does the yaml output always start with '---'? println!("{}", out.trim().strip_prefix("---").unwrap().trim_start()); print_line(width); } } #[test] fn test_invisible() { assert_eq!( render_invisible("\r\n\x1b\r\x07\x08\x7f\n", true), "␍␊\r\n␛␍\r␇␈␡␊\n" ); } insta-1.39.0/src/redaction.rs000064400000000000000000000453621046102023000141530ustar 00000000000000use pest::Parser; use pest_derive::Parser; use std::borrow::Cow; use std::fmt; use crate::content::Content; #[derive(Debug)] pub struct SelectorParseError(Box>); impl SelectorParseError { /// Return the column of where the error occurred. pub fn column(&self) -> usize { match self.0.line_col { pest::error::LineColLocation::Pos((_, col)) => col, pest::error::LineColLocation::Span((_, col), _) => col, } } } /// Represents a path for a callback function. 
/// /// This can be converted into a string with `to_string` to see a stringified /// path that the selector matched. #[derive(Clone, Debug)] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub struct ContentPath<'a>(&'a [PathItem]); impl<'a> fmt::Display for ContentPath<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for item in self.0.iter() { write!(f, ".")?; match *item { PathItem::Content(ref ctx) => { if let Some(s) = ctx.as_str() { write!(f, "{}", s)?; } else { write!(f, "")?; } } PathItem::Field(name) => write!(f, "{}", name)?, PathItem::Index(idx, _) => write!(f, "{}", idx)?, } } Ok(()) } } /// Replaces a value with another one. /// Represents a redaction. #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub enum Redaction { /// Static redaction with new content. Static(Content), /// Redaction with new content. Dynamic(Box) -> Content + Sync + Send>), } macro_rules! impl_from { ($ty:ty) => { impl From<$ty> for Redaction { fn from(value: $ty) -> Redaction { Redaction::Static(Content::from(value)) } } }; } impl_from!(()); impl_from!(bool); impl_from!(u8); impl_from!(u16); impl_from!(u32); impl_from!(u64); impl_from!(i8); impl_from!(i16); impl_from!(i32); impl_from!(i64); impl_from!(f32); impl_from!(f64); impl_from!(char); impl_from!(String); impl_from!(Vec); impl<'a> From<&'a str> for Redaction { fn from(value: &'a str) -> Redaction { Redaction::Static(Content::from(value)) } } impl<'a> From<&'a [u8]> for Redaction { fn from(value: &'a [u8]) -> Redaction { Redaction::Static(Content::from(value)) } } /// Creates a dynamic redaction. /// /// This can be used to redact a value with a different value but instead of /// statically declaring it a dynamic value can be computed. This can also /// be used to perform assertions before replacing the value. 
/// /// The closure is passed two arguments: the value as [`Content`] /// and the path that was selected (as [`ContentPath`]) /// /// Example: /// /// ```rust /// # use insta::{Settings, dynamic_redaction}; /// # let mut settings = Settings::new(); /// settings.add_redaction(".id", dynamic_redaction(|value, path| { /// assert_eq!(path.to_string(), ".id"); /// assert_eq!( /// value /// .as_str() /// .unwrap() /// .chars() /// .filter(|&c| c == '-') /// .count(), /// 4 /// ); /// "[uuid]" /// })); /// ``` #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn dynamic_redaction(func: F) -> Redaction where I: Into, F: Fn(Content, ContentPath<'_>) -> I + Send + Sync + 'static, { Redaction::Dynamic(Box::new(move |c, p| func(c, p).into())) } /// Creates a dynamic redaction that sorts the value at the selector. /// /// This is useful to force something like a set or map to be ordered to make /// it deterministic. This is necessary as insta's serialization support is /// based on serde which does not have native set support. As a result vectors /// (which need to retain order) and sets (which should be given a stable order) /// look the same. 
/// /// ```rust /// # use insta::{Settings, sorted_redaction}; /// # let mut settings = Settings::new(); /// settings.add_redaction(".flags", sorted_redaction()); /// ``` #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn sorted_redaction() -> Redaction { fn sort(mut value: Content, _path: ContentPath) -> Content { match value.resolve_inner_mut() { Content::Seq(ref mut val) => { val.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)) } Content::Map(ref mut val) => { val.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)) } Content::Struct(_, ref mut fields) | Content::StructVariant(_, _, _, ref mut fields) => { fields.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)) } _ => {} } value } dynamic_redaction(sort) } /// Creates a redaction that rounds floating point numbers to a given /// number of decimal places. /// /// ```rust /// # use insta::{Settings, rounded_redaction}; /// # let mut settings = Settings::new(); /// settings.add_redaction(".sum", rounded_redaction(2)); /// ``` #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn rounded_redaction(decimals: usize) -> Redaction { dynamic_redaction(move |value: Content, _path: ContentPath| -> Content { let f = match value.resolve_inner() { Content::F32(f) => *f as f64, Content::F64(f) => *f, _ => return value, }; let x = 10f64.powf(decimals as f64); Content::F64((f * x).round() / x) }) } impl Redaction { /// Performs the redaction of the value at the given path. 
fn redact(&self, value: Content, path: &[PathItem]) -> Content { match *self { Redaction::Static(ref new_val) => new_val.clone(), Redaction::Dynamic(ref callback) => callback(value, ContentPath(path)), } } } #[derive(Parser)] #[grammar = "select_grammar.pest"] pub struct SelectParser; #[derive(Debug)] pub enum PathItem { Content(Content), Field(&'static str), Index(u64, u64), } impl PathItem { fn as_str(&self) -> Option<&str> { match *self { PathItem::Content(ref content) => content.as_str(), PathItem::Field(s) => Some(s), PathItem::Index(..) => None, } } fn as_u64(&self) -> Option { match *self { PathItem::Content(ref content) => content.as_u64(), PathItem::Field(_) => None, PathItem::Index(idx, _) => Some(idx), } } fn range_check(&self, start: Option, end: Option) -> bool { fn expand_range(sel: i64, len: i64) -> i64 { if sel < 0 { (len + sel).max(0) } else { sel } } let (idx, len) = match *self { PathItem::Index(idx, len) => (idx as i64, len as i64), _ => return false, }; match (start, end) { (None, None) => true, (None, Some(end)) => idx < expand_range(end, len), (Some(start), None) => idx >= expand_range(start, len), (Some(start), Some(end)) => { idx >= expand_range(start, len) && idx < expand_range(end, len) } } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Segment<'a> { DeepWildcard, Wildcard, Key(Cow<'a, str>), Index(u64), Range(Option, Option), } #[derive(Debug, Clone)] pub struct Selector<'a> { selectors: Vec>>, } impl<'a> Selector<'a> { pub fn parse(selector: &'a str) -> Result, SelectorParseError> { let pair = SelectParser::parse(Rule::selectors, selector) .map_err(Box::new) .map_err(SelectorParseError)? 
.next() .unwrap(); let mut rv = vec![]; for selector_pair in pair.into_inner() { match selector_pair.as_rule() { Rule::EOI => break, other => assert_eq!(other, Rule::selector), } let mut segments = vec![]; let mut have_deep_wildcard = false; for segment_pair in selector_pair.into_inner() { segments.push(match segment_pair.as_rule() { Rule::identity => continue, Rule::wildcard => Segment::Wildcard, Rule::deep_wildcard => { if have_deep_wildcard { return Err(SelectorParseError(Box::new( pest::error::Error::new_from_span( pest::error::ErrorVariant::CustomError { message: "deep wildcard used twice".into(), }, segment_pair.as_span(), ), ))); } have_deep_wildcard = true; Segment::DeepWildcard } Rule::key => Segment::Key(Cow::Borrowed(&segment_pair.as_str()[1..])), Rule::subscript => { let subscript_rule = segment_pair.into_inner().next().unwrap(); match subscript_rule.as_rule() { Rule::int => Segment::Index(subscript_rule.as_str().parse().unwrap()), Rule::string => { let sq = subscript_rule.as_str(); let s = &sq[1..sq.len() - 1]; let mut was_backslash = false; Segment::Key(if s.bytes().any(|x| x == b'\\') { Cow::Owned( s.chars() .filter_map(|c| { let rv = match c { '\\' if !was_backslash => { was_backslash = true; return None; } other => other, }; was_backslash = false; Some(rv) }) .collect(), ) } else { Cow::Borrowed(s) }) } _ => unreachable!(), } } Rule::full_range => Segment::Range(None, None), Rule::range => { let mut int_rule = segment_pair .into_inner() .map(|x| x.as_str().parse().unwrap()); Segment::Range(int_rule.next(), int_rule.next()) } Rule::range_to => { let int_rule = segment_pair.into_inner().next().unwrap(); Segment::Range(None, int_rule.as_str().parse().ok()) } Rule::range_from => { let int_rule = segment_pair.into_inner().next().unwrap(); Segment::Range(int_rule.as_str().parse().ok(), None) } _ => unreachable!(), }); } rv.push(segments); } Ok(Selector { selectors: rv }) } pub fn make_static(self) -> Selector<'static> { Selector { selectors: self 
.selectors .into_iter() .map(|parts| { parts .into_iter() .map(|x| match x { Segment::Key(x) => Segment::Key(Cow::Owned(x.into_owned())), Segment::Index(x) => Segment::Index(x), Segment::Wildcard => Segment::Wildcard, Segment::DeepWildcard => Segment::DeepWildcard, Segment::Range(a, b) => Segment::Range(a, b), }) .collect() }) .collect(), } } fn segment_is_match(&self, segment: &Segment, element: &PathItem) -> bool { match *segment { Segment::Wildcard => true, Segment::DeepWildcard => true, Segment::Key(ref k) => element.as_str() == Some(k), Segment::Index(i) => element.as_u64() == Some(i), Segment::Range(start, end) => element.range_check(start, end), } } fn selector_is_match(&self, selector: &[Segment], path: &[PathItem]) -> bool { if let Some(idx) = selector.iter().position(|x| *x == Segment::DeepWildcard) { let forward_sel = &selector[..idx]; let backward_sel = &selector[idx + 1..]; if path.len() <= idx { return false; } for (segment, element) in forward_sel.iter().zip(path.iter()) { if !self.segment_is_match(segment, element) { return false; } } for (segment, element) in backward_sel.iter().rev().zip(path.iter().rev()) { if !self.segment_is_match(segment, element) { return false; } } true } else { if selector.len() != path.len() { return false; } for (segment, element) in selector.iter().zip(path.iter()) { if !self.segment_is_match(segment, element) { return false; } } true } } pub fn is_match(&self, path: &[PathItem]) -> bool { for selector in &self.selectors { if self.selector_is_match(selector, path) { return true; } } false } pub fn redact(&self, value: Content, redaction: &Redaction) -> Content { self.redact_impl(value, redaction, &mut vec![]) } fn redact_seq( &self, seq: Vec, redaction: &Redaction, path: &mut Vec, ) -> Vec { let len = seq.len(); seq.into_iter() .enumerate() .map(|(idx, value)| { path.push(PathItem::Index(idx as u64, len as u64)); let new_value = self.redact_impl(value, redaction, path); path.pop(); new_value }) .collect() } fn 
redact_struct( &self, seq: Vec<(&'static str, Content)>, redaction: &Redaction, path: &mut Vec, ) -> Vec<(&'static str, Content)> { seq.into_iter() .map(|(key, value)| { path.push(PathItem::Field(key)); let new_value = self.redact_impl(value, redaction, path); path.pop(); (key, new_value) }) .collect() } fn redact_impl( &self, value: Content, redaction: &Redaction, path: &mut Vec, ) -> Content { if self.is_match(path) { redaction.redact(value, path) } else { match value { Content::Map(map) => Content::Map( map.into_iter() .map(|(key, value)| { path.push(PathItem::Field("$key")); let new_key = self.redact_impl(key.clone(), redaction, path); path.pop(); path.push(PathItem::Content(key)); let new_value = self.redact_impl(value, redaction, path); path.pop(); (new_key, new_value) }) .collect(), ), Content::Seq(seq) => Content::Seq(self.redact_seq(seq, redaction, path)), Content::Tuple(seq) => Content::Tuple(self.redact_seq(seq, redaction, path)), Content::TupleStruct(name, seq) => { Content::TupleStruct(name, self.redact_seq(seq, redaction, path)) } Content::TupleVariant(name, variant_index, variant, seq) => Content::TupleVariant( name, variant_index, variant, self.redact_seq(seq, redaction, path), ), Content::Struct(name, seq) => { Content::Struct(name, self.redact_struct(seq, redaction, path)) } Content::StructVariant(name, variant_index, variant, seq) => { Content::StructVariant( name, variant_index, variant, self.redact_struct(seq, redaction, path), ) } Content::NewtypeStruct(name, inner) => Content::NewtypeStruct( name, Box::new(self.redact_impl(*inner, redaction, path)), ), Content::NewtypeVariant(name, index, variant_name, inner) => { Content::NewtypeVariant( name, index, variant_name, Box::new(self.redact_impl(*inner, redaction, path)), ) } Content::Some(contents) => { Content::Some(Box::new(self.redact_impl(*contents, redaction, path))) } other => other, } } } } #[test] fn test_range_checks() { use similar_asserts::assert_eq; assert_eq!(PathItem::Index(0, 
10).range_check(None, Some(-1)), true); assert_eq!(PathItem::Index(9, 10).range_check(None, Some(-1)), false); assert_eq!(PathItem::Index(0, 10).range_check(Some(1), Some(-1)), false); assert_eq!(PathItem::Index(1, 10).range_check(Some(1), Some(-1)), true); assert_eq!(PathItem::Index(9, 10).range_check(Some(1), Some(-1)), false); assert_eq!(PathItem::Index(0, 10).range_check(Some(1), None), false); assert_eq!(PathItem::Index(1, 10).range_check(Some(1), None), true); assert_eq!(PathItem::Index(9, 10).range_check(Some(1), None), true); } insta-1.39.0/src/runtime.rs000064400000000000000000000611311046102023000136560ustar 00000000000000use std::borrow::Cow; use std::cell::RefCell; use std::collections::{BTreeMap, BTreeSet}; use std::error::Error; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; use std::str; use std::sync::{Arc, Mutex}; use crate::env::{ get_cargo_workspace, get_tool_config, memoize_snapshot_file, snapshot_update_behavior, OutputBehavior, SnapshotUpdateBehavior, ToolConfig, }; use crate::output::SnapshotPrinter; use crate::settings::Settings; use crate::snapshot::{MetaData, PendingInlineSnapshot, Snapshot, SnapshotContents}; use crate::utils::{path_to_storage, style}; lazy_static::lazy_static! { static ref TEST_NAME_COUNTERS: Mutex> = Mutex::new(BTreeMap::new()); static ref TEST_NAME_CLASH_DETECTION: Mutex> = Mutex::new(BTreeMap::new()); static ref INLINE_DUPLICATES: Mutex> = Mutex::new(BTreeSet::new()); } thread_local! { static RECORDED_DUPLICATES: RefCell>> = RefCell::default() } // This macro is basically eprintln but without being captured and // hidden by the test runner. macro_rules! elog { () => (write!(std::io::stderr()).ok()); ($($arg:tt)*) => ({ writeln!(std::io::stderr(), $($arg)*).ok(); }) } /// Special marker to use an automatic name. /// /// This can be passed as a snapshot name in a macro to explicitly tell /// insta to use the automatic name. This is useful in ambiguous syntax /// situations. 
#[derive(Debug)] pub struct AutoName; impl From for ReferenceValue<'static> { fn from(_value: AutoName) -> ReferenceValue<'static> { ReferenceValue::Named(None) } } impl From> for ReferenceValue<'static> { fn from(value: Option) -> ReferenceValue<'static> { ReferenceValue::Named(value.map(Cow::Owned)) } } impl From for ReferenceValue<'static> { fn from(value: String) -> ReferenceValue<'static> { ReferenceValue::Named(Some(Cow::Owned(value))) } } impl<'a> From> for ReferenceValue<'a> { fn from(value: Option<&'a str>) -> ReferenceValue<'a> { ReferenceValue::Named(value.map(Cow::Borrowed)) } } impl<'a> From<&'a str> for ReferenceValue<'a> { fn from(value: &'a str) -> ReferenceValue<'a> { ReferenceValue::Named(Some(Cow::Borrowed(value))) } } pub enum ReferenceValue<'a> { Named(Option>), Inline(&'a str), } fn is_doctest(function_name: &str) -> bool { function_name.starts_with("rust_out::main::_doctest") } fn detect_snapshot_name( function_name: &str, module_path: &str, inline: bool, is_doctest: bool, ) -> Result { let mut name = function_name; // simplify doctest names if is_doctest && !inline { panic!("Cannot determine reliable names for snapshot in doctests. Please use explicit names instead."); } // clean test name first name = name.rsplit("::").next().unwrap(); let mut test_prefixed = false; if name.starts_with("test_") { name = &name[5..]; test_prefixed = true; } // next check if we need to add a suffix let name = add_suffix_to_snapshot_name(Cow::Borrowed(name)); let key = format!("{}::{}", module_path.replace("::", "__"), name); // because fn foo and fn test_foo end up with the same snapshot name we // make sure we detect this here and raise an error. 
let mut name_clash_detection = TEST_NAME_CLASH_DETECTION .lock() .unwrap_or_else(|x| x.into_inner()); match name_clash_detection.get(&key) { None => { name_clash_detection.insert(key.clone(), test_prefixed); } Some(&was_test_prefixed) => { if was_test_prefixed != test_prefixed { panic!( "Insta snapshot name clash detected between '{}' \ and 'test_{}' in '{}'. Rename one function.", name, name, module_path ); } } } // The rest of the code just deals with duplicates, which we in some // cases do not want to guard against. if allow_duplicates() { return Ok(name.to_string()); } // if the snapshot name clashes we need to increment a counter. // we really do not care about poisoning here. let mut counters = TEST_NAME_COUNTERS.lock().unwrap_or_else(|x| x.into_inner()); let test_idx = counters.get(&key).cloned().unwrap_or(0) + 1; let rv = if test_idx == 1 { name.to_string() } else { format!("{}-{}", name, test_idx) }; counters.insert(key, test_idx); Ok(rv) } /// If there is a suffix on the settings, append it to the snapshot name. 
fn add_suffix_to_snapshot_name(name: Cow<'_, str>) -> Cow<'_, str> { Settings::with(|settings| { settings .snapshot_suffix() .map(|suffix| Cow::Owned(format!("{}@{}", name, suffix))) .unwrap_or_else(|| name) }) } fn get_snapshot_filename( module_path: &str, assertion_file: &str, snapshot_name: &str, cargo_workspace: &Path, base: &str, is_doctest: bool, ) -> PathBuf { let root = Path::new(cargo_workspace); let base = Path::new(base); Settings::with(|settings| { root.join(base.parent().unwrap()) .join(settings.snapshot_path()) .join({ use std::fmt::Write; let mut f = String::new(); if settings.prepend_module_to_snapshot() { if is_doctest { write!( &mut f, "doctest_{}__", Path::new(assertion_file) .file_name() .unwrap() .to_string_lossy() .replace('.', "_") ) .unwrap(); } else { write!(&mut f, "{}__", module_path.replace("::", "__")).unwrap(); } } write!( &mut f, "{}.snap", snapshot_name.replace(&['/', '\\'][..], "__") ) .unwrap(); f }) }) } #[derive(Debug)] struct SnapshotAssertionContext<'a> { tool_config: Arc, cargo_workspace: Arc, module_path: &'a str, snapshot_name: Option>, snapshot_file: Option, duplication_key: Option, old_snapshot: Option, pending_snapshots_path: Option, assertion_file: &'a str, assertion_line: u32, is_doctest: bool, } impl<'a> SnapshotAssertionContext<'a> { fn prepare( refval: ReferenceValue<'a>, manifest_dir: &'a str, function_name: &'a str, module_path: &'a str, assertion_file: &'a str, assertion_line: u32, ) -> Result, Box> { let tool_config = get_tool_config(manifest_dir); let cargo_workspace = get_cargo_workspace(manifest_dir); let snapshot_name; let mut duplication_key = None; let mut snapshot_file = None; let mut old_snapshot = None; let mut pending_snapshots_path = None; let is_doctest = is_doctest(function_name); match refval { ReferenceValue::Named(name) => { let name = match name { Some(name) => add_suffix_to_snapshot_name(name), None => detect_snapshot_name(function_name, module_path, false, is_doctest) .unwrap() .into(), }; if 
allow_duplicates() { duplication_key = Some(format!("named:{}|{}", module_path, name)); } let file = get_snapshot_filename( module_path, assertion_file, &name, &cargo_workspace, assertion_file, is_doctest, ); if fs::metadata(&file).is_ok() { old_snapshot = Some(Snapshot::from_file(&file)?); } snapshot_name = Some(name); snapshot_file = Some(file); } ReferenceValue::Inline(contents) => { if allow_duplicates() { duplication_key = Some(format!( "inline:{}|{}|{}", function_name, assertion_file, assertion_line )); } else { prevent_inline_duplicate(function_name, assertion_file, assertion_line); } snapshot_name = detect_snapshot_name(function_name, module_path, true, is_doctest) .ok() .map(Cow::Owned); let mut pending_file = cargo_workspace.join(assertion_file); pending_file.set_file_name(format!( ".{}.pending-snap", pending_file .file_name() .expect("no filename") .to_str() .expect("non unicode filename") )); pending_snapshots_path = Some(pending_file); old_snapshot = Some(Snapshot::from_components( module_path.replace("::", "__"), None, MetaData::default(), SnapshotContents::from_inline(contents), )); } }; Ok(SnapshotAssertionContext { tool_config, cargo_workspace, module_path, snapshot_name, snapshot_file, old_snapshot, pending_snapshots_path, assertion_file, assertion_line, duplication_key, is_doctest, }) } /// Given a path returns the local path within the workspace. pub fn localize_path(&self, p: &Path) -> Option { let workspace = self.cargo_workspace.canonicalize().ok()?; let p = self.cargo_workspace.join(p).canonicalize().ok()?; p.strip_prefix(&workspace).ok().map(|x| x.to_path_buf()) } /// Creates the new snapshot from input values. 
pub fn new_snapshot(&self, contents: SnapshotContents, expr: &str) -> Snapshot { Snapshot::from_components( self.module_path.replace("::", "__"), self.snapshot_name.as_ref().map(|x| x.to_string()), Settings::with(|settings| MetaData { source: Some(path_to_storage(Path::new(self.assertion_file))), assertion_line: Some(self.assertion_line), description: settings.description().map(Into::into), expression: if settings.omit_expression() { None } else { Some(expr.to_string()) }, info: settings.info().map(ToOwned::to_owned), input_file: settings .input_file() .and_then(|x| self.localize_path(x)) .map(|x| path_to_storage(&x)), }), contents, ) } /// Cleanup logic for passing snapshots. pub fn cleanup_passing(&self) -> Result<(), Box> { // let's just make sure there are no more pending files lingering // around. if let Some(ref snapshot_file) = self.snapshot_file { let snapshot_file = snapshot_file.clone().with_extension("snap.new"); fs::remove_file(snapshot_file).ok(); } // and add a null pending snapshot to a pending snapshot file if needed if let Some(ref pending_snapshots) = self.pending_snapshots_path { if fs::metadata(pending_snapshots).is_ok() { PendingInlineSnapshot::new(None, None, self.assertion_line) .save(pending_snapshots)?; } } Ok(()) } /// Writes the changes of the snapshot back. 
pub fn update_snapshot( &self, new_snapshot: Snapshot, ) -> Result> { let unseen = self .snapshot_file .as_ref() .map_or(false, |x| fs::metadata(x).is_ok()); let should_print = self.tool_config.output_behavior() != OutputBehavior::Nothing; let snapshot_update = snapshot_update_behavior(&self.tool_config, unseen); match snapshot_update { SnapshotUpdateBehavior::InPlace => { if let Some(ref snapshot_file) = self.snapshot_file { let saved = new_snapshot.save(snapshot_file)?; if should_print && saved { elog!( "{} {}", if unseen { style("created previously unseen snapshot").green() } else { style("updated snapshot").green() }, style(snapshot_file.display()).cyan().underlined(), ); } } else if should_print { elog!( "{}", style( "error: cannot update inline snapshots in-place \ (https://github.com/mitsuhiko/insta/issues/272)" ) .red() .bold(), ); } } SnapshotUpdateBehavior::NewFile => { if let Some(ref snapshot_file) = self.snapshot_file { if let Some(new_path) = new_snapshot.save_new(snapshot_file)? { if should_print { elog!( "{} {}", style("stored new snapshot").green(), style(new_path.display()).cyan().underlined(), ); } } } else if self.is_doctest { if should_print { elog!( "{}", style("warning: cannot update inline snapshots in doctests") .red() .bold(), ); } // special case for pending inline snapshots. Here we really only want // to write the contents if the snapshot contents changed as the metadata // is not retained for inline snapshots. This used to have different // behavior in the past where we did indeed want to rewrite the snapshots // entirely since we used to change the canonical snapshot format, but now // this is significantly less likely to happen and seeing hundreds of unchanged // inline snapshots in the review screen is not a lot of fun. 
} else if self .old_snapshot .as_ref() .map_or(true, |x| x.contents() != new_snapshot.contents()) { PendingInlineSnapshot::new( Some(new_snapshot), self.old_snapshot.clone(), self.assertion_line, ) .save(self.pending_snapshots_path.as_ref().unwrap())?; } } SnapshotUpdateBehavior::NoUpdate => {} } Ok(snapshot_update) } } fn prevent_inline_duplicate(function_name: &str, assertion_file: &str, assertion_line: u32) { let key = format!("{}|{}|{}", function_name, assertion_file, assertion_line); let mut set = INLINE_DUPLICATES.lock().unwrap(); if set.contains(&key) { // drop the lock so we don't poison it drop(set); panic!( "Insta does not allow inline snapshot assertions in loops. \ Wrap your assertions in allow_duplicates! to change this." ); } set.insert(key); } /// This prints the information about the snapshot fn print_snapshot_info(ctx: &SnapshotAssertionContext, new_snapshot: &Snapshot) { let mut printer = SnapshotPrinter::new( ctx.cargo_workspace.as_path(), ctx.old_snapshot.as_ref(), new_snapshot, ); printer.set_line(Some(ctx.assertion_line)); printer.set_snapshot_file(ctx.snapshot_file.as_deref()); printer.set_title(Some("Snapshot Summary")); printer.set_show_info(true); match ctx.tool_config.output_behavior() { OutputBehavior::Summary => { printer.print(); } OutputBehavior::Diff => { printer.set_show_diff(true); printer.print(); } _ => {} } } #[cfg(feature = "glob")] macro_rules! print_or_panic { ($fail_fast:expr, $($tokens:tt)*) => {{ if (!$fail_fast) { eprintln!($($tokens)*); eprintln!(); } else { panic!($($tokens)*); } }} } /// Finalizes the assertion based on the update result. fn finalize_assertion(ctx: &SnapshotAssertionContext, update_result: SnapshotUpdateBehavior) { // if we are in glob mode, we want to adjust the finalization // so that we do not show the hints immediately. 
let fail_fast = { #[cfg(feature = "glob")] { if let Some(top) = crate::glob::GLOB_STACK.lock().unwrap().last() { top.fail_fast } else { true } } #[cfg(not(feature = "glob"))] { true } }; if fail_fast && update_result == SnapshotUpdateBehavior::NewFile && ctx.tool_config.output_behavior() != OutputBehavior::Nothing && !ctx.is_doctest { println!( "{hint}", hint = style("To update snapshots run `cargo insta review`").dim(), ); } if update_result != SnapshotUpdateBehavior::InPlace && !ctx.tool_config.force_pass() { if fail_fast && ctx.tool_config.output_behavior() != OutputBehavior::Nothing { println!( "{hint}", hint = style( "Stopped on the first failure. Run `cargo insta test` to run all snapshots." ) .dim(), ); } // if we are in glob mode, count the failures and print the // errors instead of panicking. The glob will then panic at // the end. #[cfg(feature = "glob")] { let mut stack = crate::glob::GLOB_STACK.lock().unwrap(); if let Some(glob_collector) = stack.last_mut() { glob_collector.failed += 1; if update_result == SnapshotUpdateBehavior::NewFile && ctx.tool_config.output_behavior() != OutputBehavior::Nothing { glob_collector.show_insta_hint = true; } print_or_panic!( fail_fast, "snapshot assertion from glob for '{}' failed in line {}", ctx.snapshot_name.as_deref().unwrap_or("unnamed snapshot"), ctx.assertion_line ); return; } } panic!( "snapshot assertion for '{}' failed in line {}", ctx.snapshot_name.as_deref().unwrap_or("unnamed snapshot"), ctx.assertion_line ); } } fn record_snapshot_duplicate( results: &mut BTreeMap, snapshot: &Snapshot, ctx: &SnapshotAssertionContext, ) { let key = ctx.duplication_key.as_deref().unwrap(); if let Some(prev_snapshot) = results.get(key) { if prev_snapshot.contents() != snapshot.contents() { println!("Snapshots in allow-duplicates block do not match."); let mut printer = SnapshotPrinter::new(ctx.cargo_workspace.as_path(), Some(prev_snapshot), snapshot); printer.set_line(Some(ctx.assertion_line)); 
printer.set_snapshot_file(ctx.snapshot_file.as_deref()); printer.set_title(Some("Differences in Block")); printer.set_snapshot_hints("previous assertion", "current assertion"); if ctx.tool_config.output_behavior() == OutputBehavior::Diff { printer.set_show_diff(true); } printer.print(); panic!( "snapshot assertion for '{}' failed in line {}. Result \ does not match previous snapshot in allow-duplicates block.", ctx.snapshot_name.as_deref().unwrap_or("unnamed snapshot"), ctx.assertion_line ); } } else { results.insert(key.to_string(), snapshot.clone()); } } /// Do we allow recording of duplicates? fn allow_duplicates() -> bool { RECORDED_DUPLICATES.with(|x| !x.borrow().is_empty()) } /// Helper function to support perfect duplicate detection. pub fn with_allow_duplicates(f: F) -> R where F: FnOnce() -> R, { RECORDED_DUPLICATES.with(|x| x.borrow_mut().push(BTreeMap::new())); let rv = std::panic::catch_unwind(std::panic::AssertUnwindSafe(f)); RECORDED_DUPLICATES.with(|x| x.borrow_mut().pop().unwrap()); match rv { Ok(rv) => rv, Err(payload) => std::panic::resume_unwind(payload), } } /// This function is invoked from the macros to run the main assertion logic. /// /// This will create the assertion context, run the main logic to assert /// on snapshots and write changes to the pending snapshot files. It will /// also print the necessary bits of information to the output and fail the /// assertion with a panic if needed. 
#[allow(clippy::too_many_arguments)] pub fn assert_snapshot( refval: ReferenceValue<'_>, new_snapshot_value: &str, manifest_dir: &str, function_name: &str, module_path: &str, assertion_file: &str, assertion_line: u32, expr: &str, ) -> Result<(), Box> { let ctx = SnapshotAssertionContext::prepare( refval, manifest_dir, function_name, module_path, assertion_file, assertion_line, )?; let tool_config = get_tool_config(manifest_dir); // apply filters if they are available #[cfg(feature = "filters")] let new_snapshot_value = Settings::with(|settings| settings.filters().apply_to(new_snapshot_value)); let new_snapshot = ctx.new_snapshot(new_snapshot_value.into(), expr); // memoize the snapshot file if requested. if let Some(ref snapshot_file) = ctx.snapshot_file { memoize_snapshot_file(snapshot_file); } // If we allow assertion with duplicates, we record the duplicate now. This will // in itself fail the assertion if the previous visit of the same assertion macro // did not yield the same result. RECORDED_DUPLICATES.with(|x| { if let Some(results) = x.borrow_mut().last_mut() { record_snapshot_duplicate(results, &new_snapshot, &ctx); } }); let pass = ctx .old_snapshot .as_ref() .map(|x| { if tool_config.require_full_match() { x.matches_fully(&new_snapshot) } else { x.matches(&new_snapshot) } }) .unwrap_or(false); if pass { ctx.cleanup_passing()?; if tool_config.force_update_snapshots() { ctx.update_snapshot(new_snapshot)?; } // otherwise print information and update snapshots. } else { print_snapshot_info(&ctx, &new_snapshot); let update_result = ctx.update_snapshot(new_snapshot)?; finalize_assertion(&ctx, update_result); } Ok(()) } /// Test snapshots in doctests. 
/// /// ``` /// // this is only working on newer rust versions /// extern crate rustc_version; /// use rustc_version::{Version, version}; /// if version().unwrap() > Version::parse("1.72.0").unwrap() { /// insta::assert_debug_snapshot!("named", vec![1, 2, 3, 4, 5]); /// } /// ``` /// /// ```should_panic /// insta::assert_debug_snapshot!(vec![1, 2, 3, 4, 5]); /// ``` /// /// ``` /// let some_string = "Coucou je suis un joli bug"; /// insta::assert_snapshot!(some_string, @"Coucou je suis un joli bug"); /// ``` /// /// ``` /// let some_string = "Coucou je suis un joli bug"; /// insta::assert_snapshot!(some_string, @"Coucou je suis un joli bug"); /// ``` const _DOCTEST1: bool = false; insta-1.39.0/src/select_grammar.pest000064400000000000000000000012031046102023000155010ustar 00000000000000WHITESPACE = _{ WHITE_SPACE } ident = @{ ( "_" | "$" | XID_START ) ~ XID_CONTINUE* } deep_wildcard = { "." ~ "**" } wildcard = { "." ~ "*" } key = @{ "." ~ ident } int = { "-"? ~ NUMBER+ } string = @{ "\"" ~ (!("\"") ~ ANY)* ~ "\""} subscript = { "[" ~ ( string | int ) ~ "]" } full_range = { "[" ~ "]" } range = { "[" ~ int ~ ":" ~ int ~ "]" } range_to = { "[" ~ ":" ~ int ~ "]" } range_from = { "[" ~ int ~ ":]" } segment = _{ deep_wildcard | wildcard | key | subscript | full_range | range | range_to | range_from } identity = { "." } selector = { (segment+ | identity) } selectors = { SOI ~ selector ~ ("," ~ selector)* ~ ","? 
~ EOI } insta-1.39.0/src/serialization.rs000064400000000000000000000123451046102023000150530ustar 00000000000000use serde::de::value::Error as ValueError; use serde::Serialize; use crate::content::{json, yaml, Content, ContentSerializer}; use crate::settings::Settings; pub enum SerializationFormat { #[cfg(feature = "csv")] Csv, #[cfg(feature = "ron")] Ron, #[cfg(feature = "toml")] Toml, Yaml, Json, JsonCompact, } #[derive(Debug)] pub enum SnapshotLocation { Inline, File, } pub fn serialize_content( mut content: Content, format: SerializationFormat, location: SnapshotLocation, ) -> String { content = Settings::with(|settings| { if settings.sort_maps() { content.sort_maps(); } #[cfg(feature = "redactions")] { for (selector, redaction) in settings.iter_redactions() { content = selector.redact(content, redaction); } } content }); match format { SerializationFormat::Yaml => { let serialized = yaml::to_string(&content); match location { SnapshotLocation::Inline => serialized, SnapshotLocation::File => serialized[4..].to_string(), } } SerializationFormat::Json => json::to_string_pretty(&content), SerializationFormat::JsonCompact => json::to_string_compact(&content), #[cfg(feature = "csv")] SerializationFormat::Csv => { let mut buf = Vec::with_capacity(128); { let mut writer = dep_csv::Writer::from_writer(&mut buf); // if the top-level content we're serializing is a vector we // want to serialize it multiple times once for each item. 
if let Some(content_slice) = content.as_slice() { for content in content_slice { writer.serialize(content).unwrap(); } } else { writer.serialize(&content).unwrap(); } writer.flush().unwrap(); } if buf.ends_with(b"\n") { buf.truncate(buf.len() - 1); } String::from_utf8(buf).unwrap() } #[cfg(feature = "ron")] SerializationFormat::Ron => { let mut buf = Vec::new(); let mut config = dep_ron::ser::PrettyConfig::new(); config.new_line = "\n".to_string(); config.indentor = " ".to_string(); config.struct_names = true; let mut serializer = dep_ron::ser::Serializer::with_options( &mut buf, Some(config), dep_ron::options::Options::default(), ) .unwrap(); content.serialize(&mut serializer).unwrap(); String::from_utf8(buf).unwrap() } #[cfg(feature = "toml")] SerializationFormat::Toml => { let mut rv = dep_toml::to_string_pretty(&content).unwrap(); if rv.ends_with('\n') { rv.truncate(rv.len() - 1); } rv } } } pub fn serialize_value( s: &S, format: SerializationFormat, location: SnapshotLocation, ) -> String { let serializer = ContentSerializer::::new(); let content = Serialize::serialize(s, serializer).unwrap(); serialize_content(content, format, location) } #[cfg(feature = "redactions")] pub fn serialize_value_redacted( s: &S, redactions: &[(crate::redaction::Selector, crate::redaction::Redaction)], format: SerializationFormat, location: SnapshotLocation, ) -> String { let serializer = ContentSerializer::::new(); let mut content = Serialize::serialize(s, serializer).unwrap(); for (selector, redaction) in redactions { content = selector.redact(content, redaction); } serialize_content(content, format, location) } #[test] fn test_yaml_serialization() { let yaml = serialize_content( Content::Map(vec![ ( Content::from("env"), Content::Seq(vec![ Content::from("ENVIRONMENT"), Content::from("production"), ]), ), ( Content::from("cmdline"), Content::Seq(vec![Content::from("my-tool"), Content::from("run")]), ), ]), SerializationFormat::Yaml, SnapshotLocation::File, ); 
crate::assert_snapshot!(&yaml, @r###" env: - ENVIRONMENT - production cmdline: - my-tool - run "###); let inline_yaml = serialize_content( Content::Map(vec![ ( Content::from("env"), Content::Seq(vec![ Content::from("ENVIRONMENT"), Content::from("production"), ]), ), ( Content::from("cmdline"), Content::Seq(vec![Content::from("my-tool"), Content::from("run")]), ), ]), SerializationFormat::Yaml, SnapshotLocation::Inline, ); crate::assert_snapshot!(&inline_yaml, @r###" --- env: - ENVIRONMENT - production cmdline: - my-tool - run "###); } insta-1.39.0/src/settings.rs000064400000000000000000000471061046102023000140410ustar 00000000000000use std::cell::RefCell; use std::future::Future; use std::mem; use std::path::{Path, PathBuf}; use std::pin::Pin; use std::sync::Arc; use std::task::{Context, Poll}; #[cfg(feature = "serde")] use serde::{de::value::Error as ValueError, Serialize}; use crate::content::Content; #[cfg(feature = "serde")] use crate::content::ContentSerializer; #[cfg(feature = "filters")] use crate::filters::Filters; #[cfg(feature = "redactions")] use crate::redaction::{dynamic_redaction, sorted_redaction, ContentPath, Redaction, Selector}; lazy_static::lazy_static! { static ref DEFAULT_SETTINGS: Arc = { Arc::new(ActualSettings { sort_maps: false, snapshot_path: "snapshots".into(), snapshot_suffix: "".into(), input_file: None, description: None, info: None, omit_expression: false, prepend_module_to_snapshot: true, #[cfg(feature = "redactions")] redactions: Redactions::default(), #[cfg(feature = "filters")] filters: Filters::default(), #[cfg(feature = "glob")] allow_empty_glob: false, }) }; } thread_local!(static CURRENT_SETTINGS: RefCell = RefCell::new(Settings::new())); /// Represents stored redactions. 
#[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] #[derive(Clone, Default)] pub struct Redactions(Vec<(Selector<'static>, Arc)>); #[cfg(feature = "redactions")] impl<'a> From> for Redactions { fn from(value: Vec<(&'a str, Redaction)>) -> Redactions { Redactions( value .into_iter() .map(|x| (Selector::parse(x.0).unwrap().make_static(), Arc::new(x.1))) .collect(), ) } } #[derive(Clone)] #[doc(hidden)] pub struct ActualSettings { pub sort_maps: bool, pub snapshot_path: PathBuf, pub snapshot_suffix: String, pub input_file: Option, pub description: Option, pub info: Option, pub omit_expression: bool, pub prepend_module_to_snapshot: bool, #[cfg(feature = "redactions")] pub redactions: Redactions, #[cfg(feature = "filters")] pub filters: Filters, #[cfg(feature = "glob")] pub allow_empty_glob: bool, } impl ActualSettings { pub fn sort_maps(&mut self, value: bool) { self.sort_maps = value; } pub fn snapshot_path>(&mut self, path: P) { self.snapshot_path = path.as_ref().to_path_buf(); } pub fn snapshot_suffix>(&mut self, suffix: I) { self.snapshot_suffix = suffix.into(); } pub fn input_file>(&mut self, p: P) { self.input_file = Some(p.as_ref().to_path_buf()); } pub fn description>(&mut self, value: S) { self.description = Some(value.into()); } #[cfg(feature = "serde")] pub fn info(&mut self, s: &S) { let serializer = ContentSerializer::::new(); let content = Serialize::serialize(s, serializer).unwrap(); self.info = Some(content); } pub fn raw_info(&mut self, content: &Content) { self.info = Some(content.to_owned()); } pub fn omit_expression(&mut self, value: bool) { self.omit_expression = value; } pub fn prepend_module_to_snapshot(&mut self, value: bool) { self.prepend_module_to_snapshot = value; } #[cfg(feature = "redactions")] pub fn redactions>(&mut self, r: R) { self.redactions = r.into(); } #[cfg(feature = "filters")] pub fn filters>(&mut self, f: F) { self.filters = f.into(); } #[cfg(feature = "glob")] pub fn allow_empty_glob(&mut 
self, value: bool) { self.allow_empty_glob = value; } } /// Configures how insta operates at test time. /// /// Settings are always bound to a thread and some default settings are always /// available. These settings can be changed and influence how insta behaves on /// that thread. They can either temporarily or permanently changed. /// /// This can be used to influence how the snapshot macros operate. /// For instance it can be useful to force ordering of maps when /// unordered structures are used through settings. /// /// Some of the settings can be changed but shouldn't as it will make it harder /// for tools like cargo-insta or an editor integration to locate the snapshot /// files. /// /// Settings can also be configured with the [`with_settings!`] macro. /// /// Example: /// /// ```ignore /// use insta; /// /// let mut settings = insta::Settings::clone_current(); /// settings.set_sort_maps(true); /// settings.bind(|| { /// // runs the assertion with the changed settings enabled /// insta::assert_snapshot!(...); /// }); /// ``` #[derive(Clone)] pub struct Settings { inner: Arc, } impl Default for Settings { fn default() -> Settings { Settings { inner: DEFAULT_SETTINGS.clone(), } } } impl Settings { /// Returns the default settings. /// /// It's recommended to use `clone_current` instead so that /// already applied modifications are not discarded. pub fn new() -> Settings { Settings::default() } /// Returns a copy of the current settings. pub fn clone_current() -> Settings { Settings::with(|x| x.clone()) } /// Internal helper for macros #[doc(hidden)] pub fn _private_inner_mut(&mut self) -> &mut ActualSettings { Arc::make_mut(&mut self.inner) } /// Enables forceful sorting of maps before serialization. /// /// Note that this only applies to snapshots that undergo serialization /// (eg: does not work for `assert_debug_snapshot!`.) /// /// The default value is `false`. 
pub fn set_sort_maps(&mut self, value: bool) { self._private_inner_mut().sort_maps = value; } /// Returns the current value for map sorting. pub fn sort_maps(&self) -> bool { self.inner.sort_maps } /// Disables prepending of modules to the snapshot filename. /// /// By default the filename of a snapshot is `__.snap`. /// Setting this flag to `false` changes the snapshot filename to just /// `.snap`. /// /// The default value is `true`. pub fn set_prepend_module_to_snapshot(&mut self, value: bool) { self._private_inner_mut().prepend_module_to_snapshot(value); } /// Returns the current value for module name prepending. pub fn prepend_module_to_snapshot(&self) -> bool { self.inner.prepend_module_to_snapshot } /// Allows the [`glob!`] macro to succeed if it matches no files. /// /// By default the glob macro will fail the test if it does not find /// any files to prevent accidental typos. This can be disabled when /// fixtures should be conditional. /// /// The default value is `false`. #[cfg(feature = "glob")] pub fn set_allow_empty_glob(&mut self, value: bool) { self._private_inner_mut().allow_empty_glob(value); } /// Returns the current value for the empty glob setting. #[cfg(feature = "glob")] pub fn allow_empty_glob(&self) -> bool { self.inner.allow_empty_glob } /// Sets the snapshot suffix. /// /// The snapshot suffix is added to all snapshot names with an `@` sign /// between. For instance if the snapshot suffix is set to `"foo"` and /// the snapshot would be named `"snapshot"` it turns into `"snapshot@foo"`. /// This is useful to separate snapshots if you want to use test /// parameterization. pub fn set_snapshot_suffix>(&mut self, suffix: I) { self._private_inner_mut().snapshot_suffix(suffix); } /// Removes the snapshot suffix. pub fn remove_snapshot_suffix(&mut self) { self.set_snapshot_suffix(""); } /// Returns the current snapshot suffix. 
pub fn snapshot_suffix(&self) -> Option<&str> { if self.inner.snapshot_suffix.is_empty() { None } else { Some(&self.inner.snapshot_suffix) } } /// Sets the input file reference. /// /// This value is completely unused by the snapshot testing system but /// it lets you store some meta data with a snapshot that refers you back /// to the input file. The path stored here is made relative to the /// workspace root before storing with the snapshot. pub fn set_input_file>(&mut self, p: P) { self._private_inner_mut().input_file(p); } /// Removes the input file reference. pub fn remove_input_file(&mut self) { self._private_inner_mut().input_file = None; } /// Returns the current input file reference. pub fn input_file(&self) -> Option<&Path> { self.inner.input_file.as_deref() } /// Sets the description. /// /// The description is stored alongside the snapshot and will be displayed /// in the diff UI. When a snapshot is captured the Rust expression for that /// snapshot is always retained. However sometimes that information is not /// super useful by itself, particularly when working with loops and generated /// tests. In that case the `description` can be set as extra information. /// /// See also [`set_info`](Self::set_info). pub fn set_description>(&mut self, value: S) { self._private_inner_mut().description(value); } /// Removes the description. pub fn remove_description(&mut self) { self._private_inner_mut().description = None; } /// Returns the current description pub fn description(&self) -> Option<&str> { self.inner.description.as_deref() } /// Sets the info. /// /// The `info` is similar to `description` but for structured data. This is /// stored with the snapshot and shown in the review UI. This for instance /// can be used to show extended information that can make a reviewer better /// understand what the snapshot is supposed to be testing. /// /// As an example the input parameters to the function that creates the snapshot /// can be persisted here. 
/// /// Alternatively you can use [`set_raw_info`](Self::set_raw_info) instead. #[cfg(feature = "serde")] #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] pub fn set_info(&mut self, s: &S) { self._private_inner_mut().info(s); } /// Sets the info from a content object. /// /// This works like [`set_info`](Self::set_info) but does not require `serde`. pub fn set_raw_info(&mut self, content: &Content) { self._private_inner_mut().raw_info(content); } /// Removes the info. pub fn remove_info(&mut self) { self._private_inner_mut().info = None; } /// Returns the current info pub(crate) fn info(&self) -> Option<&Content> { self.inner.info.as_ref() } /// Returns the current info pub fn has_info(&self) -> bool { self.inner.info.is_some() } /// If set to true, does not retain the expression in the snapshot. pub fn set_omit_expression(&mut self, value: bool) { self._private_inner_mut().omit_expression(value); } /// Returns true if expressions are omitted from snapshots. pub fn omit_expression(&self) -> bool { self.inner.omit_expression } /// Registers redactions that should be applied. /// /// This can be useful if redactions must be shared across multiple /// snapshots. /// /// Note that this only applies to snapshots that undergo serialization /// (eg: does not work for `assert_debug_snapshot!`.) #[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn add_redaction>(&mut self, selector: &str, replacement: R) { self.add_redaction_impl(selector, replacement.into()) } #[cfg(feature = "redactions")] fn add_redaction_impl(&mut self, selector: &str, replacement: Redaction) { self._private_inner_mut().redactions.0.push(( Selector::parse(selector).unwrap().make_static(), Arc::new(replacement), )); } /// Registers a replacement callback. /// /// This works similar to a redaction but instead of changing the value it /// asserts the value at a certain place. This function is internally /// supposed to call things like `assert_eq!`. 
/// /// This is a shortcut to `add_redaction(selector, dynamic_redaction(...))`; #[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn add_dynamic_redaction(&mut self, selector: &str, func: F) where I: Into, F: Fn(Content, ContentPath<'_>) -> I + Send + Sync + 'static, { self.add_redaction(selector, dynamic_redaction(func)); } /// A special redaction that sorts a sequence or map. /// /// This is a shortcut to `add_redaction(selector, sorted_redaction())`. #[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn sort_selector(&mut self, selector: &str) { self.add_redaction(selector, sorted_redaction()); } /// Replaces the currently set redactions. /// /// The default set is empty. #[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn set_redactions>(&mut self, redactions: R) { self._private_inner_mut().redactions(redactions); } /// Removes all redactions. #[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub fn clear_redactions(&mut self) { self._private_inner_mut().redactions.0.clear(); } /// Iterate over the redactions. #[cfg(feature = "redactions")] #[cfg_attr(docsrs, doc(cfg(feature = "redactions")))] pub(crate) fn iter_redactions(&self) -> impl Iterator { self.inner.redactions.0.iter().map(|(a, b)| (a, &**b)) } /// Adds a new filter. /// /// Filters are similar to redactions but are applied as regex onto the final snapshot /// value. This can be used to perform modifications to the snapshot string that would /// be impossible to do with redactions because for instance the value is just a string. /// /// The first argument is the [`regex`] pattern to apply, the second is a replacement /// string. The replacement string has the same functionality as the second argument /// to [`Regex::replace`](regex::Regex::replace). /// /// This is useful to perform some cleanup procedures on the snapshot for unstable values. 
/// /// ```rust /// # use insta::Settings; /// # async fn foo() { /// # let mut settings = Settings::new(); /// settings.add_filter(r"\b[[:xdigit:]]{32}\b", "[UID]"); /// # } /// ``` #[cfg(feature = "filters")] #[cfg_attr(docsrs, doc(cfg(feature = "filters")))] pub fn add_filter>(&mut self, regex: &str, replacement: S) { self._private_inner_mut().filters.add(regex, replacement); } /// Replaces the currently set filters. /// /// The default set is empty. #[cfg(feature = "filters")] #[cfg_attr(docsrs, doc(cfg(feature = "filters")))] pub fn set_filters>(&mut self, filters: F) { self._private_inner_mut().filters(filters); } /// Removes all filters. #[cfg(feature = "filters")] #[cfg_attr(docsrs, doc(cfg(feature = "filters")))] pub fn clear_filters(&mut self) { self._private_inner_mut().filters.clear(); } /// Returns the current filters #[cfg(feature = "filters")] #[cfg_attr(docsrs, doc(cfg(feature = "filters")))] pub(crate) fn filters(&self) -> &Filters { &self.inner.filters } /// Sets the snapshot path. /// /// If not absolute it's relative to where the test is in. /// /// Defaults to `snapshots`. pub fn set_snapshot_path>(&mut self, path: P) { self._private_inner_mut().snapshot_path(path); } /// Returns the snapshot path. pub fn snapshot_path(&self) -> &Path { &self.inner.snapshot_path } /// Runs a function with the current settings bound to the thread. /// /// This is an alternative to [`bind_to_scope`](Settings::bind_to_scope) /// which does not require holding on to a drop guard. The return value /// of the closure is passed through. /// /// ``` /// # use insta::Settings; /// let mut settings = Settings::clone_current(); /// settings.set_sort_maps(true); /// settings.bind(|| { /// // do stuff here /// }); /// ``` pub fn bind R, R>(&self, f: F) -> R { let _guard = self.bind_to_scope(); f() } /// Like `bind` but for futures. 
/// /// This lets you bind settings for the duration of a future like this: /// /// ```rust /// # use insta::Settings; /// # async fn foo() { /// let settings = Settings::new(); /// settings.bind_async(async { /// // do assertions here /// }).await; /// # } /// ``` pub fn bind_async, T>(&self, future: F) -> impl Future { struct BindingFuture(Arc, F); impl Future for BindingFuture { type Output = F::Output; fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { let inner = self.0.clone(); let future = unsafe { self.map_unchecked_mut(|s| &mut s.1) }; CURRENT_SETTINGS.with(|x| { let old = { let mut current = x.borrow_mut(); let old = current.inner.clone(); current.inner = inner; old }; let rv = future.poll(cx); let mut current = x.borrow_mut(); current.inner = old; rv }) } } BindingFuture(self.inner.clone(), future) } /// Binds the settings to the current thread and resets when the drop /// guard is released. /// /// This is the recommended way to temporarily bind settings and replaces /// the earlier [`bind_to_scope`](Settings::bind_to_scope) and relies on /// drop guards. An alternative is [`bind`](Settings::bind) which binds /// for the duration of the block it wraps. /// /// ``` /// # use insta::Settings; /// let mut settings = Settings::clone_current(); /// settings.set_sort_maps(true); /// let _guard = settings.bind_to_scope(); /// // do stuff here /// ``` pub fn bind_to_scope(&self) -> SettingsBindDropGuard { CURRENT_SETTINGS.with(|x| { let mut x = x.borrow_mut(); let old = mem::replace(&mut x.inner, self.inner.clone()); SettingsBindDropGuard(Some(old)) }) } /// Runs a function with the current settings. pub(crate) fn with R>(f: F) -> R { CURRENT_SETTINGS.with(|x| f(&x.borrow())) } } /// Returned from [`bind_to_scope`](Settings::bind_to_scope) #[must_use = "The guard is immediately dropped so binding has no effect. 
Use `let _guard = ...` to bind it."] pub struct SettingsBindDropGuard(Option>); impl Drop for SettingsBindDropGuard { fn drop(&mut self) { CURRENT_SETTINGS.with(|x| { x.borrow_mut().inner = self.0.take().unwrap(); }) } } insta-1.39.0/src/snapshot.rs000064400000000000000000000646421046102023000140440ustar 00000000000000use std::borrow::Cow; use std::env; use std::error::Error; use std::fs; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; use std::time::{SystemTime, UNIX_EPOCH}; use crate::content::{self, json, yaml, Content}; lazy_static::lazy_static! { static ref RUN_ID: String = { if let Ok(run_id) = env::var("NEXTEST_RUN_ID") { run_id } else { let d = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); format!("{}-{}", d.as_secs(), d.subsec_nanos()) } }; } #[derive(Debug)] pub struct PendingInlineSnapshot { pub run_id: String, pub line: u32, pub new: Option, pub old: Option, } impl PendingInlineSnapshot { pub fn new(new: Option, old: Option, line: u32) -> PendingInlineSnapshot { PendingInlineSnapshot { new, old, line, run_id: RUN_ID.clone(), } } #[cfg(feature = "_cargo_insta_internal")] pub fn load_batch(p: &Path) -> Result, Box> { let contents = fs::read_to_string(p).map_err(|e| content::Error::FileIo(e, p.to_path_buf()))?; let mut rv: Vec = contents .lines() .map(|line| { let value = yaml::parse_str(line, p)?; Self::from_content(value) }) .collect::>>()?; // remove all but the last run if let Some(last_run_id) = rv.last().map(|x| x.run_id.clone()) { rv.retain(|x| x.run_id == last_run_id); } Ok(rv) } #[cfg(feature = "_cargo_insta_internal")] pub fn save_batch(p: &Path, batch: &[PendingInlineSnapshot]) -> Result<(), Box> { fs::remove_file(p).ok(); for snap in batch { snap.save(p)?; } Ok(()) } pub fn save(&self, p: &Path) -> Result<(), Box> { let mut f = fs::OpenOptions::new().create(true).append(true).open(p)?; let mut s = json::to_string(&self.as_content()); s.push('\n'); f.write_all(s.as_bytes())?; Ok(()) } #[cfg(feature = 
"_cargo_insta_internal")] fn from_content(content: Content) -> Result> { if let Content::Map(map) = content { let mut run_id = None; let mut line = None; let mut old = None; let mut new = None; for (key, value) in map.into_iter() { match key.as_str() { Some("run_id") => run_id = value.as_str().map(|x| x.to_string()), Some("line") => line = value.as_u64().map(|x| x as u32), Some("old") if !value.is_nil() => old = Some(Snapshot::from_content(value)?), Some("new") if !value.is_nil() => new = Some(Snapshot::from_content(value)?), _ => {} } } Ok(PendingInlineSnapshot { run_id: run_id.ok_or(content::Error::MissingField)?, line: line.ok_or(content::Error::MissingField)?, new, old, }) } else { Err(content::Error::UnexpectedDataType.into()) } } fn as_content(&self) -> Content { let fields = vec![ ("run_id", Content::from(self.run_id.as_str())), ("line", Content::from(self.line)), ( "new", match &self.new { Some(snap) => snap.as_content(), None => Content::None, }, ), ( "old", match &self.old { Some(snap) => snap.as_content(), None => Content::None, }, ), ]; Content::Struct("PendingInlineSnapshot", fields) } } /// Snapshot metadata information. #[derive(Debug, Default, Clone, PartialEq)] pub struct MetaData { /// The source file (relative to workspace root). pub(crate) source: Option, /// The source line, if available. This is used by pending snapshots, but trimmed /// before writing to the final `.snap` files in [`MetaData::trim_for_persistence`]. pub(crate) assertion_line: Option, /// Optional human readable (non formatted) snapshot description. pub(crate) description: Option, /// Optionally the expression that created the snapshot. pub(crate) expression: Option, /// An optional arbitrary structured info object. pub(crate) info: Option, /// Reference to the input file. pub(crate) input_file: Option, } impl MetaData { /// Returns the absolute source path. pub fn source(&self) -> Option<&str> { self.source.as_deref() } /// Returns the assertion line. 
pub fn assertion_line(&self) -> Option { self.assertion_line } /// Returns the expression that created the snapshot. pub fn expression(&self) -> Option<&str> { self.expression.as_deref() } /// Returns the description that created the snapshot. pub fn description(&self) -> Option<&str> { self.description.as_deref().filter(|x| !x.is_empty()) } /// Returns the embedded info. #[doc(hidden)] pub fn private_info(&self) -> Option<&Content> { self.info.as_ref() } /// Returns the relative source path. pub fn get_relative_source(&self, base: &Path) -> Option { self.source.as_ref().map(|source| { base.join(source) .canonicalize() .ok() .and_then(|s| s.strip_prefix(base).ok().map(|x| x.to_path_buf())) .unwrap_or_else(|| base.to_path_buf()) }) } /// Returns the input file reference. pub fn input_file(&self) -> Option<&str> { self.input_file.as_deref() } fn from_content(content: Content) -> Result> { if let Content::Map(map) = content { let mut source = None; let mut assertion_line = None; let mut description = None; let mut expression = None; let mut info = None; let mut input_file = None; for (key, value) in map.into_iter() { match key.as_str() { Some("source") => source = value.as_str().map(|x| x.to_string()), Some("assertion_line") => assertion_line = value.as_u64().map(|x| x as u32), Some("description") => description = value.as_str().map(Into::into), Some("expression") => expression = value.as_str().map(Into::into), Some("info") if !value.is_nil() => info = Some(value), Some("input_file") => input_file = value.as_str().map(Into::into), _ => {} } } Ok(MetaData { source, assertion_line, description, expression, info, input_file, }) } else { Err(content::Error::UnexpectedDataType.into()) } } fn as_content(&self) -> Content { let mut fields = Vec::new(); if let Some(source) = self.source.as_deref() { fields.push(("source", Content::from(source))); } if let Some(line) = self.assertion_line { fields.push(("assertion_line", Content::from(line))); } if let Some(description) = 
self.description.as_deref() { fields.push(("description", Content::from(description))); } if let Some(expression) = self.expression.as_deref() { fields.push(("expression", Content::from(expression))); } if let Some(info) = &self.info { fields.push(("info", info.to_owned())); } if let Some(input_file) = self.input_file.as_deref() { fields.push(("input_file", Content::from(input_file))); } Content::Struct("MetaData", fields) } /// Trims the metadata of fields that we don't save to `.snap` files; we /// only use for display while reviewing fn trim_for_persistence(&self) -> Cow<'_, MetaData> { if self.assertion_line.is_some() { let mut rv = self.clone(); rv.assertion_line = None; Cow::Owned(rv) } else { Cow::Borrowed(self) } } } /// A helper to work with stored snapshots. #[derive(Debug, Clone)] pub struct Snapshot { module_name: String, snapshot_name: Option, metadata: MetaData, snapshot: SnapshotContents, } impl Snapshot { /// Loads a snapshot from a file. pub fn from_file(p: &Path) -> Result> { let mut f = BufReader::new(fs::File::open(p)?); let mut buf = String::new(); f.read_line(&mut buf)?; // yaml format let metadata = if buf.trim_end() == "---" { loop { let read = f.read_line(&mut buf)?; if read == 0 { break; } if buf[buf.len() - read..].trim_end() == "---" { buf.truncate(buf.len() - read); break; } } let content = yaml::parse_str(&buf, p)?; MetaData::from_content(content)? 
// legacy format } else { let mut rv = MetaData::default(); loop { buf.clear(); let read = f.read_line(&mut buf)?; if read == 0 || buf.trim_end().is_empty() { buf.truncate(buf.len() - read); break; } let mut iter = buf.splitn(2, ':'); if let Some(key) = iter.next() { if let Some(value) = iter.next() { let value = value.trim(); match key.to_lowercase().as_str() { "expression" => rv.expression = Some(value.to_string()), "source" => rv.source = Some(value.into()), _ => {} } } } } rv }; buf.clear(); for (idx, line) in f.lines().enumerate() { let line = line?; if idx > 0 { buf.push('\n'); } buf.push_str(&line); } let (snapshot_name, module_name) = names_of_path(p); Ok(Snapshot::from_components( module_name, Some(snapshot_name), metadata, buf.into(), )) } /// Creates an empty snapshot. pub(crate) fn from_components( module_name: String, snapshot_name: Option, metadata: MetaData, snapshot: SnapshotContents, ) -> Snapshot { Snapshot { module_name, snapshot_name, metadata, snapshot, } } #[cfg(feature = "_cargo_insta_internal")] fn from_content(content: Content) -> Result> { if let Content::Map(map) = content { let mut module_name = None; let mut snapshot_name = None; let mut metadata = None; let mut snapshot = None; for (key, value) in map.into_iter() { match key.as_str() { Some("module_name") => module_name = value.as_str().map(|x| x.to_string()), Some("snapshot_name") => snapshot_name = value.as_str().map(|x| x.to_string()), Some("metadata") => metadata = Some(MetaData::from_content(value)?), Some("snapshot") => { snapshot = Some(SnapshotContents( value .as_str() .ok_or(content::Error::UnexpectedDataType)? 
.to_string(), )) } _ => {} } } Ok(Snapshot { module_name: module_name.ok_or(content::Error::MissingField)?, snapshot_name, metadata: metadata.ok_or(content::Error::MissingField)?, snapshot: snapshot.ok_or(content::Error::MissingField)?, }) } else { Err(content::Error::UnexpectedDataType.into()) } } fn as_content(&self) -> Content { let mut fields = vec![("module_name", Content::from(self.module_name.as_str()))]; if let Some(name) = self.snapshot_name.as_deref() { fields.push(("snapshot_name", Content::from(name))); } fields.push(("metadata", self.metadata.as_content())); fields.push(("snapshot", Content::from(self.snapshot.0.as_str()))); Content::Struct("Content", fields) } /// Returns the module name. pub fn module_name(&self) -> &str { &self.module_name } /// Returns the snapshot name. pub fn snapshot_name(&self) -> Option<&str> { self.snapshot_name.as_deref() } /// The metadata in the snapshot. pub fn metadata(&self) -> &MetaData { &self.metadata } /// The snapshot contents pub fn contents(&self) -> &SnapshotContents { &self.snapshot } /// Snapshot contents match another snapshot's. pub fn matches(&self, other: &Snapshot) -> bool { self.contents() == other.contents() } /// Snapshot contents _and_ metadata match another snapshot's. pub fn matches_fully(&self, other: &Snapshot) -> bool { self.matches(other) && self.metadata.trim_for_persistence() == other.metadata.trim_for_persistence() } /// The snapshot contents as a &str pub fn contents_str(&self) -> &str { self.snapshot.as_str() } fn serialize_snapshot(&self, md: &MetaData) -> String { let mut buf = yaml::to_string(&md.as_content()); buf.push_str("---\n"); buf.push_str(self.contents_str()); buf.push('\n'); buf } fn save_with_metadata( &self, path: &Path, ref_file: Option<&Path>, md: &MetaData, ) -> Result> { if let Some(folder) = path.parent() { fs::create_dir_all(folder)?; } let serialized_snapshot = self.serialize_snapshot(md); // check the reference file for contents. 
Note that we always want to // compare snapshots that were trimmed to persistence here. if let Ok(old) = fs::read_to_string(ref_file.unwrap_or(path)) { let persisted = match md.trim_for_persistence() { Cow::Owned(trimmed) => Cow::Owned(self.serialize_snapshot(&trimmed)), Cow::Borrowed(_) => Cow::Borrowed(&serialized_snapshot), }; if old == persisted.as_str() { return Ok(false); } } fs::write(path, serialized_snapshot)?; Ok(true) } /// Saves the snapshot. /// /// Returns `true` if the snapshot was saved. This will return `false` if there /// was already a snapshot with matching contents. #[doc(hidden)] pub fn save(&self, path: &Path) -> Result> { self.save_with_metadata(path, None, &self.metadata.trim_for_persistence()) } /// Same as `save` but instead of writing a normal snapshot file this will write /// a `.snap.new` file with additional information. /// /// If the existing snapshot matches the new file, then `None` is returned, otherwise /// the name of the new snapshot file. pub(crate) fn save_new(&self, path: &Path) -> Result, Box> { let new_path = path.to_path_buf().with_extension("snap.new"); if self.save_with_metadata(&new_path, Some(path), &self.metadata)? { Ok(Some(new_path)) } else { Ok(None) } } } /// The contents of a Snapshot // Could be Cow, but I think limited savings #[derive(Debug, Clone)] pub struct SnapshotContents(String); impl SnapshotContents { pub fn from_inline(value: &str) -> SnapshotContents { SnapshotContents(get_inline_snapshot_value(value)) } /// Returns the snapshot contents as string with surrounding whitespace removed. 
pub fn as_str(&self) -> &str { self.0 .trim_start_matches(|x| x == '\r' || x == '\n') .trim_end() } pub fn to_inline(&self, indentation: usize) -> String { let contents = &self.0; let mut out = String::new(); let is_escape = contents.contains(&['\n', '\\', '"'][..]); out.push_str(if is_escape { "r###\"" } else { "\"" }); // if we have more than one line we want to change into the block // representation mode if contents.contains('\n') { out.extend( contents .lines() // newline needs to be at the start, since we don't want the end // finishing with a newline - the closing suffix should be on the same line .map(|l| { format!( "\n{:width$}{l}", "", width = if l.is_empty() { 0 } else { indentation }, l = l ) }) // `lines` removes the final line ending - add back .chain(Some(format!("\n{:width$}", "", width = indentation))), ); } else { out.push_str(contents); } out.push_str(if is_escape { "\"###" } else { "\"" }); out } } impl<'a> From> for SnapshotContents { fn from(value: Cow<'a, str>) -> Self { match value { Cow::Borrowed(s) => SnapshotContents::from(s), Cow::Owned(s) => SnapshotContents::from(s), } } } impl From<&str> for SnapshotContents { fn from(value: &str) -> SnapshotContents { // make sure we have unix newlines consistently SnapshotContents(value.replace("\r\n", "\n")) } } impl From for SnapshotContents { fn from(value: String) -> SnapshotContents { // make sure we have unix newlines consistently SnapshotContents(value.replace("\r\n", "\n")) } } impl From for String { fn from(value: SnapshotContents) -> String { value.0 } } impl PartialEq for SnapshotContents { fn eq(&self, other: &Self) -> bool { self.as_str() == other.as_str() } } fn count_leading_spaces(value: &str) -> usize { value.chars().take_while(|x| x.is_whitespace()).count() } fn min_indentation(snapshot: &str) -> usize { let lines = snapshot.trim_end().lines(); if lines.clone().count() <= 1 { // not a multi-line string return 0; } lines .filter(|l| !l.is_empty()) .map(count_leading_spaces) .min() 
.unwrap_or(0) } // Removes excess indentation, removes excess whitespace at start & end // and changes newlines to \n. fn normalize_inline_snapshot(snapshot: &str) -> String { let indentation = min_indentation(snapshot); snapshot .trim_end() .lines() .skip_while(|l| l.is_empty()) .map(|l| l.get(indentation..).unwrap_or("")) .collect::>() .join("\n") } /// Extracts the module and snapshot name from a snapshot path fn names_of_path(path: &Path) -> (String, String) { // The final part of the snapshot file name is the test name; the // initial parts are the module name let parts: Vec<&str> = path .file_stem() .unwrap() .to_str() .unwrap_or("") .rsplitn(2, "__") .collect(); match parts.as_slice() { [snapshot_name, module_name] => (snapshot_name.to_string(), module_name.to_string()), [snapshot_name] => (snapshot_name.to_string(), String::new()), _ => (String::new(), "".to_string()), } } #[test] fn test_names_of_path() { assert_debug_snapshot!( names_of_path(Path::new("/src/snapshots/insta_tests__tests__name_foo.snap")), @r###" ( "name_foo", "insta_tests__tests", ) "### ); assert_debug_snapshot!( names_of_path(Path::new("/src/snapshots/name_foo.snap")), @r###" ( "name_foo", "", ) "### ); assert_debug_snapshot!( names_of_path(Path::new("foo/src/snapshots/go1.20.5.snap")), @r###" ( "go1.20.5", "", ) "### ); } /// Helper function that returns the real inline snapshot value from a given /// frozen value string. If the string starts with the '⋮' character /// (optionally prefixed by whitespace) the alternative serialization format /// is picked which has slightly improved indentation semantics. 
/// /// This also changes all newlines to \n fn get_inline_snapshot_value(frozen_value: &str) -> String { // TODO: could move this into the SnapshotContents `from_inline` method // (the only call site) if frozen_value.trim_start().starts_with('⋮') { // legacy format - retain so old snapshots still work let mut buf = String::new(); let mut line_iter = frozen_value.lines(); let mut indentation = 0; for line in &mut line_iter { let line_trimmed = line.trim_start(); if line_trimmed.is_empty() { continue; } indentation = line.len() - line_trimmed.len(); // 3 because '⋮' is three utf-8 bytes long buf.push_str(&line_trimmed[3..]); buf.push('\n'); break; } for line in &mut line_iter { if let Some(prefix) = line.get(..indentation) { if !prefix.trim().is_empty() { return "".to_string(); } } if let Some(remainder) = line.get(indentation..) { if let Some(rest) = remainder.strip_prefix('⋮') { buf.push_str(rest); buf.push('\n'); } else if remainder.trim().is_empty() { continue; } else { return "".to_string(); } } } buf.trim_end().to_string() } else { normalize_inline_snapshot(frozen_value) } } #[test] fn test_snapshot_contents() { use similar_asserts::assert_eq; let snapshot_contents = SnapshotContents("testing".to_string()); assert_eq!(snapshot_contents.to_inline(0), r#""testing""#); let t = &" a b"[1..]; assert_eq!( SnapshotContents(t.to_string()).to_inline(0), "r###\" a b \"###" ); let t = &" a b"[1..]; assert_eq!( SnapshotContents(t.to_string()).to_inline(4), "r###\" a b \"###" ); let t = &" a b"[1..]; assert_eq!( SnapshotContents(t.to_string()).to_inline(0), "r###\" a b \"###" ); let t = &" a b"[1..]; assert_eq!( SnapshotContents(t.to_string()).to_inline(0), "r###\" a b \"###" ); let t = &" ab "[1..]; assert_eq!( SnapshotContents(t.to_string()).to_inline(0), "r###\" ab \"###" ); let t = "ab"; assert_eq!(SnapshotContents(t.to_string()).to_inline(0), r#""ab""#); } #[test] fn test_normalize_inline_snapshot() { use similar_asserts::assert_eq; // here we do exact matching 
(rather than `assert_snapshot`) // to ensure we're not incorporating the modifications this library makes let t = r#" 1 2 "#; assert_eq!( normalize_inline_snapshot(t), r###" 1 2"###[1..] ); let t = r#" 1 2"#; assert_eq!( normalize_inline_snapshot(t), r###" 1 2"###[1..] ); let t = r#" 1 2 "#; assert_eq!( normalize_inline_snapshot(t), r###" 1 2"###[1..] ); let t = r#" 1 2 "#; assert_eq!( normalize_inline_snapshot(t), r###" 1 2"###[1..] ); let t = r#" a "#; assert_eq!(normalize_inline_snapshot(t), "a"); let t = ""; assert_eq!(normalize_inline_snapshot(t), ""); let t = r#" a b c "#; assert_eq!( normalize_inline_snapshot(t), r###" a b c"###[1..] ); let t = r#" a "#; assert_eq!(normalize_inline_snapshot(t), "a"); let t = " a"; assert_eq!(normalize_inline_snapshot(t), "a"); let t = r#"a a"#; assert_eq!( normalize_inline_snapshot(t), r###" a a"###[1..] ); } #[test] fn test_min_indentation() { use similar_asserts::assert_eq; let t = r#" 1 2 "#; assert_eq!(min_indentation(t), 3); let t = r#" 1 2"#; assert_eq!(min_indentation(t), 4); let t = r#" 1 2 "#; assert_eq!(min_indentation(t), 12); let t = r#" 1 2 "#; assert_eq!(min_indentation(t), 3); let t = r#" a "#; assert_eq!(min_indentation(t), 8); let t = ""; assert_eq!(min_indentation(t), 0); let t = r#" a b c "#; assert_eq!(min_indentation(t), 0); let t = r#" a "#; assert_eq!(min_indentation(t), 0); let t = " a"; assert_eq!(min_indentation(t), 4); let t = r#"a a"#; assert_eq!(min_indentation(t), 0); } #[test] fn test_inline_snapshot_value_newline() { // https://github.com/mitsuhiko/insta/issues/39 assert_eq!(get_inline_snapshot_value("\n"), ""); } #[test] fn test_parse_yaml_error() { use std::env::temp_dir; let mut temp = temp_dir(); temp.push("bad.yaml"); let mut f = fs::File::create(temp.clone()).unwrap(); let invalid = r#"--- This is invalid yaml: { { --- "#; f.write_all(invalid.as_bytes()).unwrap(); let error = format!("{}", Snapshot::from_file(temp.as_path()).unwrap_err()); assert!(error.contains("Failed parsing the YAML 
from")); assert!(error.contains("/bad.yaml")); } /// Check that snapshots don't take ownership of the value #[test] fn test_ownership() { // Range is non-copy use std::ops::Range; let r = Range { start: 0, end: 10 }; assert_debug_snapshot!(r, @"0..10"); assert_debug_snapshot!(r, @"0..10"); } insta-1.39.0/src/snapshots/doctest_runtime_rs__named.snap000064400000000000000000000001501046102023000217430ustar 00000000000000--- source: src/runtime.rs expression: "vec![1, 2, 3, 4, 5]" --- [ 1, 2, 3, 4, 5, ] insta-1.39.0/src/snapshots/insta__test__embedded.snap000064400000000000000000000001121046102023000210060ustar 00000000000000--- source: src/test.rs expression: "\"Just a string\"" --- Just a string insta-1.39.0/src/test.rs000064400000000000000000000001271046102023000131500ustar 00000000000000#[test] fn test_embedded_test() { assert_snapshot!("embedded", "Just a string"); } insta-1.39.0/src/utils.rs000064400000000000000000000065451046102023000133430ustar 00000000000000use std::{ borrow::Cow, env, io::Write, path::Path, process::{Command, Stdio}, }; /// Are we running in in a CI environment? pub fn is_ci() -> bool { match env::var("CI").ok().as_deref() { Some("false") | Some("0") | Some("") => false, None => env::var("TF_BUILD").is_ok(), Some(_) => true, } } #[cfg(feature = "colors")] pub use console::style; #[cfg(not(feature = "colors"))] mod fake_colors { pub struct FakeStyledObject(D); macro_rules! style_attr { ($($name:ident)*) => { $( #[inline] pub fn $name(self) -> FakeStyledObject { self } )* } } impl FakeStyledObject { style_attr!(red green yellow cyan bold dim underlined); } impl std::fmt::Display for FakeStyledObject { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(&self.0, f) } } pub fn style(val: D) -> FakeStyledObject { FakeStyledObject(val) } } #[cfg(not(feature = "colors"))] pub use self::fake_colors::*; /// Returns the term width that insta should use. 
pub fn term_width() -> usize { #[cfg(feature = "colors")] { console::Term::stdout().size().1 as usize } #[cfg(not(feature = "colors"))] { 74 } } /// Converts a path into a string that can be persisted. pub fn path_to_storage(path: &Path) -> String { #[cfg(windows)] { path.to_str().unwrap().replace('\\', "/") } #[cfg(not(windows))] { path.to_string_lossy().into() } } /// Tries to format a given rust expression with rustfmt pub fn format_rust_expression(value: &str) -> Cow<'_, str> { const PREFIX: &str = "const x:() = "; const SUFFIX: &str = ";\n"; if let Ok(mut proc) = Command::new("rustfmt") .arg("--emit=stdout") .arg("--edition=2018") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::null()) .spawn() { { let stdin = proc.stdin.as_mut().unwrap(); stdin.write_all(PREFIX.as_bytes()).unwrap(); stdin.write_all(value.as_bytes()).unwrap(); stdin.write_all(SUFFIX.as_bytes()).unwrap(); } if let Ok(output) = proc.wait_with_output() { if output.status.success() { // slice between after the prefix and before the suffix // (currently 14 from the start and 2 before the end, respectively) let start = PREFIX.len() + 1; let end = output.stdout.len() - SUFFIX.len(); return std::str::from_utf8(&output.stdout[start..end]) .unwrap() .replace("\r\n", "\n") .into(); } } } Cow::Borrowed(value) } #[test] fn test_format_rust_expression() { use crate::assert_snapshot; assert_snapshot!(format_rust_expression("vec![1,2,3]"), @"vec![1, 2, 3]"); assert_snapshot!(format_rust_expression("vec![1,2,3].iter()"), @"vec![1, 2, 3].iter()"); assert_snapshot!(format_rust_expression(r#" "aoeu""#), @r###""aoeu""###); assert_snapshot!(format_rust_expression(r#" "aoe😄""#), @r###""aoe😄""###); assert_snapshot!(format_rust_expression("😄😄😄😄😄"), @"😄😄😄😄😄") } insta-1.39.0/tests/glob_submodule/mod.rs000064400000000000000000000027151046102023000163320ustar 00000000000000#![cfg(feature = "glob")] #[test] fn test_basic_globbing_parent_dir() { insta::glob!("../inputs", "*.txt", |path| { let contents = 
std::fs::read_to_string(path).unwrap(); insta::assert_json_snapshot!(&contents); }); } #[test] fn test_basic_globbing_nested_parent_dir_base_path() { insta::glob!("../inputs-nested", "*/*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_snapshot!(&contents); }); } #[test] fn test_basic_globbing_nested_parent_glob() { insta::glob!("..", "inputs-nested/*/*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_snapshot!(&contents); }); } #[test] fn test_globs_follow_links_parent_dir_base_path() { insta::glob!("../link-to-inputs", "*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_json_snapshot!(&contents); }); } #[test] fn test_globs_follow_links_parent_dir_glob() { insta::glob!("..", "link-to-inputs/*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_json_snapshot!(&contents); }); } #[test] fn test_basic_globbing_absolute_dir() { insta::glob!( concat!(env!("CARGO_MANIFEST_DIR"), "/tests/inputs"), "*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_json_snapshot!(&contents); } ); } ././@LongLink00006440000000000000000000000164000000000000007774Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_absolute_dir@goodbye.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_absolute_dir@g000064400000000000000000000001771046102023000325230ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/goodbye.txt --- "Contents of goodbye" ././@LongLink00006440000000000000000000000162000000000000007772Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_absolute_dir@hello.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_absolute_dir@h000064400000000000000000000001731046102023000325200ustar 
00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/hello.txt --- "Contents of hello" ././@LongLink00006440000000000000000000000203000000000000007766Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_dir_base_path@a__file.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_000064400000000000000000000001701046102023000325630ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs-nested/a/file.txt --- Hello A ././@LongLink00006440000000000000000000000203000000000000007766Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_dir_base_path@b__file.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_000064400000000000000000000001701046102023000325630ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs-nested/b/file.txt --- Hello B ././@LongLink00006440000000000000000000000172000000000000007773Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_glob@a__file.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_000064400000000000000000000001701046102023000325630ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs-nested/a/file.txt --- Hello A ././@LongLink00006440000000000000000000000172000000000000007773Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_glob@b__file.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_nested_parent_000064400000000000000000000001701046102023000325630ustar 00000000000000--- source: tests/glob_submodule/mod.rs 
expression: "&contents" input_file: tests/inputs-nested/b/file.txt --- Hello B ././@LongLink00006440000000000000000000000162000000000000007772Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_parent_dir@goodbye.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_parent_dir@goo000064400000000000000000000001771046102023000325340ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/goodbye.txt --- "Contents of goodbye" ././@LongLink00006440000000000000000000000160000000000000007770Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_parent_dir@hello.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__basic_globbing_parent_dir@hel000064400000000000000000000001731046102023000325140ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/hello.txt --- "Contents of hello" ././@LongLink00006440000000000000000000000200000000000000007763Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir_base_path@goodbye.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir000064400000000000000000000001771046102023000326730ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/goodbye.txt --- "Contents of goodbye" ././@LongLink00006440000000000000000000000176000000000000007777Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir_base_path@hello.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir000064400000000000000000000001731046102023000326670ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/hello.txt --- 
"Contents of hello" ././@LongLink00006440000000000000000000000173000000000000007774Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir_glob@goodbye.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir000064400000000000000000000001771046102023000326730ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/goodbye.txt --- "Contents of goodbye" ././@LongLink00006440000000000000000000000171000000000000007772Lustar insta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir_glob@hello.txt.snapinsta-1.39.0/tests/glob_submodule/snapshots/test_glob__glob_submodule__globs_follow_links_parent_dir000064400000000000000000000001731046102023000326670ustar 00000000000000--- source: tests/glob_submodule/mod.rs expression: "&contents" input_file: tests/inputs/hello.txt --- "Contents of hello" insta-1.39.0/tests/inputs/goodbye.txt000064400000000000000000000000231046102023000157040ustar 00000000000000Contents of goodbyeinsta-1.39.0/tests/inputs/hello.txt000064400000000000000000000000211046102023000153550ustar 00000000000000Contents of helloinsta-1.39.0/tests/inputs-nested/a/file.txt000064400000000000000000000000101046102023000166670ustar 00000000000000Hello A insta-1.39.0/tests/inputs-nested/b/file.txt000064400000000000000000000000101046102023000166700ustar 00000000000000Hello B insta-1.39.0/tests/link-to-inputs/goodbye.txt000064400000000000000000000000231046102023000172570ustar 00000000000000Contents of goodbyeinsta-1.39.0/tests/link-to-inputs/hello.txt000064400000000000000000000000211046102023000167300ustar 00000000000000Contents of helloinsta-1.39.0/tests/snapshots/snapshot_no_module_prepending.snap000064400000000000000000000001341046102023000232140ustar 00000000000000--- source: tests/test_settings.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3, ] 
insta-1.39.0/tests/snapshots/test_basic__Testing.snap000064400000000000000000000000721046102023000210560ustar 00000000000000--- source: tests/test_basic.rs expression: expr --- name insta-1.39.0/tests/snapshots/test_basic__debug_vector.snap000064400000000000000000000001311046102023000221050ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3, ] insta-1.39.0/tests/snapshots/test_basic__display.snap000064400000000000000000000001061046102023000211040ustar 00000000000000--- source: tests/test_basic.rs expression: td --- TestDisplay struct insta-1.39.0/tests/snapshots/test_basic__insta_sort_order.snap000064400000000000000000000002031046102023000230150ustar 00000000000000--- source: tests/test_basic.rs expression: m --- ? - 1 - 3 : 4 ? - 1 - 4 : 4 ? - 2 - 3 : 4 ? - 3 - 3 : 4 ? - 9 - 3 : 4 insta-1.39.0/tests/snapshots/test_basic__json_vector.snap000064400000000000000000000001221046102023000217700ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3 ] insta-1.39.0/tests/snapshots/test_basic__nested__nested_module.snap000064400000000000000000000001001046102023000237610ustar 00000000000000--- source: tests/test_basic.rs expression: "\"aoeu\"" --- aoeu insta-1.39.0/tests/snapshots/test_basic__trailing_commas-2.snap000064400000000000000000000001321046102023000227450ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4, 5]" --- - 1 - 2 - 3 - 4 - 5 insta-1.39.0/tests/snapshots/test_basic__trailing_commas.snap000064400000000000000000000001061046102023000226070ustar 00000000000000--- source: tests/test_basic.rs expression: "\"Testing\"" --- Testing insta-1.39.0/tests/snapshots/test_basic__unnamed_debug_vector-2.snap000064400000000000000000000001431046102023000237560ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4]" --- [ 1, 2, 3, 4, ] 
insta-1.39.0/tests/snapshots/test_basic__unnamed_debug_vector-3.snap000064400000000000000000000001551046102023000237620ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4, 5]" --- [ 1, 2, 3, 4, 5, ] insta-1.39.0/tests/snapshots/test_basic__unnamed_debug_vector.snap000064400000000000000000000001311046102023000236140ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3, ] insta-1.39.0/tests/snapshots/test_basic__unnamed_display-2.snap000064400000000000000000000001101046102023000227450ustar 00000000000000--- source: tests/test_basic.rs expression: "\"whatever\"" --- whatever insta-1.39.0/tests/snapshots/test_basic__unnamed_display.snap000064400000000000000000000001061046102023000226130ustar 00000000000000--- source: tests/test_basic.rs expression: td --- TestDisplay struct insta-1.39.0/tests/snapshots/test_basic__unnamed_json_vector-2.snap000064400000000000000000000001321046102023000236370ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4]" --- [ 1, 2, 3, 4 ] insta-1.39.0/tests/snapshots/test_basic__unnamed_json_vector-3.snap000064400000000000000000000001421046102023000236410ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4, 5]" --- [ 1, 2, 3, 4, 5 ] insta-1.39.0/tests/snapshots/test_basic__unnamed_json_vector.snap000064400000000000000000000001221046102023000234770ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3 ] insta-1.39.0/tests/snapshots/test_basic__unnamed_nested_closure.snap000064400000000000000000000001311046102023000241620ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3, ] insta-1.39.0/tests/snapshots/test_basic__unnamed_yaml_vector-2.snap000064400000000000000000000001241046102023000236310ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4]" --- - 1 - 2 - 3 - 4 
insta-1.39.0/tests/snapshots/test_basic__unnamed_yaml_vector-3.snap000064400000000000000000000001331046102023000236320ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3, 4, 5]" --- - 1 - 2 - 3 - 4 - 5 insta-1.39.0/tests/snapshots/test_basic__unnamed_yaml_vector.snap000064400000000000000000000001151046102023000234720ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- - 1 - 2 - 3 insta-1.39.0/tests/snapshots/test_basic__yaml_vector.snap000064400000000000000000000001151046102023000217630ustar 00000000000000--- source: tests/test_basic.rs expression: "vec![1, 2, 3]" --- - 1 - 2 - 3 insta-1.39.0/tests/snapshots/test_bugs__crlf.snap000064400000000000000000000001271046102023000202470ustar 00000000000000--- source: tests/test_bugs.rs expression: "\"foo\\r\\nbar\\r\\nbaz\"" --- foo bar baz insta-1.39.0/tests/snapshots/test_bugs__trailing_crlf.snap000064400000000000000000000001361046102023000221400ustar 00000000000000--- source: tests/test_bugs.rs expression: "\"foo\\r\\nbar\\r\\nbaz\\r\\n\"" --- foo bar baz insta-1.39.0/tests/snapshots/test_glob__basic_globbing@goodbye.txt.snap000064400000000000000000000001661046102023000245220ustar 00000000000000--- source: tests/test_glob.rs expression: "&contents" input_file: tests/inputs/goodbye.txt --- "Contents of goodbye" insta-1.39.0/tests/snapshots/test_glob__basic_globbing@hello.txt.snap000064400000000000000000000001621046102023000241710ustar 00000000000000--- source: tests/test_glob.rs expression: "&contents" input_file: tests/inputs/hello.txt --- "Contents of hello" insta-1.39.0/tests/snapshots/test_glob__basic_globbing_nested@a__file.txt.snap000064400000000000000000000001571046102023000260120ustar 00000000000000--- source: tests/test_glob.rs expression: "&contents" input_file: tests/inputs-nested/a/file.txt --- Hello A insta-1.39.0/tests/snapshots/test_glob__basic_globbing_nested@b__file.txt.snap000064400000000000000000000001571046102023000260130ustar 00000000000000--- 
source: tests/test_glob.rs expression: "&contents" input_file: tests/inputs-nested/b/file.txt --- Hello B insta-1.39.0/tests/snapshots/test_glob__globs_follow_links@goodbye.txt.snap000064400000000000000000000001661046102023000254660ustar 00000000000000--- source: tests/test_glob.rs expression: "&contents" input_file: tests/inputs/goodbye.txt --- "Contents of goodbye" insta-1.39.0/tests/snapshots/test_glob__globs_follow_links@hello.txt.snap000064400000000000000000000001621046102023000251350ustar 00000000000000--- source: tests/test_glob.rs expression: "&contents" input_file: tests/inputs/hello.txt --- "Contents of hello" insta-1.39.0/tests/snapshots/test_inline__unnamed_thread_single_line-2.snap000064400000000000000000000001311046102023000253170ustar 00000000000000--- source: tests/test_inline.rs expression: "\"Testing-thread-2\"" --- Testing-thread-2 insta-1.39.0/tests/snapshots/test_inline__unnamed_thread_single_line.snap000064400000000000000000000001251046102023000251630ustar 00000000000000--- source: tests/test_inline.rs expression: "\"Testing-thread\"" --- Testing-thread insta-1.39.0/tests/snapshots/test_redaction__foo_bar.snap000064400000000000000000000004051046102023000217370ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo.bar\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), ], ], } insta-1.39.0/tests/snapshots/test_redaction__foo_bar_alt.snap000064400000000000000000000004161046102023000226010ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo[\\\"bar\\\"]\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), ], ], } insta-1.39.0/tests/snapshots/test_redaction__foo_bar_deep.snap000064400000000000000000000004421046102023000227350ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo.bar.**\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), DeepWildcard, ], ], } 
insta-1.39.0/tests/snapshots/test_redaction__foo_bar_full_range.snap000064400000000000000000000005251046102023000241400ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo.bar[]\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), Range( None, None, ), ], ], } insta-1.39.0/tests/snapshots/test_redaction__foo_bar_range.snap000064400000000000000000000006601046102023000231160ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo.bar[10:20]\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), Range( Some( 10, ), Some( 20, ), ), ], ], } insta-1.39.0/tests/snapshots/test_redaction__foo_bar_range_from.snap000064400000000000000000000006031046102023000241360ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo.bar[10:]\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), Range( Some( 10, ), None, ), ], ], } insta-1.39.0/tests/snapshots/test_redaction__foo_bar_range_to.snap000064400000000000000000000006031046102023000236150ustar 00000000000000--- source: tests/test_redaction.rs expression: "Selector::parse(\".foo.bar[:10]\").unwrap()" --- Selector { selectors: [ [ Key( "foo", ), Key( "bar", ), Range( None, Some( 10, ), ), ], ], } insta-1.39.0/tests/snapshots/test_redaction__map_key_redaction.snap000064400000000000000000000002011046102023000237770ustar 00000000000000--- source: tests/test_redaction.rs expression: foo_value --- hm: ? 
bucket: "[bucket]" value: 0 : 42 btm: "[key]": 23 insta-1.39.0/tests/snapshots/test_redaction__rounded_redaction.snap000064400000000000000000000001741046102023000240230ustar 00000000000000--- source: tests/test_redaction.rs expression: "&MyPoint { x: 1.0 / 3.0, y: 6.0 / 3.0 }" --- { "x": 0.3333, "y": 2.0 } insta-1.39.0/tests/snapshots/test_redaction__struct_array_redaction.snap000064400000000000000000000003471046102023000251070ustar 00000000000000--- source: tests/test_redaction.rs expression: "vec![checkout]" --- - _id: "[checkout_id]" products: - _id: "[product_id]" product_name: "[product_name]" - _id: "[product_id]" product_name: "[product_name]" insta-1.39.0/tests/snapshots/test_redaction__user.snap000064400000000000000000000004371046102023000213130ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 23,\n username: \"john_doe\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id: "[id]" username: john_doe email: john@example.com extra: "" insta-1.39.0/tests/snapshots/test_redaction__user_csv.snap000064400000000000000000000004361046102023000221650ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 44,\n username: \"julius_csv\".to_string(),\n email: Email(\"julius@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id,username,email,extra [id],julius_csv,julius@example.com, insta-1.39.0/tests/snapshots/test_redaction__user_json.snap000064400000000000000000000005231046102023000223400ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 9999,\n username: \"jason_doe\".to_string(),\n email: Email(\"jason@example.com\".to_string()),\n extra: \"ssn goes here\".to_string(),\n }" --- { "id": "[id]", "username": "jason_doe", "email": "jason@example.com", "extra": "[extra]" } insta-1.39.0/tests/snapshots/test_redaction__user_json_flags.snap000064400000000000000000000005731046102023000235210ustar 00000000000000--- 
source: tests/test_redaction.rs expression: "&User {\n id: 122,\n username: \"jason_doe\".to_string(),\n flags: vec![\"zzz\".into(), \"foo\".into(), \"aha\".into(),\n \"is_admin\".into()].into_iter().collect(),\n }" --- { "id": "[id]", "username": "jason_doe", "flags": [ "aha", "foo", "is_admin", "zzz" ] } insta-1.39.0/tests/snapshots/test_redaction__user_json_flags_alt.snap000064400000000000000000000006031046102023000243530ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 122,\n username: \"jason_doe\".to_string(),\n flags: MySet(vec![\"zzz\".into(), \"foo\".into(), \"aha\".into(),\n \"is_admin\".into()].into_iter().collect()),\n }" --- { "flags": [ "aha", "foo", "is_admin", "zzz" ], "id": 122, "username": "jason_doe" } insta-1.39.0/tests/snapshots/test_redaction__user_json_settings.snap000064400000000000000000000005221046102023000242570ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 122,\n username: \"jason_doe\".to_string(),\n email: Email(\"jason@example.com\".to_string()),\n extra: \"ssn goes here\".to_string(),\n }" --- { "id": "[id]", "username": "jason_doe", "email": "jason@example.com", "extra": "[extra]" } insta-1.39.0/tests/snapshots/test_redaction__user_json_settings_callback.snap000064400000000000000000000005231046102023000260740ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 1234,\n username: \"jason_doe\".to_string(),\n email: Email(\"jason@example.com\".to_string()),\n extra: \"extra here\".to_string(),\n }" --- { "id": "[id]", "username": "jason_doe", "email": "jason@example.com", "extra": "extra here" } insta-1.39.0/tests/snapshots/test_redaction__user_ron.snap000064400000000000000000000004751046102023000221730ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 53,\n username: \"john_ron\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- User( id: "[id]", username: 
"john_ron", email: Email("john@example.com"), extra: "", ) insta-1.39.0/tests/snapshots/test_redaction__user_toml.snap000064400000000000000000000004461046102023000223460ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 53,\n username: \"john_ron\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id = '[id]' username = 'john_ron' email = 'john@example.com' extra = '' insta-1.39.0/tests/snapshots/test_redaction__with_random_value_and_match_comma-2.snap000064400000000000000000000004361046102023000273540ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 11,\n username: \"john_doe\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id: "[id]" username: john_doe email: john@example.com extra: "" insta-1.39.0/tests/snapshots/test_redaction__with_random_value_and_match_comma.snap000064400000000000000000000004361046102023000272150ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 11,\n username: \"john_doe\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id: "[id]" username: john_doe email: john@example.com extra: "" insta-1.39.0/tests/snapshots/test_redaction__with_random_value_and_trailing_comma_match.snap000064400000000000000000000004371046102023000311070ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 11,\n username: \"john_doe\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id: "[id]" username: john_doe email: john@example.com extra: "" insta-1.39.0/tests/snapshots/test_redaction__with_random_value_csv_match.snap000064400000000000000000000004361046102023000260720ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 44,\n username: \"julius_csv\".to_string(),\n email: Email(\"julius@example.com\".to_string()),\n 
extra: \"\".to_string(),\n }" --- id,username,email,extra [id],julius_csv,julius@example.com, insta-1.39.0/tests/snapshots/test_redaction__with_random_value_json_match.snap000064400000000000000000000005231046102023000262450ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 9999,\n username: \"jason_doe\".to_string(),\n email: Email(\"jason@example.com\".to_string()),\n extra: \"ssn goes here\".to_string(),\n }" --- { "id": "[id]", "username": "jason_doe", "email": "jason@example.com", "extra": "[extra]" } insta-1.39.0/tests/snapshots/test_redaction__with_random_value_json_settings2.snap000064400000000000000000000005221046102023000270720ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 975,\n username: \"jason_doe\".to_string(),\n email: Email(\"jason@example.com\".to_string()),\n extra: \"ssn goes here\".to_string(),\n }" --- { "id": "[id]", "username": "jason_doe", "email": "jason@example.com", "extra": "[extra]" } insta-1.39.0/tests/snapshots/test_redaction__with_random_value_ron_match.snap000064400000000000000000000004751046102023000261000ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 53,\n username: \"john_ron\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- User( id: "[id]", username: "john_ron", email: Email("john@example.com"), extra: "", ) insta-1.39.0/tests/snapshots/test_redaction__with_random_value_toml_match.snap000064400000000000000000000004461046102023000262530ustar 00000000000000--- source: tests/test_redaction.rs expression: "&User {\n id: 53,\n username: \"john_ron\".to_string(),\n email: Email(\"john@example.com\".to_string()),\n extra: \"\".to_string(),\n }" --- id = '[id]' username = 'john_ron' email = 'john@example.com' extra = '' insta-1.39.0/tests/snapshots/test_settings__snapshot_with_description.snap000064400000000000000000000002101046102023000255070ustar 00000000000000--- source: 
tests/test_settings.rs description: The snapshot is three integers expression: "vec![1, 2, 3]" --- [ 1, 2, 3, ] insta-1.39.0/tests/snapshots/test_settings__snapshot_with_description_and_info.snap000064400000000000000000000003351046102023000273540ustar 00000000000000--- source: tests/test_settings.rs description: The snapshot is four integers expression: "vec![1, 2, 3, 4]" info: env: ENVIRONMENT: production cmdline: - my-tool - run --- [ 1, 2, 3, 4, ] insta-1.39.0/tests/snapshots/test_settings__snapshot_with_description_and_raw_info.snap000064400000000000000000000003441046102023000302250ustar 00000000000000--- source: tests/test_settings.rs description: The snapshot is four integers expression: "vec![1, 2, 3, 4]" info: env: - ENVIRONMENT - production cmdline: - my-tool - run --- [ 1, 2, 3, 4, ] insta-1.39.0/tests/snapshots/test_suffixes__basic_suffixes@1.snap000064400000000000000000000000761046102023000233760ustar 00000000000000--- source: tests/test_suffixes.rs expression: "&value" --- 1 insta-1.39.0/tests/snapshots/test_suffixes__basic_suffixes@2.snap000064400000000000000000000000761046102023000233770ustar 00000000000000--- source: tests/test_suffixes.rs expression: "&value" --- 2 insta-1.39.0/tests/snapshots/test_suffixes__basic_suffixes@3.snap000064400000000000000000000000761046102023000234000ustar 00000000000000--- source: tests/test_suffixes.rs expression: "&value" --- 3 insta-1.39.0/tests/snapshots2/test_settings__snapshot_path.snap000064400000000000000000000001341046102023000231540ustar 00000000000000--- source: tests/test_settings.rs expression: "vec![1, 2, 3]" --- [ 1, 2, 3, ] insta-1.39.0/tests/test_allow_duplicates.rs000064400000000000000000000011201046102023000171300ustar 00000000000000use insta::{allow_duplicates, assert_debug_snapshot}; #[test] fn test_basic_duplicates_passes() { allow_duplicates! 
{ for x in (0..10).step_by(2) { let is_even = x % 2 == 0; assert_debug_snapshot!(is_even, @"true"); } } } #[test] #[should_panic = "snapshot assertion for 'basic_duplicates_assertion_failed' failed in line"] fn test_basic_duplicates_assertion_failed() { allow_duplicates! { for x in (0..10).step_by(3) { let is_even = x % 2 == 0; assert_debug_snapshot!(is_even, @"true"); } } } insta-1.39.0/tests/test_basic.rs000064400000000000000000000051351046102023000146700ustar 00000000000000#[cfg(feature = "json")] use insta::assert_json_snapshot; #[cfg(feature = "yaml")] use insta::assert_yaml_snapshot; #[allow(deprecated)] use insta::{assert_debug_snapshot, assert_display_snapshot, assert_snapshot}; use std::fmt; #[test] fn test_debug_vector() { assert_debug_snapshot!("debug_vector", vec![1, 2, 3]); } #[test] fn test_unnamed_debug_vector() { assert_debug_snapshot!(vec![1, 2, 3]); assert_debug_snapshot!(vec![1, 2, 3, 4]); assert_debug_snapshot!(vec![1, 2, 3, 4, 5]); } #[test] fn test_unnamed_nested_closure() { #![allow(clippy::redundant_closure_call)] (|| { (|| { assert_debug_snapshot!(vec![1, 2, 3]); })(); })(); } #[cfg(feature = "yaml")] #[test] fn test_yaml_vector() { assert_yaml_snapshot!("yaml_vector", vec![1, 2, 3]); } #[cfg(feature = "yaml")] #[test] fn test_unnamed_yaml_vector() { assert_yaml_snapshot!(vec![1, 2, 3]); assert_yaml_snapshot!(vec![1, 2, 3, 4]); assert_yaml_snapshot!(vec![1, 2, 3, 4, 5]); } #[cfg(feature = "json")] #[test] fn test_json_vector() { assert_json_snapshot!("json_vector", vec![1, 2, 3]); } #[cfg(feature = "json")] #[test] fn test_unnamed_json_vector() { assert_json_snapshot!(vec![1, 2, 3]); assert_json_snapshot!(vec![1, 2, 3, 4]); assert_json_snapshot!(vec![1, 2, 3, 4, 5]); } mod nested { #[test] fn test_nested_module() { insta::assert_snapshot!("aoeu"); } } #[test] fn test_trailing_commas() { assert_snapshot!("Testing",); assert_snapshot!("Testing", "name",); assert_snapshot!("Testing", "name", "expr",); #[cfg(feature = "yaml")] 
assert_yaml_snapshot!(vec![1, 2, 3, 4, 5],); } struct TestDisplay; impl fmt::Display for TestDisplay { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "TestDisplay struct") } } #[test] #[allow(deprecated)] fn test_display() { let td = TestDisplay; assert_display_snapshot!("display", td); } #[test] #[allow(deprecated)] fn test_unnamed_display() { let td = TestDisplay; assert_display_snapshot!(td); assert_display_snapshot!("whatever"); } #[cfg(feature = "json")] #[test] fn test_u128_json() { let x: u128 = u128::from(u64::MAX) * 2; assert_json_snapshot!(&x, @"36893488147419103230"); } #[cfg(feature = "yaml")] #[test] fn insta_sort_order() { use std::collections::HashMap; let mut m = HashMap::new(); m.insert((1, 3), 4); m.insert((2, 3), 4); m.insert((1, 4), 4); m.insert((3, 3), 4); m.insert((9, 3), 4); insta::with_settings!({sort_maps =>true}, { insta::assert_yaml_snapshot!(m); }); } insta-1.39.0/tests/test_bugs.rs000064400000000000000000000004631046102023000145460ustar 00000000000000#[test] fn test_crlf() { insta::assert_snapshot!("foo\r\nbar\r\nbaz"); } #[test] fn test_trailing_crlf() { insta::assert_snapshot!("foo\r\nbar\r\nbaz\r\n"); } #[test] fn test_trailing_crlf_inline() { insta::assert_snapshot!("foo\r\nbar\r\nbaz\r\n", @r###" foo bar baz "###); } insta-1.39.0/tests/test_clash_detection.rs000064400000000000000000000027301046102023000167350ustar 00000000000000use std::env; use std::thread; fn test_foo_always_missing() { insta::assert_debug_snapshot!(42); } fn foo_always_missing() { insta::assert_debug_snapshot!(42); } #[test] fn test_clash_detection() { let old_update_value = env::var("INSTA_UPDATE"); let old_force_pass_value = env::var("INSTA_FORCE_PASS"); env::set_var("INSTA_UPDATE", "no"); env::set_var("INSTA_FORCE_PASS", "0"); let err1 = thread::Builder::new() .spawn(|| { test_foo_always_missing(); }) .unwrap() .join() .unwrap_err(); let err2 = thread::Builder::new() .spawn(|| { foo_always_missing(); }) .unwrap() .join() 
.unwrap_err(); if let Ok(value) = old_update_value { env::set_var("INSTA_UPDATE", value); } else { env::remove_var("INSTA_UPDATE"); } if let Ok(value) = old_force_pass_value { env::set_var("INSTA_FORCE_PASS", value); } else { env::remove_var("INSTA_FORCE_PASS"); } let s1 = err1.downcast_ref::().unwrap(); let s2 = err2.downcast_ref::().unwrap(); let mut values = [s1.as_str(), s2.as_str()]; values.sort(); assert_eq!(&values[..], &vec![ "Insta snapshot name clash detected between \'foo_always_missing\' and \'test_foo_always_missing\' in \'test_clash_detection\'. Rename one function.", "snapshot assertion for \'foo_always_missing\' failed in line 5", ][..]); } insta-1.39.0/tests/test_filters.rs000064400000000000000000000004311046102023000152510ustar 00000000000000#![cfg(feature = "filters")] use insta::{assert_snapshot, with_settings}; #[test] fn test_basic_filter() { with_settings!({filters => vec![ (r"\b[[:xdigit:]]{8}\b", "[SHORT_HEX]") ]}, { assert_snapshot!("Hello DEADBEEF!", @"Hello [SHORT_HEX]!"); }) } insta-1.39.0/tests/test_glob.rs000064400000000000000000000015161046102023000145310ustar 00000000000000#![cfg(feature = "glob")] mod glob_submodule; #[test] fn test_basic_globbing() { insta::glob!("inputs/*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_json_snapshot!(&contents); }); } #[test] fn test_basic_globbing_nested() { insta::glob!("inputs-nested/*/*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_snapshot!(&contents); }); } #[test] fn test_globs_follow_links() { insta::glob!("link-to-inputs/*.txt", |path| { let contents = std::fs::read_to_string(path).unwrap(); insta::assert_json_snapshot!(&contents); }); } #[test] #[should_panic(expected = "the glob! 
macro did not match any files.")] fn test_empty_glob_fails() { insta::glob!("nonexistent", |_| { // nothing }); } insta-1.39.0/tests/test_inline.rs000064400000000000000000000134151046102023000150650ustar 00000000000000#[cfg(feature = "csv")] use insta::assert_csv_snapshot; #[cfg(feature = "ron")] use insta::assert_ron_snapshot; #[cfg(feature = "toml")] use insta::assert_toml_snapshot; #[cfg(feature = "yaml")] use insta::assert_yaml_snapshot; #[cfg(feature = "json")] use insta::{assert_compact_json_snapshot, assert_json_snapshot}; use insta::{assert_debug_snapshot, assert_snapshot}; use std::thread; #[test] fn test_simple() { assert_debug_snapshot!(vec![1, 2, 3, 4], @r###" [ 1, 2, 3, 4, ] "###); } #[test] fn test_trailing_commas() { assert_snapshot!( "Testing", @"Testing", ); } #[test] fn test_single_line() { assert_snapshot!("Testing", @"Testing"); } // We used to use the thread name for snapshot name detection. This is unreliable // so this test now basically does exactly the same as `test_unnamed_single_line`. 
#[test] fn test_unnamed_thread_single_line() { let builder = thread::Builder::new().name("foo::lol::something".into()); let handler = builder .spawn(|| { assert_snapshot!("Testing-thread"); assert_snapshot!("Testing-thread-2"); }) .unwrap(); handler.join().unwrap(); } #[test] fn test_newline() { // https://github.com/mitsuhiko/insta/issues/39 assert_snapshot!("\n", @r###" "###); } #[cfg(feature = "csv")] #[test] fn test_csv_inline() { #[derive(serde::Serialize)] pub struct Email(String); #[derive(serde::Serialize)] pub struct User { id: u32, username: String, email: Email, } assert_csv_snapshot!(User { id: 1453, username: "mehmed-doe".into(), email: Email("mehmed@doe.invalid".into()), }, @r###" id,username,email 1453,mehmed-doe,mehmed@doe.invalid "###); } #[cfg(feature = "csv")] #[test] fn test_csv_inline_multiple_values() { #[derive(serde::Serialize)] pub struct Email(String); #[derive(serde::Serialize)] pub struct User { id: u32, username: String, email: Email, } let user1 = User { id: 1453, username: "mehmed-doe".into(), email: Email("mehmed@doe.invalid".into()), }; let user2 = User { id: 1455, username: "mehmed-doe-di".into(), email: Email("mehmed@doe-di.invalid".into()), }; assert_csv_snapshot!(vec![user1, user2], @r###" id,username,email 1453,mehmed-doe,mehmed@doe.invalid 1455,mehmed-doe-di,mehmed@doe-di.invalid "###); } #[cfg(feature = "ron")] #[test] fn test_ron_inline() { #[derive(serde::Serialize)] pub struct Email(String); #[derive(serde::Serialize)] pub struct User { id: u32, username: String, email: Email, } assert_ron_snapshot!(User { id: 42, username: "peter-doe".into(), email: Email("peter@doe.invalid".into()), }, @r###" User( id: 42, username: "peter-doe", email: Email("peter@doe.invalid"), ) "###); } #[cfg(feature = "toml")] #[test] fn test_toml_inline() { #[derive(serde::Serialize)] pub struct Email(String); #[derive(serde::Serialize)] pub struct User { id: u32, username: String, email: Email, } assert_toml_snapshot!(User { id: 42, username: 
"peter-doe".into(), email: Email("peter@doe.invalid".into()), }, @r###" id = 42 username = 'peter-doe' email = 'peter@doe.invalid' "###); } #[cfg(feature = "json")] #[test] fn test_json_inline() { assert_json_snapshot!(vec!["foo", "bar"], @r###" [ "foo", "bar" ] "###); } #[cfg(feature = "yaml")] #[test] fn test_yaml_inline() { #[derive(serde::Serialize)] pub struct User { id: u32, username: String, email: String, } assert_yaml_snapshot!(User { id: 42, username: "peter-pan".into(), email: "peterpan@wonderland.invalid".into() }, @r###" --- id: 42 username: peter-pan email: peterpan@wonderland.invalid "###); } #[cfg(all(feature = "redactions", feature = "yaml"))] #[test] fn test_yaml_inline_redacted() { #[derive(serde::Serialize)] pub struct User { id: u32, username: String, email: String, } assert_yaml_snapshot!(User { id: 42, username: "peter-pan".into(), email: "peterpan@wonderland.invalid".into() }, { ".id" => "[user-id]" }, @r###" --- id: "[user-id]" username: peter-pan email: peterpan@wonderland.invalid "###); } #[test] fn test_non_basic_plane() { assert_snapshot!("a 😀oeu", @"a 😀oeu"); } #[test] fn test_multiline_with_empty_lines() { assert_snapshot!("# first\nsecond\n third\n\n# alternative", @r###" # first second third # alternative "###); } #[cfg(feature = "json")] #[test] fn test_compact_json() { assert_compact_json_snapshot!((1..30).collect::>(), @"[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29]"); assert_compact_json_snapshot!((1..34).collect::>(), @r###" [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33 ] "###); } #[test] #[should_panic = "Insta does not allow inline snapshot assertions in loops"] fn test_inline_test_in_loop() { for i in 0..10 { assert_snapshot!(i.to_string(), @"0"); } } #[test] fn test_inline_snapshot_whitespace() { assert_snapshot!("\n\nfoo\n\n bar\n\n", @r###" foo bar "###); } 
insta-1.39.0/tests/test_redaction.rs000064400000000000000000000314571046102023000155650ustar 00000000000000#![cfg(feature = "redactions")] use insta::_macro_support::Selector; #[cfg(feature = "csv")] use insta::assert_csv_snapshot; #[cfg(feature = "json")] use insta::assert_json_snapshot; #[cfg(feature = "ron")] use insta::assert_ron_snapshot; #[cfg(feature = "toml")] use insta::assert_toml_snapshot; #[cfg(feature = "yaml")] use insta::assert_yaml_snapshot; use insta::assert_debug_snapshot; use serde::Serialize; #[test] fn test_selector_parser() { macro_rules! assert_selector_snapshot { ($short:expr, $sel:expr) => { assert_debug_snapshot!($short, Selector::parse($sel).unwrap()); }; } assert_selector_snapshot!("foo_bar", ".foo.bar"); assert_selector_snapshot!("foo_bar_alt", ".foo[\"bar\"]"); assert_selector_snapshot!("foo_bar_full_range", ".foo.bar[]"); assert_selector_snapshot!("foo_bar_range_to", ".foo.bar[:10]"); assert_selector_snapshot!("foo_bar_range_from", ".foo.bar[10:]"); assert_selector_snapshot!("foo_bar_range", ".foo.bar[10:20]"); assert_selector_snapshot!("foo_bar_deep", ".foo.bar.**"); } #[derive(Serialize)] pub struct Email(String); #[derive(Serialize)] pub struct User { id: u32, username: String, email: Email, extra: String, } #[cfg(feature = "yaml")] #[test] fn test_with_random_value() { assert_yaml_snapshot!("user", &User { id: 42, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, { ".id" => "[id]" }); } #[cfg(feature = "yaml")] #[test] fn test_with_random_value_inline_callback() { assert_yaml_snapshot!("user", &User { id: 23, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, { ".id" => insta::dynamic_redaction(|value, path| { similar_asserts::assert_eq!(path.to_string(), ".id"); similar_asserts::assert_eq!(value.as_u64().unwrap(), 23); "[id]" }), }); } #[cfg(feature = "yaml")] #[test] fn test_with_random_value_and_trailing_comma() { 
assert_yaml_snapshot!("user", &User { id: 11, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, { ".id" => "[id]", }); } #[cfg(feature = "yaml")] #[test] fn test_with_random_value_and_match_comma() { assert_yaml_snapshot!( &User { id: 11, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, match .. { ".id" => "[id]", } ); assert_yaml_snapshot!( &User { id: 11, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, match .. { ".id" => "[id]", }, // comma here ); assert_yaml_snapshot!( &User { id: 11, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, match .. { ".id" => "[id]", }, @r###" --- id: "[id]" username: john_doe email: john@example.com extra: "" "###, // comma here ); } #[cfg(feature = "csv")] #[test] fn test_with_random_value_csv() { assert_csv_snapshot!("user_csv", &User { id: 44, username: "julius_csv".to_string(), email: Email("julius@example.com".to_string()), extra: "".to_string(), }, { ".id" => "[id]" }); } #[cfg(feature = "csv")] #[test] fn test_with_random_value_csv_match() { assert_csv_snapshot!( &User { id: 44, username: "julius_csv".to_string(), email: Email("julius@example.com".to_string()), extra: "".to_string(), }, match .. { ".id" => "[id]", } ); } #[cfg(feature = "ron")] #[test] fn test_with_random_value_ron() { assert_ron_snapshot!("user_ron", &User { id: 53, username: "john_ron".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, { ".id" => "[id]" }); } #[cfg(feature = "ron")] #[test] fn test_with_random_value_ron_match() { assert_ron_snapshot!( &User { id: 53, username: "john_ron".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, match .. 
{ ".id" => "[id]", } ); } #[cfg(feature = "toml")] #[test] fn test_with_random_value_toml() { assert_toml_snapshot!("user_toml", &User { id: 53, username: "john_ron".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, { ".id" => "[id]" }); } #[cfg(feature = "toml")] #[test] fn test_with_random_value_toml_match() { assert_toml_snapshot!( &User { id: 53, username: "john_ron".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }, match .. { ".id" => "[id]", } ); } #[cfg(feature = "json")] #[test] fn test_with_random_value_json() { assert_json_snapshot!("user_json", &User { id: 9999, username: "jason_doe".to_string(), email: Email("jason@example.com".to_string()), extra: "ssn goes here".to_string(), }, { ".id" => "[id]", ".extra" => "[extra]" }); } #[cfg(feature = "json")] #[test] fn test_with_random_value_json_match() { assert_json_snapshot!( &User { id: 9999, username: "jason_doe".to_string(), email: Email("jason@example.com".to_string()), extra: "ssn goes here".to_string(), }, match .. 
{ ".id" => "[id]", ".extra" => "[extra]", } ); } #[cfg(feature = "json")] #[test] fn test_with_random_value_json_settings() { let mut settings = insta::Settings::new(); settings.add_redaction(".id", "[id]"); settings.add_redaction(".extra", "[extra]"); settings.bind(|| { assert_json_snapshot!( "user_json_settings", &User { id: 122, username: "jason_doe".to_string(), email: Email("jason@example.com".to_string()), extra: "ssn goes here".to_string(), } ); }); } #[cfg(feature = "json")] #[test] fn test_with_callbacks() { let mut settings = insta::Settings::new(); settings.add_dynamic_redaction(".id", |value, path| { similar_asserts::assert_eq!(path.to_string(), ".id"); similar_asserts::assert_eq!(value.as_u64().unwrap(), 1234); "[id]" }); settings.bind(|| { assert_json_snapshot!( "user_json_settings_callback", &User { id: 1234, username: "jason_doe".to_string(), email: Email("jason@example.com".to_string()), extra: "extra here".to_string(), } ); }); } #[cfg(feature = "json")] #[test] fn test_with_random_value_json_settings2() { insta::with_settings!({redactions => vec![ (".id", "[id]".into()), (".extra", "[extra]".into()), ]}, { assert_json_snapshot!( &User { id: 975, username: "jason_doe".to_string(), email: Email("jason@example.com".to_string()), extra: "ssn goes here".to_string(), } ); }); } #[cfg(feature = "json")] #[test] fn test_redact_newtype_struct() { #[derive(Serialize)] pub struct UserWrapper(User); let wrapper = UserWrapper(User { id: 42, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }); assert_json_snapshot!(wrapper, { r#".id"# => "[id]" }, @r###" { "id": "[id]", "username": "john_doe", "email": "john@example.com", "extra": "" } "###); } #[cfg(feature = "yaml")] #[test] fn test_redact_newtype_enum() { #[derive(Serialize)] pub enum Role { Admin(User), Visitor { id: String, name: String }, } let visitor = Role::Visitor { id: "my-id".into(), name: "my-name".into(), }; assert_yaml_snapshot!(visitor, { 
r#".id"# => "[id]", }, @r###" --- Visitor: id: "[id]" name: my-name "###); let admin = Role::Admin(User { id: 42, username: "john_doe".to_string(), email: Email("john@example.com".to_string()), extra: "".to_string(), }); assert_yaml_snapshot!(admin, { r#".id"# => "[id]", }, @r###" --- Admin: id: "[id]" username: john_doe email: john@example.com extra: "" "###); } #[cfg(feature = "json")] #[test] fn test_redact_recursive() { #[derive(Serialize)] pub struct Node { id: u64, next: Option>, } let root = Node { id: 0, next: Some(Box::new(Node { id: 1, next: None })), }; assert_json_snapshot!(root, { ".**.id" => "[id]", }, @r###" { "id": "[id]", "next": { "id": "[id]", "next": null } } "###); } #[cfg(feature = "yaml")] #[test] fn test_struct_array_redaction() { #[derive(Serialize)] pub struct Product { _id: String, product_name: String, } #[derive(Serialize)] pub struct Checkout { _id: String, products: Vec, } let checkout = Checkout { _id: "checkout/1".to_string(), products: vec![ Product { _id: "product/1".to_string(), product_name: "a car".to_string(), }, Product { _id: "product/2".to_string(), product_name: "a boat".to_string(), }, ], }; assert_yaml_snapshot!(vec![checkout], { "[]._id" => "[checkout_id]", "[].products[]._id" => "[product_id]", "[].products[].product_name" => "[product_name]", }); } #[cfg(feature = "yaml")] #[test] fn test_map_key_redaction() { #[derive(Serialize, Hash, PartialEq, PartialOrd, Eq, Ord)] struct Key { bucket: u32, value: u32, } #[derive(Serialize)] struct Foo { hm: std::collections::HashMap, btm: std::collections::BTreeMap<(u32, u32), u32>, } let mut hm = std::collections::HashMap::new(); hm.insert( Key { bucket: 1, value: 0, }, 42, ); let mut btm = std::collections::BTreeMap::new(); btm.insert((0, 0), 23); let foo_value = Foo { hm, btm }; assert_yaml_snapshot!(foo_value, { ".hm.$key.bucket" => "[bucket]", ".btm.$key" => "[key]", }); } #[cfg(feature = "json")] #[test] fn test_ordering() { #[derive(Debug, Serialize)] pub struct User { id: 
u64, username: String, flags: std::collections::HashSet, } let mut settings = insta::Settings::new(); settings.add_redaction(".id", "[id]"); settings.sort_selector(".flags"); settings.bind(|| { assert_json_snapshot!( "user_json_flags", &User { id: 122, username: "jason_doe".to_string(), flags: vec!["zzz".into(), "foo".into(), "aha".into(), "is_admin".into()] .into_iter() .collect(), } ); }); } #[cfg(feature = "json")] #[test] fn test_ordering_newtype_set() { #[derive(Debug, Serialize)] pub struct MySet(std::collections::HashSet); #[derive(Debug, Serialize)] pub struct User { id: u64, username: String, flags: MySet, } assert_json_snapshot!( "user_json_flags_alt", &User { id: 122, username: "jason_doe".to_string(), flags: MySet(vec!["zzz".into(), "foo".into(), "aha".into(), "is_admin".into()] .into_iter() .collect()), }, { "." => insta::sorted_redaction(), ".flags" => insta::sorted_redaction() } ); } #[cfg(feature = "json")] #[test] fn test_rounded_redaction() { #[derive(Debug, Serialize)] pub struct MyPoint { x: f64, y: f64, } assert_json_snapshot!( "rounded_redaction", &MyPoint { x: 1.0 / 3.0, y: 6.0 / 3.0, }, { ".x" => insta::rounded_redaction(4), ".y" => insta::rounded_redaction(4), } ); } insta-1.39.0/tests/test_settings.rs000064400000000000000000000072271046102023000154530ustar 00000000000000#[cfg(feature = "yaml")] use insta::assert_yaml_snapshot; use similar_asserts::assert_eq; use insta::{assert_debug_snapshot, with_settings, Settings}; #[cfg(feature = "yaml")] #[test] fn test_simple() { let mut map = std::collections::HashMap::new(); map.insert("a", "first value"); map.insert("b", "second value"); map.insert("c", "third value"); map.insert("d", "fourth value"); let mut settings = insta::Settings::new(); settings.set_sort_maps(true); settings.bind(|| { assert_yaml_snapshot!(&map, @r###" --- a: first value b: second value c: third value d: fourth value "###); }); } #[cfg(feature = "yaml")] #[test] fn test_bound_to_scope() { let mut map = 
std::collections::HashMap::new(); map.insert("a", "first value"); map.insert("b", "second value"); map.insert("c", "third value"); map.insert("d", "fourth value"); { let mut settings = Settings::new(); settings.set_sort_maps(true); let _guard = settings.bind_to_scope(); assert_yaml_snapshot!(&map, @r###" --- a: first value b: second value c: third value d: fourth value "###); } assert!(!Settings::clone_current().sort_maps()); } #[cfg(feature = "yaml")] #[test] fn test_settings_macro() { let mut map = std::collections::HashMap::new(); map.insert("a", "first value"); map.insert("b", "second value"); map.insert("c", "third value"); map.insert("d", "fourth value"); with_settings!({sort_maps => true}, { insta::assert_yaml_snapshot!(&map, @r###" --- a: first value b: second value c: third value d: fourth value "###); }); } #[test] fn test_snapshot_path() { with_settings!({snapshot_path => "snapshots2"}, { assert_debug_snapshot!(vec![1, 2, 3]); }); } #[test] fn test_snapshot_no_module_prepending() { with_settings!({prepend_module_to_snapshot => false}, { assert_debug_snapshot!(vec![1, 2, 3]); }); } #[test] fn test_snapshot_with_description() { with_settings!({description => "The snapshot is three integers"}, { assert_debug_snapshot!(vec![1, 2, 3]) }); } #[test] fn test_snapshot_with_description_and_raw_info() { use insta::internals::Content; let raw_info = Content::Map(vec![ ( Content::from("env"), Content::Seq(vec![ Content::from("ENVIRONMENT"), Content::from("production"), ]), ), ( Content::from("cmdline"), Content::Seq(vec![Content::from("my-tool"), Content::from("run")]), ), ]); with_settings!({description => "The snapshot is four integers", raw_info => &raw_info}, { assert_debug_snapshot!(vec![1, 2, 3, 4]) }); } #[cfg(feature = "serde")] #[test] fn test_snapshot_with_description_and_info() { #[derive(serde::Serialize)] pub struct Info { env: std::collections::HashMap<&'static str, &'static str>, cmdline: Vec<&'static str>, } let info = Info { env: 
From::from([("ENVIRONMENT", "production")]), cmdline: vec!["my-tool", "run"], }; with_settings!({description => "The snapshot is four integers", info => &info}, { assert_debug_snapshot!(vec![1, 2, 3, 4]) }); } #[test] fn test_with_settings_inherit() { with_settings!({sort_maps => true}, { with_settings!({description => "aha"}, { let settings = Settings::clone_current(); assert!(settings.sort_maps()); assert_eq!(settings.description(), Some("aha")); }); }); } insta-1.39.0/tests/test_suffixes.rs000064400000000000000000000003471046102023000154430ustar 00000000000000#[cfg(feature = "json")] #[test] fn test_basic_suffixes() { for value in [1, 2, 3] { insta::with_settings!({snapshot_suffix => value.to_string()}, { insta::assert_json_snapshot!(&value); }); } }