snafu-derive-0.7.1/.cargo_vcs_info.json
{
  "git": {
    "sha1": "6159ea9f3a3e189f1faa2f63904402f04dea54ec"
  },
  "path_in_vcs": "snafu-derive"
}

snafu-derive-0.7.1/.gitignore
/target
**/*.rs.bk
Cargo.lock

snafu-derive-0.7.1/Cargo.toml
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
name = "snafu-derive"
version = "0.7.1"
authors = ["Jake Goulding "]
description = "An ergonomic error handling library"
documentation = "https://docs.rs/snafu"
license = "MIT OR Apache-2.0"
repository = "https://github.com/shepmaster/snafu"

[lib]
proc-macro = true

[dependencies.heck]
version = "0.4"

[dependencies.proc-macro2]
version = "1.0"

[dependencies.quote]
version = "1.0"

[dependencies.syn]
version = "1.0"
features = ["full"]

[features]
rust_1_46 = []
unstable-backtraces-impl-std = []

snafu-derive-0.7.1/Cargo.toml.orig
[package]
name = "snafu-derive"
version = "0.7.1"
authors = ["Jake Goulding "]
edition = "2018"
description = "An ergonomic error handling library"
documentation = "https://docs.rs/snafu"
repository = "https://github.com/shepmaster/snafu"
license = "MIT OR Apache-2.0"

[features]
rust_1_46 = []
unstable-backtraces-impl-std = []

[lib]
proc-macro = true

[dependencies]
syn = { version = "1.0", features = ["full"] }
quote = "1.0"
proc-macro2 = "1.0"
heck = "0.4"

snafu-derive-0.7.1/LICENSE-APACHE

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2019- Jake Goulding Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. snafu-derive-0.7.1/LICENSE-MIT000064400000000000000000000020420072674642500137030ustar 00000000000000Copyright (c) 2019- Jake Goulding Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

snafu-derive-0.7.1/src/lib.rs
#![recursion_limit = "128"] // https://github.com/rust-lang/rust/issues/62059

extern crate proc_macro;

use crate::parse::attributes_from_syn;
use proc_macro::TokenStream;
use quote::quote;
use std::collections::{BTreeSet, VecDeque};
use std::fmt;

mod parse;
mod shared;

// The snafu crate re-exports this and adds useful documentation.
#[proc_macro_derive(Snafu, attributes(snafu))]
pub fn snafu_derive(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).expect("Could not parse type to derive Error for");
    impl_snafu_macro(ast)
}

type MultiSynResult<T> = std::result::Result<T, Vec<syn::Error>>;

/// Some arbitrary tokens we treat as a black box
type UserInput = Box<dyn quote::ToTokens>;

enum ModuleName {
    Default,
    Custom(syn::Ident),
}

enum SnafuInfo {
    Enum(EnumInfo),
    NamedStruct(NamedStructInfo),
    TupleStruct(TupleStructInfo),
}

struct EnumInfo {
    crate_root: UserInput,
    name: syn::Ident,
    generics: syn::Generics,
    variants: Vec<FieldContainer>,
    default_visibility: Option<UserInput>,
    default_suffix: SuffixKind,
    module: Option<ModuleName>,
}

struct FieldContainer {
    name: syn::Ident,
    backtrace_field: Option<Field>,
    implicit_fields: Vec<Field>,
    selector_kind: ContextSelectorKind,
    display_format: Option<Display>,
    doc_comment: Option<DocComment>,
    visibility: Option<UserInput>,
    module: Option<ModuleName>,
}

enum SuffixKind {
    Default,
    None,
    Some(syn::Ident),
}

impl SuffixKind {
    fn resolve_with_default<'a>(&'a self, def: &'a Self) -> &'a Self {
        use SuffixKind::*;
        match self {
            Default => def,
            None => self,
            Some(_) => self,
        }
    }
}

enum ContextSelectorKind {
    Context {
        suffix: SuffixKind,
        source_field: Option<SourceField>,
        user_fields: Vec<Field>,
    },
    Whatever {
        source_field: Option<SourceField>,
        message_field: Field,
    },
    NoContext {
        source_field: SourceField,
    },
}

impl ContextSelectorKind {
    fn is_whatever(&self) -> bool {
        match self {
            ContextSelectorKind::Whatever { .. } => true,
            _ => false,
        }
    }

    fn user_fields(&self) -> &[Field] {
        match self {
            ContextSelectorKind::Context { user_fields, .. } => user_fields,
            ContextSelectorKind::Whatever { .. } => &[],
            ContextSelectorKind::NoContext { .. } => &[],
        }
    }

    fn source_field(&self) -> Option<&SourceField> {
        match self {
            ContextSelectorKind::Context { source_field, .. } => source_field.as_ref(),
            ContextSelectorKind::Whatever { source_field, .. } => source_field.as_ref(),
            ContextSelectorKind::NoContext { source_field } => Some(source_field),
        }
    }

    fn message_field(&self) -> Option<&Field> {
        match self {
            ContextSelectorKind::Context { .. } => None,
            ContextSelectorKind::Whatever { message_field, .. } => Some(message_field),
            ContextSelectorKind::NoContext { ..
} => None, } } } struct NamedStructInfo { crate_root: UserInput, field_container: FieldContainer, generics: syn::Generics, } struct TupleStructInfo { crate_root: UserInput, name: syn::Ident, generics: syn::Generics, transformation: Transformation, } #[derive(Clone)] pub(crate) struct Field { name: syn::Ident, ty: syn::Type, original: syn::Field, } impl Field { fn name(&self) -> &syn::Ident { &self.name } } struct SourceField { name: syn::Ident, transformation: Transformation, backtrace_delegate: bool, } impl SourceField { fn name(&self) -> &syn::Ident { &self.name } } enum Transformation { None { ty: syn::Type }, Transform { ty: syn::Type, expr: syn::Expr }, } impl Transformation { fn ty(&self) -> &syn::Type { match self { Transformation::None { ty } => ty, Transformation::Transform { ty, .. } => ty, } } fn transformation(&self) -> proc_macro2::TokenStream { match self { Transformation::None { .. } => quote! { |v| v }, Transformation::Transform { expr, .. } => quote! { #expr }, } } } /// SyntaxErrors is a convenience wrapper for a list of syntax errors discovered while parsing /// something that derives Snafu. It makes it easier for developers to add and return syntax /// errors while walking through the parse tree. #[derive(Debug, Default)] struct SyntaxErrors { inner: Vec, } impl SyntaxErrors { /// Start a set of errors that all share the same location fn scoped(&mut self, scope: ErrorLocation) -> SyntaxErrorsScoped<'_> { SyntaxErrorsScoped { errors: self, scope, } } /// Adds a new syntax error. The description will be used in the /// compile error pointing to the tokens. fn add(&mut self, tokens: impl quote::ToTokens, description: impl fmt::Display) { self.inner .push(syn::Error::new_spanned(tokens, description)); } /// Adds the given list of errors. fn extend(&mut self, errors: impl IntoIterator) { self.inner.extend(errors); } #[allow(dead_code)] /// Returns the number of errors that have been added. fn len(&self) -> usize { self.inner.len() } /// Consume the SyntaxErrors, returning Ok if there were no syntax errors added, or Err(list) /// if there were syntax errors. fn finish(self) -> MultiSynResult<()> { if self.inner.is_empty() { Ok(()) } else { Err(self.inner) } } /// Consume the SyntaxErrors and a Result, returning the success /// value if neither have errors, otherwise combining the errors. fn absorb(mut self, res: MultiSynResult) -> MultiSynResult { match res { Ok(v) => self.finish().map(|()| v), Err(e) => { self.inner.extend(e); Err(self.inner) } } } } #[derive(Debug, Copy, Clone)] enum ErrorLocation { OnEnum, OnVariant, InVariant, OnField, OnNamedStruct, InNamedStruct, OnTupleStruct, } impl fmt::Display for ErrorLocation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use crate::ErrorLocation::*; match self { OnEnum => "on an enum".fmt(f), OnVariant => "on an enum variant".fmt(f), InVariant => "within an enum variant".fmt(f), OnField => "on a field".fmt(f), OnNamedStruct => "on a named struct".fmt(f), InNamedStruct => "within a named struct".fmt(f), OnTupleStruct => "on a tuple struct".fmt(f), } } } trait ErrorForLocation { fn for_location(&self, location: ErrorLocation) -> String; } struct SyntaxErrorsScoped<'a> { errors: &'a mut SyntaxErrors, scope: ErrorLocation, } impl SyntaxErrorsScoped<'_> { /// Adds a new syntax error. The description will be used in the /// compile error pointing to the tokens. 
    fn add(&mut self, tokens: impl quote::ToTokens, description: impl ErrorForLocation) {
        let description = description.for_location(self.scope);
        self.errors.add(tokens, description)
    }
}

/// Helper structure to handle cases where an attribute was used on an
/// element where it's not valid.
#[derive(Debug)]
struct OnlyValidOn {
    /// The name of the attribute that was misused.
    attribute: &'static str,
    /// A description of where that attribute is valid.
    valid_on: &'static str,
}

impl ErrorForLocation for OnlyValidOn {
    fn for_location(&self, location: ErrorLocation) -> String {
        format!(
            "`{}` attribute is only valid on {}, not {}",
            self.attribute, self.valid_on, location,
        )
    }
}

/// Helper structure to handle cases where a specific attribute value
/// was used on a field where it's not valid.
#[derive(Debug)]
struct WrongField {
    /// The name of the attribute that was misused.
    attribute: &'static str,
    /// The name of the field where that attribute is valid.
    valid_field: &'static str,
}

impl ErrorForLocation for WrongField {
    fn for_location(&self, _location: ErrorLocation) -> String {
        format!(
            r#"`{}` attribute is only valid on a field named "{}", not on other fields"#,
            self.attribute, self.valid_field,
        )
    }
}

/// Helper structure to handle cases where two incompatible attributes
/// were specified on the same element.
#[derive(Debug)]
struct IncompatibleAttributes(&'static [&'static str]);

impl ErrorForLocation for IncompatibleAttributes {
    fn for_location(&self, location: ErrorLocation) -> String {
        let attrs_string = self
            .0
            .iter()
            .map(|attr| format!("`{}`", attr))
            .collect::<Vec<_>>()
            .join(", ");
        format!(
            "Incompatible attributes [{}] specified {}",
            attrs_string, location,
        )
    }
}

/// Helper structure to handle cases where an attribute was
/// incorrectly used multiple times on the same element.
#[derive(Debug)]
struct DuplicateAttribute {
    attribute: &'static str,
    location: ErrorLocation,
}

impl fmt::Display for DuplicateAttribute {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Multiple `{}` attributes are not supported {}",
            self.attribute, self.location,
        )
    }
}

/// AtMostOne is a helper to track attributes seen during parsing. If more than one item is added,
/// it's added to a list of DuplicateAttribute errors, using the given `name` and `location` as
/// descriptors.
///
/// When done parsing a structure, call `finish` to get the first attribute found, if any, and the
/// list of errors, or call `finish_with_location` to get the attribute and the token tree where it
/// was found, which can be useful for error reporting.
#[derive(Debug)]
struct AtMostOne<T, U>
where
    U: quote::ToTokens,
{
    name: &'static str,
    location: ErrorLocation,
    // We store all the values we've seen to allow for `iter`, which helps the `AtMostOne` be
    // useful for additional manual error checking.
    values: VecDeque<(T, U)>,
    errors: SyntaxErrors,
}

impl<T, U> AtMostOne<T, U>
where
    U: quote::ToTokens + Clone,
{
    /// Creates an AtMostOne to track an attribute with the given
    /// `name` on the given `location` (often referencing a parent
    /// element).
    fn new(name: &'static str, location: ErrorLocation) -> Self {
        Self {
            name,
            location,
            values: VecDeque::new(),
            errors: SyntaxErrors::default(),
        }
    }

    /// Add an occurrence of the attribute found at the given token tree `tokens`.
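    // Illustrative sketch (not code from this crate): given an input like
    // `#[snafu(display("a"), display("b"))]`, both `display` values are `add`ed
    // to the same `AtMostOne`; `finish` then yields the first value plus a
    // `DuplicateAttribute` error pointing at the second occurrence.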
fn add(&mut self, item: T, tokens: U) { if !self.values.is_empty() { self.errors.add( tokens.clone(), DuplicateAttribute { attribute: self.name, location: self.location, }, ); } self.values.push_back((item, tokens)); } #[allow(dead_code)] /// Returns the number of elements that have been added. fn len(&self) -> usize { self.values.len() } /// Returns true if no elements have been added, otherwise false. #[allow(dead_code)] fn is_empty(&self) -> bool { self.values.is_empty() } /// Returns an iterator over all values that have been added. /// /// This can help with additional manual error checks beyond the duplication checks that /// `AtMostOne` handles for you. fn iter(&self) -> std::collections::vec_deque::Iter<(T, U)> { self.values.iter() } /// Consumes the AtMostOne, returning the first item added, if any, and the list of errors /// representing any items added beyond the first. fn finish(self) -> (Option, Vec) { let (value, errors) = self.finish_with_location(); (value.map(|(val, _location)| val), errors) } /// Like `finish` but also returns the location of the first item added. Useful when you have /// to do additional, manual error checking on the first item added, and you'd like to report /// an accurate location for it in case of errors. fn finish_with_location(mut self) -> (Option<(T, U)>, Vec) { let errors = match self.errors.finish() { Ok(()) => Vec::new(), Err(vec) => vec, }; (self.values.pop_front(), errors) } } fn impl_snafu_macro(ty: syn::DeriveInput) -> TokenStream { match parse_snafu_information(ty) { Ok(info) => info.into(), Err(e) => to_compile_errors(e).into(), } } fn to_compile_errors(errors: Vec) -> proc_macro2::TokenStream { let compile_errors = errors.iter().map(syn::Error::to_compile_error); quote! { #(#compile_errors)* } } fn parse_snafu_information(ty: syn::DeriveInput) -> MultiSynResult { use syn::spanned::Spanned; use syn::Data; let span = ty.span(); let syn::DeriveInput { ident, generics, data, attrs, .. 
} = ty; match data { Data::Enum(enum_) => parse_snafu_enum(enum_, ident, generics, attrs).map(SnafuInfo::Enum), Data::Struct(struct_) => parse_snafu_struct(struct_, ident, generics, attrs, span), _ => Err(vec![syn::Error::new( span, "Can only derive `Snafu` for an enum or a newtype", )]), } } const ATTR_DISPLAY: OnlyValidOn = OnlyValidOn { attribute: "display", valid_on: "enum variants or structs with named fields", }; const ATTR_SOURCE: OnlyValidOn = OnlyValidOn { attribute: "source", valid_on: "enum variant or struct fields with a name", }; const ATTR_SOURCE_BOOL: OnlyValidOn = OnlyValidOn { attribute: "source(bool)", valid_on: "enum variant or struct fields with a name", }; const ATTR_SOURCE_FALSE: WrongField = WrongField { attribute: "source(false)", valid_field: "source", }; const ATTR_SOURCE_FROM: OnlyValidOn = OnlyValidOn { attribute: "source(from)", valid_on: "enum variant or struct fields with a name", }; const ATTR_BACKTRACE: OnlyValidOn = OnlyValidOn { attribute: "backtrace", valid_on: "enum variant or struct fields with a name", }; const ATTR_BACKTRACE_FALSE: WrongField = WrongField { attribute: "backtrace(false)", valid_field: "backtrace", }; const ATTR_IMPLICIT: OnlyValidOn = OnlyValidOn { attribute: "implicit", valid_on: "enum variant or struct fields with a name", }; const ATTR_IMPLICIT_FALSE: WrongField = WrongField { attribute: "implicit(false)", valid_field: "location", }; const ATTR_VISIBILITY: OnlyValidOn = OnlyValidOn { attribute: "visibility", valid_on: "an enum, enum variants, or a struct with named fields", }; const ATTR_MODULE: OnlyValidOn = OnlyValidOn { attribute: "module", valid_on: "an enum or structs with named fields", }; const ATTR_CONTEXT: OnlyValidOn = OnlyValidOn { attribute: "context", valid_on: "enum variants or structs with named fields", }; const ATTR_CONTEXT_FLAG: OnlyValidOn = OnlyValidOn { attribute: "context(bool)", valid_on: "enum variants or structs with named fields", }; const ATTR_WHATEVER: OnlyValidOn = OnlyValidOn { attribute: "whatever", valid_on: "enum variants or structs with named fields", }; const ATTR_CRATE_ROOT: OnlyValidOn = OnlyValidOn { attribute: "crate_root", valid_on: "an enum or a struct", }; const SOURCE_BOOL_FROM_INCOMPATIBLE: IncompatibleAttributes = IncompatibleAttributes(&["source(false)", "source(from)"]); fn parse_snafu_enum( enum_: syn::DataEnum, name: syn::Ident, generics: syn::Generics, attrs: Vec, ) -> MultiSynResult { use syn::spanned::Spanned; use syn::Fields; let mut errors = SyntaxErrors::default(); let mut modules = AtMostOne::new("module", ErrorLocation::OnEnum); let mut default_visibilities = AtMostOne::new("visibility", ErrorLocation::OnEnum); let mut default_suffixes = AtMostOne::new("context(suffix)", ErrorLocation::OnEnum); let mut crate_roots = AtMostOne::new("crate_root", ErrorLocation::OnEnum); let mut enum_errors = errors.scoped(ErrorLocation::OnEnum); for attr in attributes_from_syn(attrs)? { use SnafuAttribute as Att; match attr { Att::Visibility(tokens, v) => default_visibilities.add(v, tokens), Att::Display(tokens, ..) => enum_errors.add(tokens, ATTR_DISPLAY), Att::Source(tokens, ss) => { for s in ss { match s { Source::Flag(..) => enum_errors.add(tokens.clone(), ATTR_SOURCE_BOOL), Source::From(..) 
=> enum_errors.add(tokens.clone(), ATTR_SOURCE_FROM), } } } Att::CrateRoot(tokens, root) => crate_roots.add(root, tokens), Att::Context(tokens, c) => match c { Context::Suffix(s) => default_suffixes.add(s, tokens), Context::Flag(_) => enum_errors.add(tokens, ATTR_CONTEXT_FLAG), }, Att::Module(tokens, v) => modules.add(v, tokens), Att::Backtrace(tokens, ..) => enum_errors.add(tokens, ATTR_BACKTRACE), Att::Implicit(tokens, ..) => enum_errors.add(tokens, ATTR_IMPLICIT), Att::Whatever(tokens) => enum_errors.add(tokens, ATTR_WHATEVER), Att::DocComment(..) => { /* Just a regular doc comment. */ } } } let (module, errs) = modules.finish(); errors.extend(errs); let (default_visibility, errs) = default_visibilities.finish(); errors.extend(errs); let (maybe_default_suffix, errs) = default_suffixes.finish(); let default_suffix = maybe_default_suffix.unwrap_or(SuffixKind::Default); errors.extend(errs); let (maybe_crate_root, errs) = crate_roots.finish(); let crate_root = maybe_crate_root.unwrap_or_else(default_crate_root); errors.extend(errs); let variants: sponge::AllErrors<_, _> = enum_ .variants .into_iter() .map(|variant| { let fields = match variant.fields { Fields::Named(f) => f.named.into_iter().collect(), Fields::Unnamed(_) => { return Err(vec![syn::Error::new( variant.fields.span(), "Can only derive `Snafu` for enums with struct-like and unit enum variants", )]); } Fields::Unit => vec![], }; let name = variant.ident; let span = name.span(); let attrs = attributes_from_syn(variant.attrs)?; field_container( name, span, attrs, fields, &mut errors, ErrorLocation::OnVariant, ErrorLocation::InVariant, ) }) .collect(); let variants = errors.absorb(variants.into_result())?; Ok(EnumInfo { crate_root, name, generics, variants, default_visibility, default_suffix, module, }) } fn field_container( name: syn::Ident, variant_span: proc_macro2::Span, attrs: Vec, fields: Vec, errors: &mut SyntaxErrors, outer_error_location: ErrorLocation, inner_error_location: ErrorLocation, ) -> MultiSynResult { use quote::ToTokens; use syn::spanned::Spanned; let mut outer_errors = errors.scoped(outer_error_location); let mut modules = AtMostOne::new("module", outer_error_location); let mut display_formats = AtMostOne::new("display", outer_error_location); let mut visibilities = AtMostOne::new("visibility", outer_error_location); let mut contexts = AtMostOne::new("context", outer_error_location); let mut whatevers = AtMostOne::new("whatever", outer_error_location); let mut doc_comment = DocComment::default(); let mut reached_end_of_doc_comment = false; for attr in attrs { use SnafuAttribute as Att; match attr { Att::Module(tokens, n) => modules.add(n, tokens), Att::Display(tokens, d) => display_formats.add(d, tokens), Att::Visibility(tokens, v) => visibilities.add(v, tokens), Att::Context(tokens, c) => contexts.add(c, tokens), Att::Whatever(tokens) => whatevers.add((), tokens), Att::Source(tokens, ..) => outer_errors.add(tokens, ATTR_SOURCE), Att::Backtrace(tokens, ..) => outer_errors.add(tokens, ATTR_BACKTRACE), Att::Implicit(tokens, ..) => outer_errors.add(tokens, ATTR_IMPLICIT), Att::CrateRoot(tokens, ..) => outer_errors.add(tokens, ATTR_CRATE_ROOT), Att::DocComment(_tts, doc_comment_line) => { // We join all the doc comment attributes with a space, // but end once the summary of the doc comment is // complete, which is indicated by an empty line. 
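// For example (illustrative user-side code, not part of this crate): a variant
// documented as
//
//     /// Could not read the config file at {path}
//     ///
//     /// Extra detail that is not part of the summary.
//     ReadConfig { path: PathBuf, source: io::Error },
//
// contributes only "Could not read the config file at {path}" to the generated
// `Display` message; `{path}` is later picked up by `extract_field_names` as a
// shorthand field reference.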
if !reached_end_of_doc_comment { let trimmed = doc_comment_line.trim(); if trimmed.is_empty() { reached_end_of_doc_comment = true; } else { doc_comment.push_str(trimmed); } } } } } let mut user_fields = Vec::new(); let mut source_fields = AtMostOne::new("source", inner_error_location); let mut backtrace_fields = AtMostOne::new("backtrace", inner_error_location); let mut implicit_fields = Vec::new(); for syn_field in fields { let original = syn_field.clone(); let span = syn_field.span(); let name = syn_field .ident .as_ref() .ok_or_else(|| vec![syn::Error::new(span, "Must have a named field")])?; let field = Field { name: name.clone(), ty: syn_field.ty.clone(), original, }; // Check whether we have multiple source/backtrace attributes on this field. // We can't just add to source_fields/backtrace_fields from inside the attribute // loop because source and backtrace are connected and require a bit of special // logic after the attribute loop. For example, we need to know whether there's a // source transformation before we record a source field, but it might be on a // later attribute. We use the data field of `source_attrs` to track any // transformations in case it was a `source(from(...))`, but for backtraces we // don't need any more data. let mut source_attrs = AtMostOne::new("source", ErrorLocation::OnField); let mut backtrace_attrs = AtMostOne::new("backtrace", ErrorLocation::OnField); let mut implicit_attrs = AtMostOne::new("implicit", ErrorLocation::OnField); // Keep track of the negative markers so we can check for inconsistencies and // exclude fields even if they have the "source" or "backtrace" name. let mut source_opt_out = false; let mut backtrace_opt_out = false; let mut implicit_opt_out = false; let mut field_errors = errors.scoped(ErrorLocation::OnField); for attr in attributes_from_syn(syn_field.attrs.clone())? { use SnafuAttribute as Att; match attr { Att::Source(tokens, ss) => { for s in ss { match s { Source::Flag(v) => { // If we've seen a `source(from)` then there will be a // `Some` value in `source_attrs`. let seen_source_from = source_attrs .iter() .map(|(val, _location)| val) .any(Option::is_some); if !v && seen_source_from { field_errors.add(tokens.clone(), SOURCE_BOOL_FROM_INCOMPATIBLE); } if v { source_attrs.add(None, tokens.clone()); } else if name == "source" { source_opt_out = true; } else { field_errors.add(tokens.clone(), ATTR_SOURCE_FALSE); } } Source::From(t, e) => { if source_opt_out { field_errors.add(tokens.clone(), SOURCE_BOOL_FROM_INCOMPATIBLE); } source_attrs.add(Some((t, e)), tokens.clone()); } } } } Att::Backtrace(tokens, v) => { if v { backtrace_attrs.add((), tokens); } else if name == "backtrace" { backtrace_opt_out = true; } else { field_errors.add(tokens, ATTR_BACKTRACE_FALSE); } } Att::Implicit(tokens, v) => { if v { implicit_attrs.add((), tokens); } else if name == "location" { implicit_opt_out = true; } else { field_errors.add(tokens, ATTR_IMPLICIT_FALSE); } } Att::Module(tokens, ..) => field_errors.add(tokens, ATTR_MODULE), Att::Visibility(tokens, ..) => field_errors.add(tokens, ATTR_VISIBILITY), Att::Display(tokens, ..) => field_errors.add(tokens, ATTR_DISPLAY), Att::Context(tokens, ..) => field_errors.add(tokens, ATTR_CONTEXT), Att::Whatever(tokens) => field_errors.add(tokens, ATTR_WHATEVER), Att::CrateRoot(tokens, ..) => field_errors.add(tokens, ATTR_CRATE_ROOT), Att::DocComment(..) => { /* Just a regular doc comment. */ } } } // Add errors for any duplicated attributes on this field. 
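// An illustrative variant exercising the field attributes handled above
// (assumed user-side code, not part of this crate):
//
//     Download {
//         // `source(from(...))` records a conversion applied when the error
//         // is built through its context selector.
//         #[snafu(source(from(std::io::Error, Box::new)))]
//         source: Box<std::io::Error>,
//         // A field named `backtrace` can opt back out and become ordinary context:
//         #[snafu(backtrace(false))]
//         backtrace: String,
//     },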
let (source_attr, errs) = source_attrs.finish_with_location(); errors.extend(errs); let (backtrace_attr, errs) = backtrace_attrs.finish_with_location(); errors.extend(errs); let (implicit_attr, errs) = implicit_attrs.finish(); errors.extend(errs); let source_attr = source_attr.or_else(|| { if field.name == "source" && !source_opt_out { Some((None, syn_field.clone().into_token_stream())) } else { None } }); let backtrace_attr = backtrace_attr.or_else(|| { if field.name == "backtrace" && !backtrace_opt_out { Some(((), syn_field.clone().into_token_stream())) } else { None } }); let implicit_attr = implicit_attr.is_some() || (field.name == "location" && !implicit_opt_out); if let Some((maybe_transformation, location)) = source_attr { let Field { name, ty, .. } = field; let transformation = maybe_transformation .map(|(ty, expr)| Transformation::Transform { ty, expr }) .unwrap_or_else(|| Transformation::None { ty }); source_fields.add( SourceField { name, transformation, // Specifying `backtrace` on a source field is how you request // delegation of the backtrace to the source error type. backtrace_delegate: backtrace_attr.is_some(), }, location, ); } else if let Some((_, location)) = backtrace_attr { backtrace_fields.add(field, location); } else if implicit_attr { implicit_fields.push(field); } else { user_fields.push(field); } } let (source, errs) = source_fields.finish_with_location(); errors.extend(errs); let (backtrace, errs) = backtrace_fields.finish_with_location(); errors.extend(errs); match (&source, &backtrace) { (Some(source), Some(backtrace)) if source.0.backtrace_delegate => { let source_location = source.1.clone(); let backtrace_location = backtrace.1.clone(); errors.add( source_location, "Cannot have `backtrace` field and `backtrace` attribute on a source field in the same variant", ); errors.add( backtrace_location, "Cannot have `backtrace` field and `backtrace` attribute on a source field in the same variant", ); } _ => {} // no conflict } let (module, errs) = modules.finish(); errors.extend(errs); let (display_format, errs) = display_formats.finish(); errors.extend(errs); let (visibility, errs) = visibilities.finish(); errors.extend(errs); let (is_context, errs) = contexts.finish_with_location(); let is_context = is_context.map(|(c, tt)| (c.into_enabled(), tt)); errors.extend(errs); let (is_whatever, errs) = whatevers.finish_with_location(); errors.extend(errs); let source_field = source.map(|(val, _tts)| val); let selector_kind = match (is_context, is_whatever) { (Some(((true, _), c_tt)), Some(((), o_tt))) => { let txt = "Cannot be both a `context` and `whatever` error"; return Err(vec![ syn::Error::new_spanned(c_tt, txt), syn::Error::new_spanned(o_tt, txt), ]); } (Some(((true, suffix), _)), None) => ContextSelectorKind::Context { suffix, source_field, user_fields, }, (None, None) => ContextSelectorKind::Context { suffix: SuffixKind::Default, source_field, user_fields, }, (Some(((false, _), _)), Some(_)) | (None, Some(_)) => { let mut messages = AtMostOne::new("message", outer_error_location); for f in user_fields { if f.name == "message" { let l = f.original.clone(); messages.add(f, l); } else { errors.add( f.original, "Whatever selectors must not have context fields", ); // todo: phrasing? 
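// For reference, the shape this `whatever` branch accepts (illustrative
// user-side code):
//
//     #[snafu(whatever, display("{message}"))]
//     Whatever {
//         message: String,
//         #[snafu(source(from(Box<dyn std::error::Error>, Some)))]
//         source: Option<Box<dyn std::error::Error>>,
//     },
//
// A `message` field is required; any other context field produces the error
// added just above.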
} } let (message_field, errs) = messages.finish(); errors.extend(errs); let message_field = message_field.ok_or_else(|| { vec![syn::Error::new( variant_span, "Whatever selectors must have a message field", )] })?; ContextSelectorKind::Whatever { source_field, message_field, } } (Some(((false, _), _)), None) => { errors.extend(user_fields.into_iter().map(|Field { original, .. }| { syn::Error::new_spanned( original, "Context selectors without context must not have context fields", ) })); let source_field = source_field.ok_or_else(|| { vec![syn::Error::new( variant_span, "Context selectors without context must have a source field", )] })?; ContextSelectorKind::NoContext { source_field } } }; Ok(FieldContainer { name, backtrace_field: backtrace.map(|(val, _tts)| val), implicit_fields, selector_kind, display_format, doc_comment: doc_comment.finish(), visibility, module, }) } fn parse_snafu_struct( struct_: syn::DataStruct, name: syn::Ident, generics: syn::Generics, attrs: Vec, span: proc_macro2::Span, ) -> MultiSynResult { use syn::Fields; match struct_.fields { Fields::Named(f) => { let f = f.named.into_iter().collect(); parse_snafu_named_struct(f, name, generics, attrs, span).map(SnafuInfo::NamedStruct) } Fields::Unnamed(f) => { parse_snafu_tuple_struct(f, name, generics, attrs, span).map(SnafuInfo::TupleStruct) } Fields::Unit => parse_snafu_named_struct(vec![], name, generics, attrs, span) .map(SnafuInfo::NamedStruct), } } fn parse_snafu_named_struct( fields: Vec, name: syn::Ident, generics: syn::Generics, attrs: Vec, span: proc_macro2::Span, ) -> MultiSynResult { let mut errors = SyntaxErrors::default(); let attrs = attributes_from_syn(attrs)?; let mut crate_roots = AtMostOne::new("crate_root", ErrorLocation::OnNamedStruct); let attrs = attrs .into_iter() .flat_map(|attr| match attr { SnafuAttribute::CrateRoot(tokens, root) => { crate_roots.add(root, tokens); None } other => Some(other), }) .collect(); let field_container = field_container( name, span, attrs, fields, &mut errors, ErrorLocation::OnNamedStruct, ErrorLocation::InNamedStruct, )?; let (maybe_crate_root, errs) = crate_roots.finish(); let crate_root = maybe_crate_root.unwrap_or_else(default_crate_root); errors.extend(errs); errors.finish()?; Ok(NamedStructInfo { crate_root, field_container, generics, }) } fn parse_snafu_tuple_struct( mut fields: syn::FieldsUnnamed, name: syn::Ident, generics: syn::Generics, attrs: Vec, span: proc_macro2::Span, ) -> MultiSynResult { let mut transformations = AtMostOne::new("source(from)", ErrorLocation::OnTupleStruct); let mut crate_roots = AtMostOne::new("crate_root", ErrorLocation::OnTupleStruct); let mut errors = SyntaxErrors::default(); let mut struct_errors = errors.scoped(ErrorLocation::OnTupleStruct); for attr in attributes_from_syn(attrs)? { use SnafuAttribute as Att; match attr { Att::Module(tokens, ..) => struct_errors.add(tokens, ATTR_MODULE), Att::Display(tokens, ..) => struct_errors.add(tokens, ATTR_DISPLAY), Att::Visibility(tokens, ..) => struct_errors.add(tokens, ATTR_VISIBILITY), Att::Source(tokens, ss) => { for s in ss { match s { Source::Flag(..) => struct_errors.add(tokens.clone(), ATTR_SOURCE_BOOL), Source::From(t, e) => transformations.add((t, e), tokens.clone()), } } } Att::Backtrace(tokens, ..) => struct_errors.add(tokens, ATTR_BACKTRACE), Att::Implicit(tokens, ..) => struct_errors.add(tokens, ATTR_IMPLICIT), Att::Context(tokens, ..) 
=> struct_errors.add(tokens, ATTR_CONTEXT), Att::Whatever(tokens) => struct_errors.add(tokens, ATTR_CONTEXT), Att::CrateRoot(tokens, root) => crate_roots.add(root, tokens), Att::DocComment(..) => { /* Just a regular doc comment. */ } } } fn one_field_error(span: proc_macro2::Span) -> syn::Error { syn::Error::new( span, "Can only derive `Snafu` for tuple structs with exactly one field", ) } let inner = fields .unnamed .pop() .ok_or_else(|| vec![one_field_error(span)])?; if !fields.unnamed.is_empty() { return Err(vec![one_field_error(span)]); } let (maybe_transformation, errs) = transformations.finish(); let transformation = maybe_transformation .map(|(ty, expr)| Transformation::Transform { ty, expr }) .unwrap_or_else(|| Transformation::None { ty: inner.into_value().ty, }); errors.extend(errs); let (maybe_crate_root, errs) = crate_roots.finish(); let crate_root = maybe_crate_root.unwrap_or_else(default_crate_root); errors.extend(errs); errors.finish()?; Ok(TupleStructInfo { crate_root, name, generics, transformation, }) } enum Context { Flag(bool), Suffix(SuffixKind), } impl Context { fn into_enabled(self) -> (bool, SuffixKind) { match self { Context::Flag(b) => (b, SuffixKind::None), Context::Suffix(suffix) => (true, suffix), } } } enum Source { Flag(bool), From(syn::Type, syn::Expr), } struct Display { exprs: Vec, shorthand_names: BTreeSet, assigned_names: BTreeSet, } #[derive(Default)] struct DocComment { content: String, shorthand_names: BTreeSet, } impl DocComment { fn push_str(&mut self, s: &str) { if !self.content.is_empty() { self.content.push_str(" "); } self.content.push_str(s); } fn finish(mut self) -> Option { if self.content.is_empty() { None } else { self.shorthand_names.extend( crate::parse::extract_field_names(&self.content) .map(|n| quote::format_ident!("{}", n)), ); Some(self) } } } /// A SnafuAttribute represents one SNAFU-specific attribute inside of `#[snafu(...)]`. For /// example, in `#[snafu(visibility(pub), display("hi"))]`, `visibility(pub)` and `display("hi")` /// are each a SnafuAttribute. /// /// We store the location in the source where we found the attribute (as a `TokenStream`) along /// with the data. The location can be used to give accurate error messages in case there was a /// problem with the use of the attribute. enum SnafuAttribute { Backtrace(proc_macro2::TokenStream, bool), Context(proc_macro2::TokenStream, Context), CrateRoot(proc_macro2::TokenStream, UserInput), Display(proc_macro2::TokenStream, Display), DocComment(proc_macro2::TokenStream, String), Implicit(proc_macro2::TokenStream, bool), Module(proc_macro2::TokenStream, ModuleName), Source(proc_macro2::TokenStream, Vec), Visibility(proc_macro2::TokenStream, UserInput), Whatever(proc_macro2::TokenStream), } fn default_crate_root() -> UserInput { Box::new(quote! { ::snafu }) } fn private_visibility() -> UserInput { Box::new(quote! {}) } // Private context selectors wouldn't be accessible outside the // module, so we use `pub(super)`. fn default_context_selector_visibility_in_module() -> proc_macro2::TokenStream { quote! 
{ pub(super) } } impl From for proc_macro::TokenStream { fn from(other: SnafuInfo) -> proc_macro::TokenStream { match other { SnafuInfo::Enum(e) => e.into(), SnafuInfo::NamedStruct(s) => s.into(), SnafuInfo::TupleStruct(s) => s.into(), } } } impl From for proc_macro::TokenStream { fn from(other: EnumInfo) -> proc_macro::TokenStream { other.generate_snafu().into() } } impl From for proc_macro::TokenStream { fn from(other: NamedStructInfo) -> proc_macro::TokenStream { other.generate_snafu().into() } } impl From for proc_macro::TokenStream { fn from(other: TupleStructInfo) -> proc_macro::TokenStream { other.generate_snafu().into() } } trait GenericAwareNames { fn name(&self) -> &syn::Ident; fn generics(&self) -> &syn::Generics; fn parameterized_name(&self) -> UserInput { let enum_name = self.name(); let original_generics = self.provided_generic_names(); Box::new(quote! { #enum_name<#(#original_generics,)*> }) } fn provided_generic_types_without_defaults(&self) -> Vec { use syn::TypeParam; self.generics() .type_params() .map(|t: &TypeParam| { let TypeParam { attrs, ident, colon_token, bounds, .. } = t; quote! { #(#attrs)* #ident #colon_token #bounds } }) .collect() } fn provided_generics_without_defaults(&self) -> Vec { self.provided_generic_lifetimes() .into_iter() .chain(self.provided_generic_types_without_defaults().into_iter()) .collect() } fn provided_generic_lifetimes(&self) -> Vec { use syn::{GenericParam, LifetimeDef}; self.generics() .params .iter() .flat_map(|p| match p { GenericParam::Lifetime(LifetimeDef { lifetime, .. }) => Some(quote! { #lifetime }), _ => None, }) .collect() } fn provided_generic_names(&self) -> Vec { use syn::{ConstParam, GenericParam, LifetimeDef, TypeParam}; self.generics() .params .iter() .map(|p| match p { GenericParam::Type(TypeParam { ident, .. }) => quote! { #ident }, GenericParam::Lifetime(LifetimeDef { lifetime, .. }) => quote! { #lifetime }, GenericParam::Const(ConstParam { ident, .. }) => quote! { #ident }, }) .collect() } fn provided_where_clauses(&self) -> Vec { self.generics() .where_clause .iter() .flat_map(|c| c.predicates.iter().map(|p| quote! { #p })) .collect() } } impl EnumInfo { fn generate_snafu(self) -> proc_macro2::TokenStream { let context_selectors = ContextSelectors(&self); let display_impl = DisplayImpl(&self); let error_impl = ErrorImpl(&self); let error_compat_impl = ErrorCompatImpl(&self); let context = match &self.module { None => quote! { #context_selectors }, Some(module_name) => { use crate::shared::ContextModule; let context_module = ContextModule { container_name: self.name(), body: &context_selectors, visibility: Some(&self.default_visibility), module_name, }; quote! { #context_module } } }; quote! { #context #display_impl #error_impl #error_compat_impl } } } impl GenericAwareNames for EnumInfo { fn name(&self) -> &syn::Ident { &self.name } fn generics(&self) -> &syn::Generics { &self.generics } } struct ContextSelectors<'a>(&'a EnumInfo); impl<'a> quote::ToTokens for ContextSelectors<'a> { fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) { let context_selectors = self .0 .variants .iter() .map(|variant| ContextSelector(self.0, variant)); stream.extend({ quote! 
{ #(#context_selectors)* } }) } } struct ContextSelector<'a>(&'a EnumInfo, &'a FieldContainer); impl<'a> quote::ToTokens for ContextSelector<'a> { fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) { use crate::shared::ContextSelector; let enum_name = &self.0.name; let default_suffix = &self.0.default_suffix; let FieldContainer { name: variant_name, selector_kind, .. } = self.1; let default_visibility; let selector_visibility = match ( &self.1.visibility, &self.0.default_visibility, &self.0.module, ) { (Some(v), _, _) | (_, Some(v), _) => Some(&**v), (None, None, Some(_)) => { default_visibility = default_context_selector_visibility_in_module(); Some(&default_visibility as _) } (None, None, None) => None, }; let selector_doc_string = format!( "SNAFU context selector for the `{}::{}` variant", enum_name, variant_name, ); let context_selector = ContextSelector { backtrace_field: self.1.backtrace_field.as_ref(), implicit_fields: &self.1.implicit_fields, crate_root: &self.0.crate_root, error_constructor_name: "e! { #enum_name::#variant_name }, original_generics_without_defaults: &self.0.provided_generics_without_defaults(), parameterized_error_name: &self.0.parameterized_name(), selector_doc_string: &selector_doc_string, selector_kind: &selector_kind, selector_name: variant_name, user_fields: &selector_kind.user_fields(), visibility: selector_visibility, where_clauses: &self.0.provided_where_clauses(), default_suffix, }; stream.extend(quote! { #context_selector }); } } struct DisplayImpl<'a>(&'a EnumInfo); impl<'a> quote::ToTokens for DisplayImpl<'a> { fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) { use self::shared::{Display, DisplayMatchArm}; let enum_name = &self.0.name; let arms: Vec<_> = self .0 .variants .iter() .map(|variant| { let FieldContainer { backtrace_field, implicit_fields, display_format, doc_comment, name: variant_name, selector_kind, .. } = variant; let arm = DisplayMatchArm { backtrace_field: backtrace_field.as_ref(), implicit_fields: &implicit_fields, default_name: &variant_name, display_format: display_format.as_ref(), doc_comment: doc_comment.as_ref(), pattern_ident: "e! { #enum_name::#variant_name }, selector_kind, }; quote! { #arm } }) .collect(); let display = Display { arms: &arms, original_generics: &self.0.provided_generics_without_defaults(), parameterized_error_name: &self.0.parameterized_name(), where_clauses: &self.0.provided_where_clauses(), }; let display_impl = quote! { #display }; stream.extend(display_impl) } } struct ErrorImpl<'a>(&'a EnumInfo); impl<'a> quote::ToTokens for ErrorImpl<'a> { fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) { use self::shared::{Error, ErrorSourceMatchArm}; let (variants_to_description, variants_to_source): (Vec<_>, Vec<_>) = self .0 .variants .iter() .map(|field_container| { let enum_name = &self.0.name; let variant_name = &field_container.name; let pattern_ident = "e! { #enum_name::#variant_name }; let error_description_match_arm = quote! { #pattern_ident { .. } => stringify!(#pattern_ident), }; let error_source_match_arm = ErrorSourceMatchArm { field_container, pattern_ident, }; let error_source_match_arm = quote! 
{ #error_source_match_arm }; (error_description_match_arm, error_source_match_arm) }) .unzip(); let error_impl = Error { crate_root: &self.0.crate_root, parameterized_error_name: &self.0.parameterized_name(), description_arms: &variants_to_description, source_arms: &variants_to_source, original_generics: &self.0.provided_generics_without_defaults(), where_clauses: &self.0.provided_where_clauses(), }; let error_impl = quote! { #error_impl }; stream.extend(error_impl); } } struct ErrorCompatImpl<'a>(&'a EnumInfo); impl<'a> quote::ToTokens for ErrorCompatImpl<'a> { fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) { use self::shared::{ErrorCompat, ErrorCompatBacktraceMatchArm}; let variants_to_backtrace: Vec<_> = self .0 .variants .iter() .map(|field_container| { let crate_root = &self.0.crate_root; let enum_name = &self.0.name; let variant_name = &field_container.name; let match_arm = ErrorCompatBacktraceMatchArm { field_container, crate_root, pattern_ident: "e! { #enum_name::#variant_name }, }; quote! { #match_arm } }) .collect(); let error_compat_impl = ErrorCompat { crate_root: &self.0.crate_root, parameterized_error_name: &self.0.parameterized_name(), backtrace_arms: &variants_to_backtrace, original_generics: &self.0.provided_generics_without_defaults(), where_clauses: &self.0.provided_where_clauses(), }; let error_compat_impl = quote! { #error_compat_impl }; stream.extend(error_compat_impl); } } impl NamedStructInfo { fn generate_snafu(self) -> proc_macro2::TokenStream { let parameterized_struct_name = self.parameterized_name(); let original_generics = self.provided_generics_without_defaults(); let where_clauses = self.provided_where_clauses(); let Self { crate_root, field_container: FieldContainer { name, selector_kind, backtrace_field, implicit_fields, display_format, doc_comment, visibility, module, }, .. } = &self; let field_container = &self.field_container; let user_fields = selector_kind.user_fields(); use crate::shared::{Error, ErrorSourceMatchArm}; let pattern_ident = "e! { Self }; let error_description_match_arm = quote! { #pattern_ident { .. } => stringify!(#name), }; let error_source_match_arm = ErrorSourceMatchArm { field_container: &field_container, pattern_ident, }; let error_source_match_arm = quote! { #error_source_match_arm }; let error_impl = Error { crate_root: &crate_root, parameterized_error_name: ¶meterized_struct_name, description_arms: &[error_description_match_arm], source_arms: &[error_source_match_arm], original_generics: &original_generics, where_clauses: &where_clauses, }; let error_impl = quote! { #error_impl }; use self::shared::{ErrorCompat, ErrorCompatBacktraceMatchArm}; let match_arm = ErrorCompatBacktraceMatchArm { field_container, crate_root: &crate_root, pattern_ident: "e! { Self }, }; let match_arm = quote! { #match_arm }; let error_compat_impl = ErrorCompat { crate_root: &crate_root, parameterized_error_name: ¶meterized_struct_name, backtrace_arms: &[match_arm], original_generics: &original_generics, where_clauses: &where_clauses, }; use crate::shared::{Display, DisplayMatchArm}; let arm = DisplayMatchArm { backtrace_field: backtrace_field.as_ref(), implicit_fields: &implicit_fields, default_name: &name, display_format: display_format.as_ref(), doc_comment: doc_comment.as_ref(), pattern_ident: "e! { Self }, selector_kind: &selector_kind, }; let arm = quote! 
{ #arm }; let display_impl = Display { arms: &[arm], original_generics: &original_generics, parameterized_error_name: ¶meterized_struct_name, where_clauses: &where_clauses, }; use crate::shared::ContextSelector; let selector_doc_string = format!("SNAFU context selector for the `{}` error", name); let default_visibility; let selector_visibility = match (visibility, module) { (Some(v), _) => Some(&**v), (None, Some(_)) => { default_visibility = default_context_selector_visibility_in_module(); Some(&default_visibility as _) } (None, None) => None, }; let context_selector = ContextSelector { backtrace_field: backtrace_field.as_ref(), implicit_fields: implicit_fields, crate_root: &crate_root, error_constructor_name: &name, original_generics_without_defaults: &original_generics, parameterized_error_name: ¶meterized_struct_name, selector_doc_string: &selector_doc_string, selector_kind: &selector_kind, selector_name: &field_container.name, user_fields: &user_fields, visibility: selector_visibility, where_clauses: &where_clauses, default_suffix: &SuffixKind::Default, }; let context = match module { None => quote! { #context_selector }, Some(module_name) => { use crate::shared::ContextModule; let context_module = ContextModule { container_name: self.name(), body: &context_selector, visibility: visibility.as_ref().map(|x| &**x), module_name, }; quote! { #context_module } } }; quote! { #error_impl #error_compat_impl #display_impl #context } } } impl GenericAwareNames for NamedStructInfo { fn name(&self) -> &syn::Ident { &self.field_container.name } fn generics(&self) -> &syn::Generics { &self.generics } } impl TupleStructInfo { fn generate_snafu(self) -> proc_macro2::TokenStream { let parameterized_struct_name = self.parameterized_name(); let TupleStructInfo { crate_root, generics, name, transformation, } = self; let inner_type = transformation.ty(); let transformation = transformation.transformation(); let where_clauses: Vec<_> = generics .where_clause .iter() .flat_map(|c| c.predicates.iter().map(|p| quote! { #p })) .collect(); let description_fn = quote! { fn description(&self) -> &str { #crate_root::Error::description(&self.0) } }; let cause_fn = quote! { fn cause(&self) -> ::core::option::Option<&dyn #crate_root::Error> { #crate_root::Error::cause(&self.0) } }; let source_fn = quote! { fn source(&self) -> ::core::option::Option<&(dyn #crate_root::Error + 'static)> { #crate_root::Error::source(&self.0) } }; let backtrace_fn = quote! { fn backtrace(&self) -> ::core::option::Option<&#crate_root::Backtrace> { #crate_root::ErrorCompat::backtrace(&self.0) } }; let std_backtrace_fn = if cfg!(feature = "unstable-backtraces-impl-std") { quote! { fn backtrace(&self) -> ::core::option::Option<&std::backtrace::Backtrace> { #crate_root::ErrorCompat::backtrace(self) } } } else { quote! {} }; let error_impl = quote! { #[allow(single_use_lifetimes)] impl#generics #crate_root::Error for #parameterized_struct_name where #(#where_clauses),* { #description_fn #cause_fn #source_fn #std_backtrace_fn } }; let error_compat_impl = quote! { #[allow(single_use_lifetimes)] impl#generics #crate_root::ErrorCompat for #parameterized_struct_name where #(#where_clauses),* { #backtrace_fn } }; let display_impl = quote! { #[allow(single_use_lifetimes)] impl#generics ::core::fmt::Display for #parameterized_struct_name where #(#where_clauses),* { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { ::core::fmt::Display::fmt(&self.0, f) } } }; let from_impl = quote! 
{
            impl#generics ::core::convert::From<#inner_type> for #parameterized_struct_name
            where #(#where_clauses),*
            {
                fn from(other: #inner_type) -> Self {
                    #name((#transformation)(other))
                }
            }
        };

        quote! {
            #error_impl
            #error_compat_impl
            #display_impl
            #from_impl
        }
    }
}

impl GenericAwareNames for TupleStructInfo {
    fn name(&self) -> &syn::Ident {
        &self.name
    }

    fn generics(&self) -> &syn::Generics {
        &self.generics
    }
}

trait Transpose<T, E> {
    fn my_transpose(self) -> Result<Option<T>, E>;
}

impl<T, E> Transpose<T, E> for Option<Result<T, E>> {
    fn my_transpose(self) -> Result<Option<T>, E> {
        match self {
            Some(Ok(v)) => Ok(Some(v)),
            Some(Err(e)) => Err(e),
            None => Ok(None),
        }
    }
}

mod sponge {
    use std::iter::FromIterator;

    pub struct AllErrors<T, E>(Result<T, Vec<E>>);

    impl<T, E> AllErrors<T, E> {
        pub fn into_result(self) -> Result<T, Vec<E>> {
            self.0
        }
    }

    impl<C, T, E> FromIterator<Result<C, E>> for AllErrors<T, E>
    where
        T: FromIterator<C>,
    {
        fn from_iter<I>(i: I) -> Self
        where
            I: IntoIterator<Item = Result<C, E>>,
        {
            let mut errors = Vec::new();

            let inner = i
                .into_iter()
                .flat_map(|v| match v {
                    Ok(v) => Ok(v),
                    Err(e) => {
                        errors.push(e);
                        Err(())
                    }
                })
                .collect();

            if errors.is_empty() {
                AllErrors(Ok(inner))
            } else {
                AllErrors(Err(errors))
            }
        }
    }

    impl<C, T, E> FromIterator<Result<C, Vec<E>>> for AllErrors<T, E>
    where
        T: FromIterator<C>,
    {
        fn from_iter<I>(i: I) -> Self
        where
            I: IntoIterator<Item = Result<C, Vec<E>>>,
        {
            let mut errors = Vec::new();

            let inner = i
                .into_iter()
                .flat_map(|v| match v {
                    Ok(v) => Ok(v),
                    Err(e) => {
                        errors.extend(e);
                        Err(())
                    }
                })
                .collect();

            if errors.is_empty() {
                AllErrors(Ok(inner))
            } else {
                AllErrors(Err(errors))
            }
        }
    }
}
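// Illustrative sketch, not part of the published crate source: it only demonstrates
// the intent of `sponge::AllErrors` above, which collects an iterator of `Result`s
// while accumulating every error instead of stopping at the first one. The module
// and test names below are hypothetical.
#[cfg(test)]
mod sponge_usage_sketch {
    use super::sponge::AllErrors;

    #[test]
    fn accumulates_every_error() {
        let items = vec![Ok(1), Err("first failure"), Ok(3), Err("second failure")];

        // `T: FromIterator<C>` lets the successes collect into any container, while
        // the errors are gathered into a `Vec` and surfaced together by `into_result`.
        let all: AllErrors<Vec<i32>, &str> = items.into_iter().collect();

        assert_eq!(all.into_result(), Err(vec!["first failure", "second failure"]));
    }
}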
snafu-derive-0.7.1/src/parse.rs000064400000000000000000000450440072674642500145270ustar 00000000000000
use std::collections::BTreeSet;

use crate::{ModuleName, SnafuAttribute};
use proc_macro2::TokenStream;
use quote::{format_ident, ToTokens};
use syn::{
    parenthesized,
    parse::{Parse, ParseStream, Result},
    punctuated::Punctuated,
    token, Expr, Ident, Lit, LitBool, LitStr, Path, Type,
};

mod kw {
    use syn::custom_keyword;

    custom_keyword!(backtrace);
    custom_keyword!(context);
    custom_keyword!(crate_root);
    custom_keyword!(display);
    custom_keyword!(implicit);
    custom_keyword!(module);
    custom_keyword!(source);
    custom_keyword!(visibility);
    custom_keyword!(whatever);

    custom_keyword!(from);
    custom_keyword!(suffix);
}

pub(crate) fn attributes_from_syn(
    attrs: Vec<syn::Attribute>,
) -> super::MultiSynResult<Vec<SnafuAttribute>> {
    let mut ours = Vec::new();
    let mut errs = Vec::new();

    for attr in attrs {
        if attr.path.is_ident("snafu") {
            let attr_list = Punctuated::<Attribute, token::Comma>::parse_terminated;

            match attr.parse_args_with(attr_list) {
                Ok(attrs) => {
                    ours.extend(attrs.into_iter().map(Into::into));
                }
                Err(e) => errs.push(e),
            }
        } else if attr.path.is_ident("doc") {
            // Ignore any errors that occur while parsing the doc
            // comment. This isn't our attribute so we shouldn't
            // assume that we know what values are acceptable.
            if let Ok(comment) = syn::parse2::<DocComment>(attr.tokens) {
                ours.push(comment.into());
            }
        }
    }

    if errs.is_empty() {
        Ok(ours)
    } else {
        Err(errs)
    }
}

enum Attribute {
    Backtrace(Backtrace),
    Context(Context),
    CrateRoot(CrateRoot),
    Display(Display),
    Implicit(Implicit),
    Module(Module),
    Source(Source),
    Visibility(Visibility),
    Whatever(Whatever),
}

impl From<Attribute> for SnafuAttribute {
    fn from(other: Attribute) -> Self {
        use self::Attribute::*;

        match other {
            Backtrace(b) => SnafuAttribute::Backtrace(b.to_token_stream(), b.into_bool()),
            Context(c) => SnafuAttribute::Context(c.to_token_stream(), c.into_component()),
            CrateRoot(cr) => SnafuAttribute::CrateRoot(cr.to_token_stream(), cr.into_arbitrary()),
            Display(d) => SnafuAttribute::Display(d.to_token_stream(), d.into_display()),
            Implicit(d) => SnafuAttribute::Implicit(d.to_token_stream(), d.into_bool()),
            Module(v) => SnafuAttribute::Module(v.to_token_stream(), v.into_value()),
            Source(s) => SnafuAttribute::Source(s.to_token_stream(), s.into_components()),
            Visibility(v) => SnafuAttribute::Visibility(v.to_token_stream(), v.into_arbitrary()),
            Whatever(o) => SnafuAttribute::Whatever(o.to_token_stream()),
        }
    }
}

impl Parse for Attribute {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(kw::backtrace) {
            input.parse().map(Attribute::Backtrace)
        } else if lookahead.peek(kw::context) {
            input.parse().map(Attribute::Context)
        } else if lookahead.peek(kw::crate_root) {
            input.parse().map(Attribute::CrateRoot)
        } else if lookahead.peek(kw::display) {
            input.parse().map(Attribute::Display)
        } else if lookahead.peek(kw::implicit) {
            input.parse().map(Attribute::Implicit)
        } else if lookahead.peek(kw::module) {
            input.parse().map(Attribute::Module)
        } else if lookahead.peek(kw::source) {
            input.parse().map(Attribute::Source)
        } else if lookahead.peek(kw::visibility) {
            input.parse().map(Attribute::Visibility)
        } else if lookahead.peek(kw::whatever) {
            input.parse().map(Attribute::Whatever)
        } else {
            Err(lookahead.error())
        }
    }
}
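// Illustrative summary, not from the crate source: the attribute forms accepted by
// the keyword-dispatching parser above, as they appear on user code. The mapping
// follows the `lookahead.peek` chain; the concrete example arguments are hypothetical.
//
//     #[snafu(display("failed to read {path}"))]        // Attribute::Display
//     #[snafu(backtrace)]                                // Attribute::Backtrace
//     #[snafu(context(false))]                           // Attribute::Context
//     #[snafu(context(suffix(Ctx)))]                     // Attribute::Context
//     #[snafu(crate_root(other_crate::snafu))]           // Attribute::CrateRoot
//     #[snafu(implicit)]                                 // Attribute::Implicit
//     #[snafu(module)]                                   // Attribute::Module
//     #[snafu(source(from(io::Error, Box::new)))]        // Attribute::Source
//     #[snafu(visibility(pub(crate)))]                   // Attribute::Visibility
//     #[snafu(whatever)]                                 // Attribute::Whatever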
struct Backtrace {
    backtrace_token: kw::backtrace,
    arg: MaybeArg<LitBool>,
}

impl Backtrace {
    fn into_bool(self) -> bool {
        self.arg.into_option().map_or(true, |a| a.value)
    }
}

impl Parse for Backtrace {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            backtrace_token: input.parse()?,
            arg: input.parse()?,
        })
    }
}

impl ToTokens for Backtrace {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.backtrace_token.to_tokens(tokens);
        self.arg.to_tokens(tokens);
    }
}

struct Context {
    context_token: kw::context,
    arg: MaybeArg<ContextArg>,
}

impl Context {
    fn into_component(self) -> super::Context {
        use super::{Context::*, SuffixKind};

        match self.arg.into_option() {
            None => Flag(true),
            Some(arg) => match arg {
                ContextArg::Flag { value } => Flag(value.value),
                ContextArg::Suffix {
                    suffix:
                        SuffixArg::Flag {
                            value: LitBool { value: true, .. },
                        },
                    ..
                } => Suffix(SuffixKind::Default),
                ContextArg::Suffix {
                    suffix:
                        SuffixArg::Flag {
                            value: LitBool { value: false, .. },
                        },
                    ..
                } => Suffix(SuffixKind::None),
                ContextArg::Suffix {
                    suffix: SuffixArg::Suffix { suffix, .. },
                    ..
                } => Suffix(SuffixKind::Some(suffix)),
            },
        }
    }
}

impl Parse for Context {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            context_token: input.parse()?,
            arg: input.parse()?,
        })
    }
}

impl ToTokens for Context {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.context_token.to_tokens(tokens);
        self.arg.to_tokens(tokens);
    }
}

enum ContextArg {
    Flag {
        value: LitBool,
    },
    Suffix {
        suffix_token: kw::suffix,
        paren_token: token::Paren,
        suffix: SuffixArg,
    },
}

impl Parse for ContextArg {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(LitBool) {
            Ok(ContextArg::Flag {
                value: input.parse()?,
            })
        } else if lookahead.peek(kw::suffix) {
            let content;
            Ok(ContextArg::Suffix {
                suffix_token: input.parse()?,
                paren_token: parenthesized!(content in input),
                suffix: content.parse()?,
            })
        } else {
            Err(lookahead.error())
        }
    }
}

impl ToTokens for ContextArg {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        match self {
            ContextArg::Flag { value } => {
                value.to_tokens(tokens);
            }
            ContextArg::Suffix {
                suffix_token,
                paren_token,
                suffix,
            } => {
                suffix_token.to_tokens(tokens);
                paren_token.surround(tokens, |tokens| {
                    suffix.to_tokens(tokens);
                })
            }
        }
    }
}

enum SuffixArg {
    Flag { value: LitBool },
    Suffix { suffix: Ident },
}

impl Parse for SuffixArg {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(LitBool) {
            Ok(SuffixArg::Flag {
                value: input.parse()?,
            })
        } else if lookahead.peek(syn::Ident) {
            Ok(SuffixArg::Suffix {
                suffix: input.parse()?,
            })
        } else {
            Err(lookahead.error())
        }
    }
}

impl ToTokens for SuffixArg {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        match self {
            SuffixArg::Flag { value } => {
                value.to_tokens(tokens);
            }
            SuffixArg::Suffix { suffix } => {
                suffix.to_tokens(tokens);
            }
        }
    }
}
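// Illustrative mapping, not from the crate source: how the `context` forms parsed
// above translate through `Context::into_component` into selector naming. The variant
// name `ReadFileError` is hypothetical; its trailing `Error` is trimmed before the
// suffix is appended (see `parameterized_selector_name` in shared.rs).
//
//     #[snafu(context)]                // Context::Flag(true): selector `ReadFileSnafu`
//     #[snafu(context(false))]         // Context::Flag(false): no selector; a `From`
//                                      // impl is generated instead (`generate_from_source`)
//     #[snafu(context(suffix(true)))]  // SuffixKind::Default: selector `ReadFileSnafu`
//     #[snafu(context(suffix(false)))] // SuffixKind::None: selector `ReadFile`
//     #[snafu(context(suffix(Ctx)))]   // SuffixKind::Some(Ctx): selector `ReadFileCtx`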
struct CrateRoot {
    crate_root_token: kw::crate_root,
    paren_token: token::Paren,
    arg: Path,
}

impl CrateRoot {
    // TODO: Remove boxed trait object
    fn into_arbitrary(self) -> Box<dyn ToTokens> {
        Box::new(self.arg)
    }
}

impl Parse for CrateRoot {
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(Self {
            crate_root_token: input.parse()?,
            paren_token: parenthesized!(content in input),
            arg: content.parse()?,
        })
    }
}

impl ToTokens for CrateRoot {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.crate_root_token.to_tokens(tokens);
        self.paren_token.surround(tokens, |tokens| {
            self.arg.to_tokens(tokens);
        });
    }
}

struct Display {
    display_token: kw::display,
    paren_token: token::Paren,
    args: Punctuated<Expr, token::Comma>,
}

impl Display {
    fn into_display(self) -> crate::Display {
        let exprs: Vec<_> = self.args.into_iter().collect();
        let mut shorthand_names = BTreeSet::new();
        let mut assigned_names = BTreeSet::new();

        // Do a best-effort parsing here; if we fail, the compiler
        // will likely spit out something more useful when it tries to
        // parse it.
        if let Some((Expr::Lit(l), args)) = exprs.split_first() {
            if let Lit::Str(s) = &l.lit {
                let format_str = s.value();
                let names = extract_field_names(&format_str).map(|n| format_ident!("{}", n));
                shorthand_names.extend(names);
            }

            for arg in args {
                if let Expr::Assign(a) = arg {
                    if let Expr::Path(p) = &*a.left {
                        assigned_names.extend(p.path.get_ident().cloned());
                    }
                }
            }
        }

        crate::Display {
            exprs,
            shorthand_names,
            assigned_names,
        }
    }
}

pub(crate) fn extract_field_names(mut s: &str) -> impl Iterator<Item = &str> {
    std::iter::from_fn(move || loop {
        let open_curly = s.find('{')?;
        s = &s[open_curly + '{'.len_utf8()..];

        if s.starts_with('{') {
            s = &s['{'.len_utf8()..];
            continue;
        }

        let end_curly = s.find('}')?;
        let format_contents = &s[..end_curly];

        let name = match format_contents.find(':') {
            Some(idx) => &format_contents[..idx],
            None => format_contents,
        };

        if name.is_empty() {
            continue;
        }

        return Some(name);
    })
}

impl Parse for Display {
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(Self {
            display_token: input.parse()?,
            paren_token: parenthesized!(content in input),
            args: Punctuated::parse_terminated(&content)?,
        })
    }
}

impl ToTokens for Display {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.display_token.to_tokens(tokens);
        self.paren_token.surround(tokens, |tokens| {
            self.args.to_tokens(tokens);
        });
    }
}
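// Illustrative note, not from the crate source: how `into_display` splits names for a
// hypothetical attribute such as
//
//     #[snafu(display("reading {path} failed after {count} tries", count = retries + 1))]
//
// `extract_field_names` on the first string literal yields the shorthand names
// `path` and `count`; the explicit `count = ...` assignment places `count` in
// `assigned_names`, so only `path` is later auto-bound from the field of the same
// name (see `DisplayMatchArm` in shared.rs, which subtracts assigned from shorthand names).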
struct DocComment {
    eq_token: token::Eq,
    str: LitStr,
}

impl DocComment {
    fn into_value(self) -> String {
        self.str.value()
    }
}

impl From<DocComment> for SnafuAttribute {
    fn from(other: DocComment) -> Self {
        SnafuAttribute::DocComment(other.to_token_stream(), other.into_value())
    }
}

impl Parse for DocComment {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            eq_token: input.parse()?,
            str: input.parse()?,
        })
    }
}

impl ToTokens for DocComment {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.eq_token.to_tokens(tokens);
        self.str.to_tokens(tokens);
    }
}

struct Implicit {
    implicit_token: kw::implicit,
    arg: MaybeArg<LitBool>,
}

impl Implicit {
    fn into_bool(self) -> bool {
        self.arg.into_option().map_or(true, |a| a.value)
    }
}

impl Parse for Implicit {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            implicit_token: input.parse()?,
            arg: input.parse()?,
        })
    }
}

impl ToTokens for Implicit {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.implicit_token.to_tokens(tokens);
        self.arg.to_tokens(tokens);
    }
}

struct Module {
    module_token: kw::module,
    arg: MaybeArg<Ident>,
}

impl Module {
    fn into_value(self) -> ModuleName {
        match self.arg.into_option() {
            None => ModuleName::Default,
            Some(name) => ModuleName::Custom(name),
        }
    }
}

impl Parse for Module {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            module_token: input.parse()?,
            arg: input.parse()?,
        })
    }
}

impl ToTokens for Module {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.module_token.to_tokens(tokens);
        self.arg.to_tokens(tokens);
    }
}

struct Source {
    source_token: kw::source,
    args: MaybeArg<Punctuated<SourceArg, token::Comma>>,
}

impl Source {
    fn into_components(self) -> Vec<super::Source> {
        match self.args.into_option() {
            None => vec![super::Source::Flag(true)],
            Some(args) => args
                .into_iter()
                .map(|sa| match sa {
                    SourceArg::Flag { value } => super::Source::Flag(value.value),
                    SourceArg::From { r#type, expr, .. } => super::Source::From(r#type, expr),
                })
                .collect(),
        }
    }
}

impl Parse for Source {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            source_token: input.parse()?,
            args: MaybeArg::parse_with(&input, Punctuated::parse_terminated)?,
        })
    }
}

impl ToTokens for Source {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.source_token.to_tokens(tokens);
        self.args.to_tokens(tokens);
    }
}

enum SourceArg {
    Flag {
        value: LitBool,
    },
    From {
        from_token: kw::from,
        paren_token: token::Paren,
        r#type: Type,
        comma_token: token::Comma,
        expr: Expr,
    },
}

impl Parse for SourceArg {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(LitBool) {
            Ok(SourceArg::Flag {
                value: input.parse()?,
            })
        } else if lookahead.peek(kw::from) {
            let content;
            Ok(SourceArg::From {
                from_token: input.parse()?,
                paren_token: parenthesized!(content in input),
                r#type: content.parse()?,
                comma_token: content.parse()?,
                expr: content.parse()?,
            })
        } else {
            Err(lookahead.error())
        }
    }
}

impl ToTokens for SourceArg {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        match self {
            SourceArg::Flag { value } => {
                value.to_tokens(tokens);
            }
            SourceArg::From {
                from_token,
                paren_token,
                r#type,
                comma_token,
                expr,
            } => {
                from_token.to_tokens(tokens);
                paren_token.surround(tokens, |tokens| {
                    r#type.to_tokens(tokens);
                    comma_token.to_tokens(tokens);
                    expr.to_tokens(tokens);
                })
            }
        }
    }
}

struct Visibility {
    visibility_token: kw::visibility,
    visibility: MaybeArg<syn::Visibility>,
}

impl Visibility {
    // TODO: Remove boxed trait object
    fn into_arbitrary(self) -> Box<dyn ToTokens> {
        // TODO: Move this default value out of parsing
        self.visibility
            .into_option()
            .map_or_else(super::private_visibility, |v| Box::new(v))
    }
}

impl Parse for Visibility {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            visibility_token: input.parse()?,
            visibility: input.parse()?,
        })
    }
}

impl ToTokens for Visibility {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.visibility_token.to_tokens(tokens);
        self.visibility.to_tokens(tokens);
    }
}

struct Whatever {
    whatever_token: kw::whatever,
}

impl Parse for Whatever {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            whatever_token: input.parse()?,
        })
    }
}

impl ToTokens for Whatever {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.whatever_token.to_tokens(tokens);
    }
}

enum MaybeArg<T> {
    None,
    Some {
        paren_token: token::Paren,
        content: T,
    },
}

impl<T> MaybeArg<T> {
    fn into_option(self) -> Option<T> {
        match self {
            MaybeArg::None => None,
            MaybeArg::Some { content, .. } => Some(content),
        }
    }

    fn parse_with<F>(input: ParseStream<'_>, parser: F) -> Result<Self>
    where
        F: FnOnce(ParseStream<'_>) -> Result<T>,
    {
        let lookahead = input.lookahead1();
        if lookahead.peek(token::Paren) {
            let content;
            Ok(MaybeArg::Some {
                paren_token: parenthesized!(content in input),
                content: parser(&content)?,
            })
        } else {
            Ok(MaybeArg::None)
        }
    }
}

impl<T: Parse> Parse for MaybeArg<T> {
    fn parse(input: ParseStream) -> Result<Self> {
        Self::parse_with(input, Parse::parse)
    }
}

impl<T: ToTokens> ToTokens for MaybeArg<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        if let MaybeArg::Some {
            paren_token,
            content,
        } = self
        {
            paren_token.surround(tokens, |tokens| {
                content.to_tokens(tokens);
            });
        }
    }
}
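// Illustrative note, not from the crate source: `MaybeArg<T>` is what lets most of the
// attributes above accept either a bare flag or a parenthesized argument:
//
//     #[snafu(backtrace)]          // parsed as MaybeArg::None -> into_bool() == true
//     #[snafu(backtrace(false))]   // parsed as MaybeArg::Some -> into_bool() == false
//     #[snafu(module)]             // parsed as MaybeArg::None -> ModuleName::Default
//     #[snafu(module(errors))]     // parsed as MaybeArg::Some -> ModuleName::Custom(errors)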
#[cfg(test)]
mod test {
    use super::*;

    fn names(s: &str) -> Vec<&str> {
        extract_field_names(s).collect::<Vec<_>>()
    }

    #[test]
    fn ignores_positional_arguments() {
        assert_eq!(names("{}"), [] as [&str; 0]);
    }

    #[test]
    fn finds_named_argument() {
        assert_eq!(names("{a}"), ["a"]);
    }

    #[test]
    fn finds_multiple_named_arguments() {
        assert_eq!(names("{a} {b}"), ["a", "b"]);
    }

    #[test]
    fn ignores_escaped_braces() {
        assert_eq!(names("{{a}}"), [] as [&str; 0]);
    }

    #[test]
    fn finds_named_arguments_around_escaped() {
        assert_eq!(names("{a} {{b}} {c}"), ["a", "c"]);
    }

    #[test]
    fn ignores_format_spec() {
        assert_eq!(names("{a:?}"), ["a"]);
    }
}
snafu-derive-0.7.1/src/shared.rs000064400000000000000000000655250072674642500146670ustar 00000000000000
pub(crate) use self::context_module::ContextModule;
pub(crate) use self::context_selector::ContextSelector;
pub(crate) use self::display::{Display, DisplayMatchArm};
pub(crate) use self::error::{Error, ErrorSourceMatchArm};
pub(crate) use self::error_compat::{ErrorCompat, ErrorCompatBacktraceMatchArm};

pub mod context_module {
    use crate::ModuleName;
    use heck::ToSnakeCase;
    use proc_macro2::TokenStream;
    use quote::{quote, ToTokens};
    use syn::Ident;

    #[derive(Copy, Clone)]
    pub(crate) struct ContextModule<'a, T> {
        pub container_name: &'a Ident,
        pub module_name: &'a ModuleName,
        pub visibility: Option<&'a dyn ToTokens>,
        pub body: &'a T,
    }

    impl<'a, T> ToTokens for ContextModule<'a, T>
    where
        T: ToTokens,
    {
        fn to_tokens(&self, stream: &mut TokenStream) {
            let module_name = match self.module_name {
                ModuleName::Default => {
                    let name_str = self.container_name.to_string().to_snake_case();
                    syn::Ident::new(&name_str, self.container_name.span())
                }
                ModuleName::Custom(name) => name.clone(),
            };

            let visibility = self.visibility;
            let body = self.body;

            let module_tokens = quote! {
                #visibility mod #module_name {
                    use super::*;

                    #body
                }
            };

            stream.extend(module_tokens);
        }
    }
}

pub mod context_selector {
    use crate::{ContextSelectorKind, Field, SuffixKind};
    use proc_macro2::TokenStream;
    use quote::{format_ident, quote, IdentFragment, ToTokens};

    const DEFAULT_SUFFIX: &str = "Snafu";

    #[derive(Copy, Clone)]
    pub(crate) struct ContextSelector<'a> {
        pub backtrace_field: Option<&'a Field>,
        pub implicit_fields: &'a [Field],
        pub crate_root: &'a dyn ToTokens,
        pub error_constructor_name: &'a dyn ToTokens,
        pub original_generics_without_defaults: &'a [TokenStream],
        pub parameterized_error_name: &'a dyn ToTokens,
        pub selector_doc_string: &'a str,
        pub selector_kind: &'a ContextSelectorKind,
        pub selector_name: &'a proc_macro2::Ident,
        pub user_fields: &'a [Field],
        pub visibility: Option<&'a dyn ToTokens>,
        pub where_clauses: &'a [TokenStream],
        pub default_suffix: &'a SuffixKind,
    }

    impl ToTokens for ContextSelector<'_> {
        fn to_tokens(&self, stream: &mut TokenStream) {
            use self::ContextSelectorKind::*;

            let context_selector = match self.selector_kind {
                Context { source_field, ..
} => { let context_selector_type = self.generate_type(); let context_selector_impl = match source_field { Some(_) => None, None => Some(self.generate_leaf()), }; let context_selector_into_error_impl = self.generate_into_error(source_field.as_ref()); quote! { #context_selector_type #context_selector_impl #context_selector_into_error_impl } } Whatever { source_field, message_field, } => self.generate_whatever(source_field.as_ref(), message_field), NoContext { source_field } => self.generate_from_source(source_field), }; stream.extend(context_selector) } } impl ContextSelector<'_> { fn user_field_generics(&self) -> Vec { (0..self.user_fields.len()) .map(|i| format_ident!("__T{}", i)) .collect() } fn user_field_names(&self) -> Vec<&syn::Ident> { self.user_fields .iter() .map(|Field { name, .. }| name) .collect() } fn parameterized_selector_name(&self) -> TokenStream { let selector_name = self.selector_name.to_string(); let selector_name = selector_name.trim_end_matches("Error"); let suffix: &dyn IdentFragment = match self.selector_kind { ContextSelectorKind::Context { suffix, .. } => { match suffix.resolve_with_default(self.default_suffix) { SuffixKind::Some(s) => s, SuffixKind::None => &"", SuffixKind::Default => &DEFAULT_SUFFIX, } } _ => &DEFAULT_SUFFIX, }; let selector_name = format_ident!( "{}{}", selector_name, suffix, span = self.selector_name.span() ); let user_generics = self.user_field_generics(); quote! { #selector_name<#(#user_generics,)*> } } fn extended_where_clauses(&self) -> Vec { let user_fields = self.user_fields; let user_field_generics = self.user_field_generics(); let where_clauses = self.where_clauses; let target_types = user_fields .iter() .map(|Field { ty, .. }| quote! { ::core::convert::Into<#ty>}); user_field_generics .into_iter() .zip(target_types) .map(|(gen, bound)| quote! { #gen: #bound }) .chain(where_clauses.iter().cloned()) .collect() } fn transfer_user_fields(&self) -> Vec { self.user_field_names() .into_iter() .map(|name| { quote! { #name: ::core::convert::Into::into(self.#name) } }) .collect() } fn construct_implicit_fields(&self) -> TokenStream { self.implicit_fields .iter() .chain(self.backtrace_field) .map(|field| { let crate_root = self.crate_root; let name = &field.name; quote! { #name: #crate_root::GenerateImplicitData::generate(), } }) .collect() } fn generate_type(self) -> TokenStream { let visibility = self.visibility; let parameterized_selector_name = self.parameterized_selector_name(); let user_field_generics = self.user_field_generics(); let user_field_names = self.user_field_names(); let selector_doc_string = self.selector_doc_string; let body = if user_field_names.is_empty() { quote! { ; } } else { quote! { { #( #[allow(missing_docs)] #visibility #user_field_names: #user_field_generics ),* } } }; quote! { #[derive(Debug, Copy, Clone)] #[doc = #selector_doc_string] #visibility struct #parameterized_selector_name #body } } fn generate_leaf(self) -> TokenStream { let error_constructor_name = self.error_constructor_name; let original_generics_without_defaults = self.original_generics_without_defaults; let parameterized_error_name = self.parameterized_error_name; let parameterized_selector_name = self.parameterized_selector_name(); let user_field_generics = self.user_field_generics(); let visibility = self.visibility; let extended_where_clauses = self.extended_where_clauses(); let transfer_user_fields = self.transfer_user_fields(); let construct_implicit_fields = self.construct_implicit_fields(); let track_caller = track_caller(); quote! 
{ impl<#(#user_field_generics,)*> #parameterized_selector_name { #[doc = "Consume the selector and return the associated error"] #[must_use] #track_caller #visibility fn build<#(#original_generics_without_defaults,)*>(self) -> #parameterized_error_name where #(#extended_where_clauses),* { #error_constructor_name { #construct_implicit_fields #(#transfer_user_fields,)* } } #[doc = "Consume the selector and return a `Result` with the associated error"] #track_caller #visibility fn fail<#(#original_generics_without_defaults,)* __T>(self) -> ::core::result::Result<__T, #parameterized_error_name> where #(#extended_where_clauses),* { ::core::result::Result::Err(self.build()) } } } } fn generate_into_error(self, source_field: Option<&crate::SourceField>) -> TokenStream { let crate_root = self.crate_root; let error_constructor_name = self.error_constructor_name; let original_generics_without_defaults = self.original_generics_without_defaults; let parameterized_error_name = self.parameterized_error_name; let parameterized_selector_name = self.parameterized_selector_name(); let user_field_generics = self.user_field_generics(); let extended_where_clauses = self.extended_where_clauses(); let transfer_user_fields = self.transfer_user_fields(); let construct_implicit_fields = self.construct_implicit_fields(); let (source_ty, transfer_source_field) = match source_field { Some(source_field) => { let (ty, transfer) = build_source_info(source_field); (quote! { #ty }, transfer) } None => (quote! { #crate_root::NoneError }, quote! {}), }; let track_caller = track_caller(); quote! { impl<#(#original_generics_without_defaults,)* #(#user_field_generics,)*> #crate_root::IntoError<#parameterized_error_name> for #parameterized_selector_name where #parameterized_error_name: #crate_root::Error + #crate_root::ErrorCompat, #(#extended_where_clauses),* { type Source = #source_ty; #track_caller fn into_error(self, error: Self::Source) -> #parameterized_error_name { #error_constructor_name { #transfer_source_field #construct_implicit_fields #(#transfer_user_fields),* } } } } } fn generate_whatever( self, source_field: Option<&crate::SourceField>, message_field: &crate::Field, ) -> TokenStream { let crate_root = self.crate_root; let parameterized_error_name = self.parameterized_error_name; let error_constructor_name = self.error_constructor_name; let construct_implicit_fields = self.construct_implicit_fields(); // testme: transform let (source_ty, transfer_source_field, empty_source_field) = match source_field { Some(f) => { let source_field_type = f.transformation.ty(); let source_field_name = &f.name; let source_transformation = f.transformation.transformation(); ( quote! { #source_field_type }, Some(quote! { #source_field_name: (#source_transformation)(error), }), Some(quote! { #source_field_name: core::option::Option::None, }), ) } None => (quote! { #crate_root::NoneError }, None, None), }; let message_field_name = &message_field.name; let track_caller = track_caller(); quote! 
{ impl #crate_root::FromString for #parameterized_error_name { type Source = #source_ty; #track_caller fn without_source(message: String) -> Self { #error_constructor_name { #empty_source_field #message_field_name: message, #construct_implicit_fields } } #track_caller fn with_source(error: Self::Source, message: String) -> Self { #error_constructor_name { #transfer_source_field #message_field_name: message, #construct_implicit_fields } } } } } fn generate_from_source(self, source_field: &crate::SourceField) -> TokenStream { let parameterized_error_name = self.parameterized_error_name; let error_constructor_name = self.error_constructor_name; let construct_implicit_fields = self.construct_implicit_fields(); let original_generics_without_defaults = self.original_generics_without_defaults; let user_field_generics = self.user_field_generics(); let where_clauses = self.where_clauses; let (source_field_type, transfer_source_field) = build_source_info(source_field); let track_caller = track_caller(); quote! { impl<#(#original_generics_without_defaults,)* #(#user_field_generics,)*> ::core::convert::From<#source_field_type> for #parameterized_error_name where #(#where_clauses),* { #track_caller fn from(error: #source_field_type) -> Self { #error_constructor_name { #transfer_source_field #construct_implicit_fields } } } } } } // Assumes that the error is in a variable called "error" fn build_source_info(source_field: &crate::SourceField) -> (&syn::Type, TokenStream) { let source_field_name = source_field.name(); let source_field_type = source_field.transformation.ty(); let source_transformation = source_field.transformation.transformation(); ( source_field_type, quote! { #source_field_name: (#source_transformation)(error), }, ) } fn track_caller() -> proc_macro2::TokenStream { if cfg!(feature = "rust_1_46") { quote::quote! { #[track_caller] } } else { quote::quote! {} } } } pub mod display { use crate::{Field, SourceField}; use proc_macro2::TokenStream; use quote::{quote, ToTokens}; use std::collections::BTreeSet; struct StaticIdent(&'static str); impl quote::ToTokens for StaticIdent { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { proc_macro2::Ident::new(self.0, proc_macro2::Span::call_site()).to_tokens(tokens) } } const FORMATTER_ARG: StaticIdent = StaticIdent("__snafu_display_formatter"); pub(crate) struct Display<'a> { pub(crate) arms: &'a [TokenStream], pub(crate) original_generics: &'a [TokenStream], pub(crate) parameterized_error_name: &'a dyn ToTokens, pub(crate) where_clauses: &'a [TokenStream], } impl ToTokens for Display<'_> { fn to_tokens(&self, stream: &mut TokenStream) { let Self { arms, original_generics, parameterized_error_name, where_clauses, } = *self; let display_impl = quote! 
{ #[allow(single_use_lifetimes)] impl<#(#original_generics),*> ::core::fmt::Display for #parameterized_error_name where #(#where_clauses),* { fn fmt(&self, #FORMATTER_ARG: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { #[allow(unused_variables)] match *self { #(#arms),* } } } }; stream.extend(display_impl); } } pub(crate) struct DisplayMatchArm<'a> { pub(crate) backtrace_field: Option<&'a crate::Field>, pub(crate) implicit_fields: &'a [crate::Field], pub(crate) default_name: &'a dyn ToTokens, pub(crate) display_format: Option<&'a crate::Display>, pub(crate) doc_comment: Option<&'a crate::DocComment>, pub(crate) pattern_ident: &'a dyn ToTokens, pub(crate) selector_kind: &'a crate::ContextSelectorKind, } impl ToTokens for DisplayMatchArm<'_> { fn to_tokens(&self, stream: &mut TokenStream) { let Self { backtrace_field, implicit_fields, default_name, display_format, doc_comment, pattern_ident, selector_kind, } = *self; let user_fields = selector_kind.user_fields(); let source_field = selector_kind.source_field(); let message_field = selector_kind.message_field(); let mut shorthand_names = &BTreeSet::new(); let mut assigned_names = &BTreeSet::new(); let format = match (display_format, doc_comment, source_field) { (Some(v), _, _) => { let exprs = &v.exprs; shorthand_names = &v.shorthand_names; assigned_names = &v.assigned_names; quote! { #(#exprs),* } } (_, Some(d), _) => { let content = &d.content; shorthand_names = &d.shorthand_names; quote! { #content } } (_, _, Some(f)) => { let field_name = &f.name; quote! { concat!(stringify!(#default_name), ": {}"), #field_name } } _ => quote! { stringify!(#default_name)}, }; let field_names = user_fields .iter() .chain(backtrace_field) .chain(implicit_fields) .chain(message_field) .map(Field::name) .chain(source_field.map(SourceField::name)) .collect::>(); let field_names_pat = quote! { #(ref #field_names),* }; let shorthand_names = shorthand_names.iter().collect::>(); let assigned_names = assigned_names.iter().collect::>(); let shorthand_fields = &shorthand_names & &field_names; let shorthand_fields = &shorthand_fields - &assigned_names; let shorthand_assignments = quote! { #( #shorthand_fields = #shorthand_fields ),* }; let match_arm = quote! { #pattern_ident { #field_names_pat } => { write!(#FORMATTER_ARG, #format, #shorthand_assignments) } }; stream.extend(match_arm); } } } pub mod error { use crate::{FieldContainer, SourceField}; use proc_macro2::TokenStream; use quote::{quote, ToTokens}; pub(crate) struct Error<'a> { pub(crate) crate_root: &'a dyn ToTokens, pub(crate) parameterized_error_name: &'a dyn ToTokens, pub(crate) description_arms: &'a [TokenStream], pub(crate) source_arms: &'a [TokenStream], pub(crate) original_generics: &'a [TokenStream], pub(crate) where_clauses: &'a [TokenStream], } impl ToTokens for Error<'_> { fn to_tokens(&self, stream: &mut TokenStream) { let Self { crate_root, parameterized_error_name, description_arms, source_arms, original_generics, where_clauses, } = *self; let description_fn = quote! { fn description(&self) -> &str { match *self { #(#description_arms)* } } }; let source_body = quote! { use #crate_root::AsErrorSource; match *self { #(#source_arms)* } }; let cause_fn = quote! { fn cause(&self) -> ::core::option::Option<&dyn #crate_root::Error> { #source_body } }; let source_fn = quote! { fn source(&self) -> ::core::option::Option<&(dyn #crate_root::Error + 'static)> { #source_body } }; let std_backtrace_fn = if cfg!(feature = "unstable-backtraces-impl-std") { Some(quote! 
{ fn backtrace(&self) -> ::core::option::Option<&::std::backtrace::Backtrace> { #crate_root::ErrorCompat::backtrace(self) } }) } else { None }; let error = quote! { #[allow(single_use_lifetimes)] impl<#(#original_generics),*> #crate_root::Error for #parameterized_error_name where Self: ::core::fmt::Debug + ::core::fmt::Display, #(#where_clauses),* { #description_fn #cause_fn #source_fn #std_backtrace_fn } }; stream.extend(error); } } pub(crate) struct ErrorSourceMatchArm<'a> { pub(crate) field_container: &'a FieldContainer, pub(crate) pattern_ident: &'a dyn ToTokens, } impl ToTokens for ErrorSourceMatchArm<'_> { fn to_tokens(&self, stream: &mut TokenStream) { let Self { field_container: FieldContainer { selector_kind, .. }, pattern_ident, } = *self; let source_field = selector_kind.source_field(); let arm = match source_field { Some(source_field) => { let SourceField { name: field_name, .. } = source_field; let convert_to_error_source = if selector_kind.is_whatever() { quote! { #field_name.as_ref().map(|e| e.as_error_source()) } } else { quote! { ::core::option::Option::Some(#field_name.as_error_source()) } }; quote! { #pattern_ident { ref #field_name, .. } => { #convert_to_error_source } } } None => { quote! { #pattern_ident { .. } => { ::core::option::Option::None } } } }; stream.extend(arm); } } } pub mod error_compat { use crate::{Field, FieldContainer, SourceField}; use proc_macro2::TokenStream; use quote::{quote, ToTokens}; pub(crate) struct ErrorCompat<'a> { pub(crate) crate_root: &'a dyn ToTokens, pub(crate) parameterized_error_name: &'a dyn ToTokens, pub(crate) backtrace_arms: &'a [TokenStream], pub(crate) original_generics: &'a [TokenStream], pub(crate) where_clauses: &'a [TokenStream], } impl ToTokens for ErrorCompat<'_> { fn to_tokens(&self, stream: &mut TokenStream) { let Self { crate_root, parameterized_error_name, backtrace_arms, original_generics, where_clauses, } = *self; let backtrace_fn = quote! { fn backtrace(&self) -> ::core::option::Option<&#crate_root::Backtrace> { match *self { #(#backtrace_arms),* } } }; let error_compat_impl = quote! { #[allow(single_use_lifetimes)] impl<#(#original_generics),*> #crate_root::ErrorCompat for #parameterized_error_name where #(#where_clauses),* { #backtrace_fn } }; stream.extend(error_compat_impl); } } pub(crate) struct ErrorCompatBacktraceMatchArm<'a> { pub(crate) crate_root: &'a dyn ToTokens, pub(crate) field_container: &'a FieldContainer, pub(crate) pattern_ident: &'a dyn ToTokens, } impl ToTokens for ErrorCompatBacktraceMatchArm<'_> { fn to_tokens(&self, stream: &mut TokenStream) { let Self { crate_root, field_container: FieldContainer { backtrace_field, selector_kind, .. }, pattern_ident, } = *self; let match_arm = match (selector_kind.source_field(), backtrace_field) { (Some(source_field), _) if source_field.backtrace_delegate => { let SourceField { name: field_name, .. } = source_field; quote! { #pattern_ident { ref #field_name, .. } => { #crate_root::ErrorCompat::backtrace(#field_name) } } } (_, Some(backtrace_field)) => { let Field { name: field_name, .. } = backtrace_field; quote! { #pattern_ident { ref #field_name, .. } => { #crate_root::AsBacktrace::as_backtrace(#field_name) } } } _ => { quote! { #pattern_ident { .. } => { ::core::option::Option::None } } } }; stream.extend(match_arm); } } }
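// Illustrative end-to-end sketch, not part of snafu-derive itself: it assumes the
// user-facing `snafu` crate (which re-exports this derive) and shows roughly the kind
// of input the generators above process. The error type, variant, and file path are
// hypothetical; the `ReadConfigSnafu` selector name follows the default "Snafu"
// suffix handling in `context_selector`, and `.context(...)` relies on the generated
// `IntoError` implementation.
//
// use snafu::prelude::*;
//
// #[derive(Debug, Snafu)]
// enum Error {
//     #[snafu(display("could not read config from {path}"))]
//     ReadConfig { path: String, source: std::io::Error },
// }
//
// fn read_config(path: &str) -> Result<String, Error> {
//     // The generated context selector carries the `path` field; the io::Error
//     // becomes the `source` field when the `Result` is an `Err`.
//     std::fs::read_to_string(path).context(ReadConfigSnafu { path })
// }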