neli-proc-macros-0.1.1/.cargo_vcs_info.json0000644000000001560000000000100142070ustar { "git": { "sha1": "4cbbee7251986e11a9f40a76b1eae19b9b11ed38" }, "path_in_vcs": "neli-proc-macros" }neli-proc-macros-0.1.1/.gitignore000064400000000000000000000000130072674642500150070ustar 00000000000000Cargo.lock neli-proc-macros-0.1.1/Cargo.toml0000644000000017240000000000100122070ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "neli-proc-macros" version = "0.1.1" authors = ["John Baublitz "] description = "Procedural macros for neli" license = "BSD-3-Clause" repository = "https://github.com/jbaublitz/neli" [lib] proc-macro = true [dependencies.either] version = "1.6" [dependencies.proc-macro2] version = "1" [dependencies.quote] version = "1" [dependencies.serde] version = "1" features = ["derive"] [dependencies.syn] version = "1" features = ["full", "extra-traits"] neli-proc-macros-0.1.1/Cargo.toml.orig000064400000000000000000000007120072674642500157140ustar 00000000000000[package] name = "neli-proc-macros" version = "0.1.1" authors = ["John Baublitz "] edition = "2018" description = "Procedural macros for neli" license = "BSD-3-Clause" repository = "https://github.com/jbaublitz/neli" [lib] proc-macro = true [dependencies] quote = "1" proc-macro2 = "1" either = "1.6" [dependencies.serde] version = "1" features = ["derive"] [dependencies.syn] version = "1" features = ["full", "extra-traits"] neli-proc-macros-0.1.1/src/derive_frombytes.rs000064400000000000000000000152110072674642500175320ustar 00000000000000use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use syn::{ parse_str, AngleBracketedGenericArguments, Attribute, Fields, GenericArgument, GenericParam, Ident, ItemStruct, LifetimeDef, PathArguments, Token, TraitBound, Type, TypeParamBound, }; use crate::shared::{process_input, process_lifetime, process_size, StructInfo}; fn add_lifetime(trt: &mut TraitBound, lt: &LifetimeDef) { trt.path.segments.iter_mut().for_each(|elem| { if elem.ident == parse_str::("FromBytes").unwrap() || elem.ident == parse_str::("FromBytesWithInput").unwrap() { if let PathArguments::AngleBracketed(ref mut args) = elem.arguments { args.args = std::iter::once(GenericArgument::Lifetime(lt.lifetime.clone())) .chain(args.args.clone()) .collect(); } else if let PathArguments::None = elem.arguments { elem.arguments = PathArguments::AngleBracketed(AngleBracketedGenericArguments { colon2_token: Some(Token![::](Span::call_site())), lt_token: Token![<](Span::call_site()), args: std::iter::once(GenericArgument::Lifetime(lt.lifetime.clone())).collect(), gt_token: Token![>](Span::call_site()), }); } } }); } fn process_attrs(lt: &LifetimeDef, field_type: Type, field_attrs: Vec) -> TokenStream2 { let input = process_input(&field_attrs); let size = process_size(&field_attrs) .unwrap_or_else(|| parse_str("input").expect("input is a valid expression")); match input { Some(Some(input)) => quote! 
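// Arm for `#[neli(input = "expr")]`: the generated block below evaluates the
// user-supplied expression, logs the slice about to be read, and calls
// FromBytesWithInput::from_bytes_with_input() with that value as the input.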
{ { let input = #input; log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; log::trace!( "Buffer to be deserialized: {:?}", &buffer.get_ref()[position..position + #size], ); let ok = <#field_type as neli::FromBytesWithInput<#lt>>::from_bytes_with_input( buffer, input, )?; log::trace!("Field deserialized: {:?}", ok); ok } }, Some(None) => quote! { { log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; log::trace!( "Buffer to be deserialized: {:?}", &buffer.get_ref()[position..position + #size], ); let ok = <#field_type as neli::FromBytesWithInput<#lt>>::from_bytes_with_input( buffer, input, )?; log::trace!("Field deserialized: {:?}", ok); ok } }, None => quote! { { log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; log::trace!( "Buffer to be deserialized: {:?}", &buffer.get_ref()[position..position + <#field_type as neli::TypeSize>::type_size()], ); let ok = <#field_type as neli::FromBytes<#lt>>::from_bytes(buffer)?; log::trace!("Field deserialized: {:?}", ok); ok } }, } } pub fn impl_frombytes_struct( is: ItemStruct, trt: &str, method_name: &str, input_type: Option, input: Option, ) -> TokenStream2 { let is_named = matches!(is.fields, Fields::Named(_)); let info = StructInfo::from_item_struct(is, Some(trt), "from_bytes_bound", false); let trt = Ident::new(trt, Span::call_site()); let method_name = Ident::new(method_name, Span::call_site()); let ( struct_name, mut generics, generics_without_bounds, field_names, field_types, field_attrs, padded, ) = info.into_tuple(); let lt = process_lifetime(&mut generics); if field_names.is_empty() { return quote! { impl#generics neli::#trt<#lt> for #struct_name#generics_without_bounds { #input_type fn #method_name(buffer: &mut std::io::Cursor<&#lt [u8]> #input) -> Result { Ok(#struct_name) } } }; } let struct_expr = if is_named { quote! { #struct_name { #( #field_names, )* } } } else { quote! { #struct_name( #( #field_names, )* ) } }; for generic in generics.params.iter_mut() { if let GenericParam::Type(ref mut ty) = generic { for bound in ty.bounds.iter_mut() { if let TypeParamBound::Trait(ref mut trt) = bound { add_lifetime(trt, <); } } } } let from_bytes_exprs = field_types .into_iter() .zip(field_attrs.into_iter()) .map(|(field_type, field_attrs)| process_attrs(<, field_type, field_attrs)); let padding = if padded { quote! { <#struct_name#generics_without_bounds as neli::FromBytes<#lt>>::strip(buffer)?; } } else { TokenStream2::new() }; quote! { impl#generics neli::#trt<#lt> for #struct_name#generics_without_bounds { #input_type fn #method_name(buffer: &mut std::io::Cursor<&#lt [u8]> #input) -> Result { let pos = buffer.position(); let res = { let mut from_bytes_impl = || { log::trace!("Deserializing data type {}", stringify!(#struct_name)); #( let #field_names = #from_bytes_exprs; )* #padding Ok(#struct_expr) }; from_bytes_impl() }; match res { Ok(res) => Ok(res), Err(e) => { buffer.set_position(pos); Err(e) }, } } } } } neli-proc-macros-0.1.1/src/derive_header.rs000064400000000000000000000012730072674642500167530ustar 00000000000000use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::ItemStruct; use crate::shared::StructInfo; fn generate_header(mut i: StructInfo) -> TokenStream2 { let _ = i.pop_field(); let (struct_name, generics, generics_without_bounds, _, field_types, _, _) = i.into_tuple(); quote! 
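// Generated impl: `header_size()` sums `TypeSize::type_size()` over every
// field except the last one, which `pop_field()` above removed because the
// final field is treated as the payload rather than part of the header.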
{ impl#generics neli::Header for #struct_name#generics_without_bounds { fn header_size() -> usize { #( <#field_types as neli::TypeSize>::type_size() )+* } } } } pub fn impl_header_struct(is: ItemStruct) -> TokenStream2 { let info = StructInfo::from_item_struct(is, None, "header_bound", false); generate_header(info) } neli-proc-macros-0.1.1/src/derive_size.rs000064400000000000000000000044630072674642500165010ustar 00000000000000use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::{FieldsNamed, FieldsUnnamed, Ident, ItemEnum, ItemStruct}; use crate::shared::{ generate_arms, generate_named_fields, generate_unnamed_fields, process_impl_generics, FieldInfo, StructInfo, }; fn generate_size(i: StructInfo) -> TokenStream2 { let (struct_name, generics, generics_without_bounds, field_names, field_types, _, _) = i.into_tuple(); quote! { impl#generics neli::Size for #struct_name#generics_without_bounds { fn unpadded_size(&self) -> usize { #( <#field_types as neli::Size>::unpadded_size(&self.#field_names) )+* } } } } pub fn impl_size_struct(is: ItemStruct) -> TokenStream2 { let struct_info = StructInfo::from_item_struct(is, Some("Size"), "size_bound", true); generate_size(struct_info) } fn generate_named_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsNamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_named_fields(fields).into_iter()); quote! { #enum_name::#var_name { #(#field_names),* } => { #(<#types as neli::Size>::unpadded_size(&#field_names))+* }, } } fn generate_unnamed_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsUnnamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_unnamed_fields(fields, false).into_iter()); quote! { #enum_name::#var_name( #( #field_names ),* ) => { #( <#types as neli::Size>::unpadded_size(&#field_names) )+* } } } pub fn impl_size_enum(ie: ItemEnum) -> TokenStream2 { let (generics, generics_without_bounds) = process_impl_generics(ie.generics, Some("Size")); let enum_name = ie.ident; let arms = generate_arms( enum_name.clone(), ie.variants.into_iter().collect::>(), generate_named_pat_and_expr, generate_unnamed_pat_and_expr, quote! { 0 }, ); quote! { impl#generics neli::Size for #enum_name#generics_without_bounds { fn unpadded_size(&self) -> usize { match self { #(#arms)* } } } } } neli-proc-macros-0.1.1/src/derive_tobytes.rs000064400000000000000000000060760072674642500172220ustar 00000000000000use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::{FieldsNamed, FieldsUnnamed, Ident, ItemEnum, ItemStruct}; use crate::shared::{ generate_arms, generate_named_fields, generate_unnamed_fields, process_impl_generics, process_trait_bounds, FieldInfo, StructInfo, }; pub fn impl_tobytes_struct(is: ItemStruct) -> TokenStream2 { let info = StructInfo::from_item_struct(is, Some("ToBytes"), "to_bytes_bound", true); let (struct_name, generics, generics_without_bounds, field_names, field_types, _, padded) = info.into_tuple(); if field_names.is_empty() { return quote! { impl neli::ToBytes for #struct_name { fn to_bytes(&self, _: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { Ok(()) } } }; } let padding = if padded { quote! { <#struct_name#generics_without_bounds as neli::ToBytes>::pad(&self, buffer)?; } } else { TokenStream2::new() }; quote! 
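// Generated impl: `to_bytes()` serializes each field in declaration order and
// then emits the optional padding call computed above when the struct carries
// `#[neli(padding)]`.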
{ impl#generics neli::ToBytes for #struct_name#generics_without_bounds { fn to_bytes(&self, buffer: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { #( <#field_types as neli::ToBytes>::to_bytes(&self.#field_names, buffer)?; )* #padding Ok(()) } } } } fn generate_named_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsNamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_named_fields(fields).into_iter()); quote! { #enum_name::#var_name { #(#field_names),* } => { #(<#types as neli::ToBytes>::to_bytes(&#field_names, buffer)?; )* Ok(()) }, } } fn generate_unnamed_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsUnnamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_unnamed_fields(fields, false).into_iter()); quote! { #enum_name::#var_name( #( #field_names ),* ) => { #( <#types as neli::ToBytes>::to_bytes(#field_names, buffer)?; )* Ok(()) } } } pub fn impl_tobytes_enum(ie: ItemEnum) -> TokenStream2 { let (generics, generics_without_bounds) = process_impl_generics(ie.generics, Some("ToBytes")); let trait_bounds = process_trait_bounds(&ie.attrs, "to_bytes_bound"); let enum_name = ie.ident; let arms = generate_arms( enum_name.clone(), ie.variants.into_iter().collect::>(), generate_named_pat_and_expr, generate_unnamed_pat_and_expr, quote! { Ok(()) }, ); quote! { impl#generics neli::ToBytes for #enum_name#generics_without_bounds where #( #trait_bounds ),* { fn to_bytes(&self, buffer: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { match self { #(#arms)* } } } } } neli-proc-macros-0.1.1/src/lib.rs000064400000000000000000000170630072674642500147370ustar 00000000000000//! Procedural macros to be used with the library //! [`neli`](https://github.com/jbaublitz/neli). //! //! All derive macros other than `Header` generate implicit type //! parameter bounds on every type parameter which can be overriden //! with struct attributes. use proc_macro::TokenStream; use quote::quote; use syn::{parse, Item, Meta}; #[macro_use] mod shared; mod derive_frombytes; mod derive_header; mod derive_size; mod derive_tobytes; mod neli_enum; use derive_frombytes::*; use derive_header::*; use derive_size::*; use derive_tobytes::*; use neli_enum::*; /// This method converts a method from the form: /// /// ```no_compile /// use neli_proc_macros::neli_enum; /// /// #[neli_enum(serialized_type = "u16")] /// pub enum MyConstants { /// ConstOne = 1, /// ConstTwo = 2, /// ConstThree = 3, /// } /// ``` /// /// to: /// /// ``` /// pub enum MyConstants { /// ConstOne, /// ConstTwo, /// ConstThree, /// } /// ``` /// /// with [`From`] implemented reflexively for `MyConstants` and /// `u16`. #[proc_macro_attribute] pub fn neli_enum(attr: TokenStream, item: TokenStream) -> TokenStream { let attr_string = attr.to_string(); let meta = parse::(attr).unwrap_or_else(|_| panic!("{} is not a valid attribute", attr_string)); let enum_item = parse::(item).unwrap(); let enm = if let Item::Enum(e) = enum_item { e } else { panic!("This macro only operates on enums"); }; TokenStream::from(generate_neli_enum(enm, meta)) } /// Derives the neli `Size` trait for a struct or enum. /// /// Acceptable struct attribute is: /// * `#[neli(size_bound = "T: MyTrait")]` which will generate a /// trait bound in the impl for the specified type parameter. /// /// Implicit type parameter bound: `Size`. 
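///
/// A minimal usage sketch (hypothetical types; mirrors the attribute form
/// documented above):
///
/// ```no_compile
/// #[derive(neli_proc_macros::Size)]
/// #[neli(size_bound = "T: MyTrait")]
/// struct Wrapper<T> {
///     len: u16,
///     inner: T,
/// }
/// ```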
#[proc_macro_derive(Size, attributes(neli))] pub fn proc_macro_size(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_size_struct(strct), Item::Enum(enm) => impl_size_enum(enm), _ => panic!("Size can only be derived for structs and enums"), }) } /// Derives the neli `Header` trait for a struct or enum. Unlike /// other derive macros in this crate, the `Header` derive macro /// does not impose type parameter bounds on type parameters. /// See the accepted attribute for more information. The reason for /// this is that the last field is considered to be the payload. /// Because the payload may be represented by a type parameter, /// we cannot blindly restrict type parameters or else we impose /// an artificial restriction of `TypeSize` on the payload type /// parameter. This is a problem for the `Header` trait as the /// payload may be unsized even if the rest of the header is /// composed exclusively of statically sized types and are therefore /// compatible with the `TypeSize` trait. /// /// Acceptable struct attribute is: /// * `#[neli(header_bound = "T: MyTrait")]` which will generate a /// trait bound in the impl for the specified type parameter. /// /// While there is no implicit type parameter bound, every type /// parameter that does not correspond to a payload should have /// a specified type parameter bound of `TypeSize`. #[proc_macro_derive(Header, attributes(neli))] pub fn proc_macro_header(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_header_struct(strct), _ => panic!("Header can only be derived for structs"), }) } /// Derives the neli `FromBytes` trait for a struct. /// /// Acceptable struct attribute is: /// * `#[neli(from_bytes_bound = "T: MyTrait")]` which will generate /// a trait bound in the impl for the specified type parameter. /// * `#[neli(padding)]` which will add special handling for padding /// for this struct. /// /// Acceptable field attribute forms are: /// * `#[neli(input = "input_expression")]` which may only be used /// once for a struct. The behavior of this attribute is that a /// bound requirement will change from the implicit `FromBytes` to /// an implicit `FromBytesWithInput` bound. The method in this trait /// will be called with `input_expression` as the input provided. /// * `#[neli(input)]` which will transparently pass the input /// provided in the `FromBytesWithInput` method through to the /// `FromBytesWithInput` method for this field unchanged according /// to the rules described above. /// * `#[neli(size = "size_var_name")] which allows specifying a size of the data type /// that is different from the input specified by `#[neli(input)]`. Not specifying /// this attribute defaults to using `input` as the size as well. /// /// Implicit type parameter bound: `FromBytes`. #[proc_macro_derive(FromBytes, attributes(neli))] pub fn proc_macro_frombytes(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_frombytes_struct(strct, "FromBytes", "from_bytes", None, None), _ => panic!("FromBytes can only be derived for structs"), }) } /// Derives the neli `FromBytesWithInput` trait for a struct. /// /// Acceptable struct attribute is: /// * `#[neli(from_bytes_bound = "T: MyTrait")]` which will generate /// a trait bound in the impl for the specified type parameter. 
/// * `#[neli(padding)]` which will add special handling for padding /// for this struct. /// /// Acceptable field attribute forms are: /// * `#[neli(input = "input_expression")]` which may only be used /// once for a struct. The behavior of this attribute is that a /// bound requirement will change from the implicit `FromBytes` to /// an implicit `FromBytesWithInput` bound. The method in this trait /// will be called with `input_expression` as the input provided. /// * `#[neli(input)]` which will transparently pass the input /// provided in the `FromBytesWithInput` method through to the /// `FromBytesWithInput` method for this field unchanged according /// to the rules described above. /// * `#[neli(size = "size_var_name")] which allows specifying a size of the data type /// that is different from the input specified by `#[neli(input)]`. Not specifying /// this attribute defaults to using `input` as the size as well. /// /// Implicit type parameter bound: `FromBytes`. #[proc_macro_derive(FromBytesWithInput, attributes(neli))] pub fn proc_macro_frombyteswithinput(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_frombytes_struct( strct, "FromBytesWithInput", "from_bytes_with_input", Some(quote! { type Input = usize; }), Some(quote! { , input: Self::Input }), ), _ => panic!("FromBytesWithInput can only be derived for structs"), }) } /// Derives the neli `ToBytes` trait for a struct or enum. /// /// Acceptable struct attribute is: /// * `#[neli(to_bytes_bound = "T: MyTrait")]` which will generate a /// trait bound in the impl for the specified type parameter. /// * `#[neli(padding)]` which will add special handling for padding /// for this struct. /// /// Implicit type parameter bound: `ToBytes`. #[proc_macro_derive(ToBytes, attributes(neli))] pub fn proc_macro_tobytes(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_tobytes_struct(strct), Item::Enum(enm) => impl_tobytes_enum(enm), _ => panic!("ToBytes can only be derived for structs and enums"), }) } neli-proc-macros-0.1.1/src/neli_enum.rs000064400000000000000000000115270072674642500161430ustar 00000000000000use proc_macro::TokenStream; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use syn::{ parse, parse_str, Arm, Attribute, Expr, Ident, ItemEnum, Lit, Meta, Path, Token, Type, Variant, }; fn parse_type_attr(attr: Meta) -> Type { if let Meta::NameValue(nv) = attr { if nv.path == parse_str::("serialized_type").unwrap() { if let Lit::Str(ls) = nv.lit { return parse_str::(&ls.value()) .unwrap_or_else(|_| panic!("Invalid type supplied: {}", ls.value())); } } } panic!("Attribute in the form #[neli(serialized_type = \"TYPE_LITERAL_STR\")] required") } fn parse_enum(enm: &mut ItemEnum, ty: &Type) -> Vec<(Vec, Ident, Expr)> { let exprs = enm .variants .iter_mut() .map(|var| { if let Some((_, expr)) = var.discriminant.take() { (var.attrs.clone(), var.ident.clone(), expr) } else { panic!("All variants in the provided enum require an expression assignment") } }) .collect(); if !enm.variants.trailing_punct() { enm.variants.push_punct(Token![,](Span::call_site())); } enm.variants.push_value( parse::(TokenStream::from(quote! 
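// Catch-all variant appended to the enum: values that match none of the named
// constants are preserved as `UnrecognizedConst(#ty)`, keeping the generated
// `From` conversions lossless in both directions.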
{ UnrecognizedConst(#ty) })) .expect("Could not parse tokens as a variant"), ); exprs } fn parse_from_info( enum_name: Ident, var_info: Vec<(Vec, Ident, Expr)>, ) -> (Vec, Vec) { let mut from_const_info = Vec::new(); let mut from_type_info = Vec::new(); for (attributes, ident, expr) in var_info { let mut from_const_arm = parse::(TokenStream::from(quote! { i if i == #expr => #enum_name::#ident, })) .expect("Failed to parse tokens as a match arm"); from_const_arm.attrs = attributes.clone(); from_const_info.push(from_const_arm); let mut from_type_arm = parse::(TokenStream::from(quote! { #enum_name::#ident => #expr, })) .expect("Failed to parse tokens as a match arm"); from_type_arm.attrs = attributes.clone(); from_type_info.push(from_type_arm); } (from_const_info, from_type_info) } pub fn generate_neli_enum(mut enm: ItemEnum, meta: Meta) -> TokenStream2 { let enum_name = enm.ident.clone(); let ty = parse_type_attr(meta); let variant_info = parse_enum(&mut enm, &ty); let (from_const_info, from_type_info) = parse_from_info(enum_name.clone(), variant_info); quote! { #[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[allow(missing_docs)] #enm impl #enum_name { /// Check whether a given method is an unrecognized /// constant for the set of possible constants /// associated with the current type. pub fn is_unrecognized(&self) -> bool { match *self { #enum_name::UnrecognizedConst(_) => true, _ => false, } } } impl neli::Size for #enum_name { fn unpadded_size(&self) -> usize { std::mem::size_of::<#ty>() } } impl neli::TypeSize for #enum_name { fn type_size() -> usize { std::mem::size_of::<#ty>() } } impl neli::ToBytes for #enum_name { fn to_bytes(&self, buffer: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { let bin_rep: #ty = self.into(); bin_rep.to_bytes(buffer) } } impl<'lt> neli::FromBytes<'lt> for #enum_name { fn from_bytes(buffer: &mut std::io::Cursor<&'lt [u8]>) -> Result { Ok(#enum_name::from(<#ty as neli::FromBytes>::from_bytes( buffer )?)) } } impl From<#ty> for #enum_name { fn from(cnst: #ty) -> Self { match cnst { #( #from_const_info )* i => #enum_name::UnrecognizedConst(i), } } } impl From<#enum_name> for #ty { fn from(enm: #enum_name) -> Self { match enm { #( #from_type_info )* #enum_name::UnrecognizedConst(i) => i, } } } impl From<&#enum_name> for #ty { fn from(enm: &#enum_name) -> Self { match *enm { #( #from_type_info )* #enum_name::UnrecognizedConst(i) => i, } } } } } neli-proc-macros-0.1.1/src/shared.rs000064400000000000000000000427140072674642500154400ustar 00000000000000use std::{any::type_name, collections::HashMap}; use proc_macro::TokenStream; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::{quote, ToTokens}; use syn::{ parse, parse::Parse, parse_str, punctuated::Punctuated, token::{Add, Colon2}, Attribute, Expr, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics, Ident, Index, ItemStruct, LifetimeDef, Lit, Meta, MetaNameValue, NestedMeta, Path, PathArguments, PathSegment, Token, TraitBound, TraitBoundModifier, Type, TypeParam, TypeParamBound, Variant, }; /// Represents a field as either an identifier or an index. pub enum FieldRepr { Index(Index), Ident(Ident), } impl ToTokens for FieldRepr { fn to_tokens(&self, tokens: &mut TokenStream2) { match self { FieldRepr::Index(i) => i.to_tokens(tokens), FieldRepr::Ident(i) => i.to_tokens(tokens), } } } /// Represents the field name, type, and all attributes associated /// with this field. 
pub struct FieldInfo {
    field_name: FieldRepr,
    field_type: Type,
    field_attrs: Vec<Attribute>,
}

impl FieldInfo {
    /// Convert field info to a tuple.
    fn into_tuple(self) -> (FieldRepr, Type, Vec<Attribute>) {
        (self.field_name, self.field_type, self.field_attrs)
    }

    /// Convert a vector of [`FieldInfo`]s to a tuple of vectors
    /// each containing name, type, or attributes.
    pub fn to_vecs<I>(v: I) -> (Vec<FieldRepr>, Vec<Type>, Vec<Vec<Attribute>>)
    where
        I: Iterator<Item = FieldInfo>,
    {
        v.into_iter().fold(
            (Vec::new(), Vec::new(), Vec::new()),
            |(mut names, mut types, mut attrs), info| {
                let (name, ty, attr) = info.into_tuple();
                names.push(name);
                types.push(ty);
                attrs.push(attr);
                (names, types, attrs)
            },
        )
    }
}

/// Necessary information for a given struct to generate trait
/// implementations.
pub struct StructInfo {
    struct_name: Ident,
    generics: Generics,
    generics_without_bounds: Generics,
    field_info: Vec<FieldInfo>,
    padded: bool,
}

type StructInfoTuple = (
    Ident,
    Generics,
    Generics,
    Vec<FieldRepr>,
    Vec<Type>,
    Vec<Vec<Attribute>>,
    bool,
);

impl StructInfo {
    /// Extract the necessary information from an
    /// [`ItemStruct`][syn::ItemStruct] data structure.
    pub fn from_item_struct(
        i: ItemStruct,
        trait_name: Option<&str>,
        trait_bound_path: &str,
        uses_self: bool,
    ) -> Self {
        let (mut generics, generics_without_bounds) =
            process_impl_generics(i.generics, trait_name);

        let trait_bounds = process_trait_bounds(&i.attrs, trait_bound_path);
        override_trait_bounds_on_generics(&mut generics, &trait_bounds);

        let field_info = match i.fields {
            Fields::Named(fields_named) => generate_named_fields(fields_named),
            Fields::Unnamed(fields_unnamed) => generate_unnamed_fields(fields_unnamed, uses_self),
            Fields::Unit => Vec::new(),
        };

        let padded = process_padding(&i.attrs);

        StructInfo {
            struct_name: i.ident,
            generics,
            generics_without_bounds,
            field_info,
            padded,
        }
    }

    /// Remove the last field from the record.
    pub fn pop_field(&mut self) {
        let _ = self.field_info.pop();
    }

    /// Convert all necessary struct information into a tuple of
    /// values.
    pub fn into_tuple(mut self) -> StructInfoTuple {
        let (field_names, field_types, field_attrs) = self.field_info();

        (
            self.struct_name,
            self.generics,
            self.generics_without_bounds,
            field_names,
            field_types,
            field_attrs,
            self.padded,
        )
    }

    /// Convert all field information into a tuple.
    fn field_info(&mut self) -> (Vec<FieldRepr>, Vec<Type>, Vec<Vec<Attribute>>) {
        FieldInfo::to_vecs(self.field_info.drain(..))
    }
}

/// Convert a list of identifiers into a path where the path segments
/// are added in the order that they appear in the list.
fn path_from_idents(idents: &[&str]) -> Path {
    Path {
        leading_colon: None,
        segments: idents
            .iter()
            .map(|ident| PathSegment {
                ident: Ident::new(ident, Span::call_site()),
                arguments: PathArguments::None,
            })
            .collect::<Punctuated<PathSegment, Colon2>>(),
    }
}

/// Process all type parameters in the type parameter definition for
/// an `impl` block. Optionally add a trait bound for all type parameters
/// if `required_trait` is `Some(_)`.
///
/// The first return value in the tuple is the list of type parameters
/// with trait bounds added. The second return value is the same list of
/// type parameters without trait bounds, to be passed into the type
/// parameter list for a struct.
/// /// # Example: /// ## impl block /// /// ```no_compile /// trait MyTrait {} /// /// impl MyStruct { /// fn nothing() {} /// } /// ``` /// /// ## Method call /// `neli_proc_macros::process_impl_generics(generics, Some("MyTrait"))` /// /// ## Result /// ```no_compile /// (, ) /// ``` /// /// or rather: /// /// ```no_compile /// impl MyStruct { /// fn nothing() {} /// } /// ``` pub fn process_impl_generics( mut generics: Generics, required_trait: Option<&str>, ) -> (Generics, Generics) { if let Some(rt) = required_trait { for gen in generics.params.iter_mut() { if let GenericParam::Type(param) = gen { param.colon_token = Some(Token![:](Span::call_site())); param.bounds.push(TypeParamBound::Trait(TraitBound { paren_token: None, modifier: TraitBoundModifier::None, lifetimes: None, path: path_from_idents(&["neli", rt]), })); param.eq_token = None; param.default = None; } } } let mut generics_without_bounds: Generics = generics.clone(); for gen in generics_without_bounds.params.iter_mut() { if let GenericParam::Type(param) = gen { param.colon_token = None; param.bounds.clear(); param.eq_token = None; param.default = None; } } (generics, generics_without_bounds) } /// Remove attributes that should not be carried over to an `impl` /// definition and only belong in the data structure like documentation /// attributes. fn remove_bad_attrs(attrs: Vec) -> Vec { attrs .into_iter() .filter(|attr| { if let Ok(meta) = attr.parse_meta() { match meta { Meta::NameValue(MetaNameValue { path, .. }) => { !(path == parse_str::("doc").expect("doc should be valid path")) } _ => true, } } else { panic!("Could not parse provided attribute {}", attr.tokens,) } }) .collect() } /// Generate a pattern and associated expression for each variant /// in an enum. fn generate_pat_and_expr( enum_name: Ident, var_name: Ident, fields: Fields, generate_named_pat_and_expr: &N, generate_unnamed_pat_and_expr: &U, unit: &TokenStream2, ) -> TokenStream2 where N: Fn(Ident, Ident, FieldsNamed) -> TokenStream2, U: Fn(Ident, Ident, FieldsUnnamed) -> TokenStream2, { match fields { Fields::Named(fields) => generate_named_pat_and_expr(enum_name, var_name, fields), Fields::Unnamed(fields) => generate_unnamed_pat_and_expr(enum_name, var_name, fields), Fields::Unit => quote! { #enum_name::#var_name => #unit, }, } } /// Convert an enum variant into an arm of a match statement. fn generate_arm( attrs: Vec, enum_name: Ident, var_name: Ident, fields: Fields, generate_named_pat_and_expr: &N, generate_unnamed_pat_and_expr: &U, unit: &TokenStream2, ) -> TokenStream2 where N: Fn(Ident, Ident, FieldsNamed) -> TokenStream2, U: Fn(Ident, Ident, FieldsUnnamed) -> TokenStream2, { let attrs = remove_bad_attrs(attrs) .into_iter() .map(|attr| { attr.parse_meta() .unwrap_or_else(|_| panic!("Failed to parse attribute {}", attr.tokens)) }) .collect::>(); let arm = generate_pat_and_expr( enum_name, var_name, fields, generate_named_pat_and_expr, generate_unnamed_pat_and_expr, unit, ); quote! { #( #attrs )* #arm } } /// Generate all arms of a match statement. 
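///
/// Each variant is mapped to a single arm by [`generate_arm`], using the
/// named-field, unnamed-field, or unit expression generator that matches
/// the variant's shape.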
pub fn generate_arms( enum_name: Ident, variants: Vec, generate_named_pat_and_expr: N, generate_unnamed_pat_and_expr: U, unit: TokenStream2, ) -> Vec where N: Fn(Ident, Ident, FieldsNamed) -> TokenStream2, U: Fn(Ident, Ident, FieldsUnnamed) -> TokenStream2, { variants .into_iter() .map(|var| { let variant_name = var.ident; generate_arm( var.attrs, enum_name.clone(), variant_name, var.fields, &generate_named_pat_and_expr, &generate_unnamed_pat_and_expr, &unit, ) }) .collect() } /// Generate a list of named fields in accordance with the struct. pub fn generate_named_fields(fields: FieldsNamed) -> Vec { fields .named .into_iter() .fold(Vec::new(), |mut info, field| { info.push(FieldInfo { field_name: FieldRepr::Ident(field.ident.expect("Must be named")), field_type: field.ty, field_attrs: field.attrs, }); info }) } /// Generate unnamed fields as either indicies to be accessed using /// `self` or placeholder variable names for match-style patterns. pub fn generate_unnamed_fields(fields: FieldsUnnamed, uses_self: bool) -> Vec { fields .unnamed .into_iter() .enumerate() .fold(Vec::new(), |mut fields, (index, field)| { fields.push(FieldInfo { field_name: if uses_self { FieldRepr::Index(Index { index: index as u32, span: Span::call_site(), }) } else { FieldRepr::Ident(Ident::new( &String::from((b'a' + index as u8) as char), Span::call_site(), )) }, field_type: field.ty, field_attrs: field.attrs, }); fields }) } /// Returns [`true`] if the given attribute is present in the list. fn attr_present(attrs: &[Attribute], attr_name: &str) -> bool { for attr in attrs { let meta = attr .parse_meta() .unwrap_or_else(|_| panic!("Failed to parse attribute {}", attr.tokens)); if let Meta::List(list) = meta { if list.path == parse_str::("neli").expect("neli is valid path") { for nested in list.nested { if let NestedMeta::Meta(Meta::Path(path)) = nested { if path == parse_str::(attr_name) .unwrap_or_else(|_| panic!("{} should be valid path", attr_name)) { return true; } } } } } } false } /// Process attributes to find all attributes with the name `attr_name`. /// Return a [`Vec`] of [`Option`] types with the associated literal parsed /// into type parameter `T`. `T` must allow parsing from a string to be /// used with this method. fn process_attr(attrs: &[Attribute], attr_name: &str) -> Vec> where T: Parse, { let mut output = Vec::new(); for attr in attrs { let meta = attr .parse_meta() .unwrap_or_else(|_| panic!("Failed to parse attribute {}", attr.tokens)); if let Meta::List(list) = meta { if list.path == parse_str::("neli").expect("neli is valid path") { for nested in list.nested { if let NestedMeta::Meta(Meta::NameValue(MetaNameValue { path, lit: Lit::Str(lit), .. })) = nested { if path == parse_str::(attr_name) .unwrap_or_else(|_| panic!("{} should be valid path", attr_name)) { output.push(Some(parse_str::(&lit.value()).unwrap_or_else(|_| { panic!( "{} should be valid tokens of type {}", &lit.value(), type_name::() ) }))); } } else if let NestedMeta::Meta(Meta::Path(path)) = nested { if path == parse_str::(attr_name) .unwrap_or_else(|_| panic!("{} should be valid path", attr_name)) { output.push(None); } } } } } } output } pub fn process_trait_bounds(attrs: &[Attribute], trait_bound_path: &str) -> Vec { process_attr(attrs, trait_bound_path) .into_iter() .flatten() .collect() } /// Handles the attribute `#[neli(padding)]`. 
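///
/// When the attribute is present, the `ToBytes` derive emits a trailing
/// `pad()` call and the `FromBytes`/`FromBytesWithInput` derives emit a
/// `strip()` call in the generated implementation.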
pub fn process_padding(attrs: &[Attribute]) -> bool { attr_present(attrs, "padding") } /// Handles the attribute `#[neli(input)]` or `#[neli(input = "...")]` /// when deriving [`FromBytes`][neli::FromBytes] implementations. /// /// Returns: /// * [`None`] if the attribute is not present /// * [`Some(None)`] if the attribute is present and has no /// associated expression /// * [`Some(Some(_))`] if the attribute is present and /// has an associated expression pub fn process_input(attrs: &[Attribute]) -> Option> { let mut exprs = process_attr(attrs, "input"); if exprs.len() > 1 { panic!("Only one input expression allowed for attribute #[neli(input = \"...\")]"); } else { exprs.pop() } } /// Handles the attribute `#[neli(size = "...")]` /// when deriving [`FromBytes`][neli::FromBytes] implementations. /// /// Returns: /// * [`None`] if the attribute is not present /// associated expression /// * [`Some(_)`] if the attribute is present and has an associated expression pub fn process_size(attrs: &[Attribute]) -> Option { let mut exprs = process_attr(attrs, "size"); if exprs.len() > 1 { panic!("Only one input expression allowed for attribute #[neli(size = \"...\")]"); } else { exprs .pop() .map(|opt| opt.expect("#[neli(size = \"...\")] must have associated expression")) } } /// If the first type parameter of a list of type parameters is a lifetime, /// extract it for use in other parts of the procedural macro code. /// /// # Example /// `impl<'a, I, P>` would return `'a`. pub fn process_lifetime(generics: &mut Generics) -> LifetimeDef { if let Some(GenericParam::Lifetime(lt)) = generics.params.first() { lt.clone() } else { let mut punc = Punctuated::new(); let lt = parse::(TokenStream::from(quote! { 'lifetime })) .expect("'lifetime should be valid lifetime"); punc.push(GenericParam::Lifetime(lt.clone())); punc.push_punct(Token![,](Span::call_site())); punc.extend(generics.params.iter().cloned()); generics.params = punc; lt } } /// Allow overriding the trait bounds specified by the method /// [`process_impl_generics`][process_impl_generics]. /// /// # Example /// ```no_compile /// use std::marker::PhantomData; /// /// struct MyStruct(PhantomData, PhantomData); /// /// trait MyTrait {} /// trait AnotherTrait {} /// /// // Input /// /// impl MyStruct { /// fn nothing() {} /// } /// /// // Result /// /// impl MyStruct { /// fn nothing() {} /// } /// ``` fn override_trait_bounds_on_generics(generics: &mut Generics, trait_bound_overrides: &[TypeParam]) { let mut overrides = trait_bound_overrides.iter().cloned().fold( HashMap::>::new(), |mut map, param| { if let Some(bounds) = map.get_mut(¶m.ident) { bounds.extend(param.bounds); } else { map.insert(param.ident, param.bounds); } map }, ); for generic in generics.params.iter_mut() { if let GenericParam::Type(ref mut ty) = generic { let ident = &ty.ident; if let Some(ors) = overrides.remove(ident) { ty.colon_token = Some(Token![:](Span::call_site())); ty.bounds = ors; ty.eq_token = None; ty.default = None; } } } }