rkyv_derive-0.7.44/.cargo_vcs_info.json
{
  "git": {
    "sha1": "02010146cb414f730b7ed098aca8c3f09a5544d9"
  },
  "path_in_vcs": "rkyv_derive"
}

rkyv_derive-0.7.44/Cargo.toml
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
name = "rkyv_derive"
version = "0.7.44"
authors = ["David Koloski "]
description = "Derive macro for rkyv"
license = "MIT"
repository = "https://github.com/rkyv/rkyv"

[package.metadata.docs.rs]
all-features = true

[lib]
proc-macro = true

[dependencies.proc-macro2]
version = "1.0"

[dependencies.quote]
version = "1.0"

[dependencies.syn]
version = "1.0"

[features]
arbitrary_enum_discriminant = []
archive_be = []
archive_le = []
copy = []
default = []
strict = []

rkyv_derive-0.7.44/Cargo.toml.orig
[package]
name = "rkyv_derive"
description = "Derive macro for rkyv"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
proc-macro = true

[dependencies]
proc-macro2.workspace = true
syn.workspace = true
quote.workspace = true

[features]
default = []
arbitrary_enum_discriminant = []
archive_be = []
archive_le = []
copy = []
strict = []

[package.metadata.docs.rs]
all-features = true

rkyv_derive-0.7.44/LICENSE
Copyright 2021 David Koloski

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
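The manifest above identifies this crate as the derive-macro companion to rkyv; downstream code normally consumes the derives through the `rkyv` facade crate rather than depending on `rkyv_derive` directly. As a point of reference for the derive code in the source files that follow, here is a minimal usage sketch, not part of the crate source, assuming the rkyv 0.7 facade with its default and `validation` features enabled; the type `Example`, its fields, and the 256-byte scratch size are illustrative choices.

use rkyv::{Archive, Deserialize, Serialize};

// `compare(PartialEq)` and `check_bytes` are attributes handled by the derive
// code in src/archive.rs below.
#[derive(Archive, Serialize, Deserialize, Debug, PartialEq)]
#[archive(compare(PartialEq), check_bytes)]
struct Example {
    id: u32,
    name: String,
}

fn main() {
    let value = Example { id: 7, name: "seven".to_string() };

    // Serialize into an aligned byte buffer.
    let bytes = rkyv::to_bytes::<_, 256>(&value).expect("failed to serialize");

    // Validate and access the archived value in place, without deserializing.
    let archived = rkyv::check_archived_root::<Example>(&bytes).expect("invalid archive");
    assert!(archived == &value);

    // Deserialize back into the original type.
    let restored: Example = archived
        .deserialize(&mut rkyv::Infallible)
        .expect("failed to deserialize");
    assert_eq!(restored, value);
}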
rkyv_derive-0.7.44/src/archive.rs000064400000000000000000002002400072674642500150550ustar 00000000000000use crate::{ attributes::{parse_attributes, Attributes}, repr::{BaseRepr, IntRepr, Repr}, util::{add_bounds, strip_raw}, with::{make_with_cast, make_with_ty}, }; use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{ parse_quote, spanned::Spanned, Attribute, Data, DeriveInput, Error, Field, Fields, Ident, Index, LitStr, Meta, NestedMeta, Type, }; pub fn derive(input: DeriveInput) -> Result { let attributes = parse_attributes(&input)?; derive_archive_impl(input, &attributes) } fn field_archive_attrs(field: &Field) -> impl '_ + Iterator { field .attrs .iter() .filter_map(|attr| { if let Ok(Meta::List(list)) = attr.parse_meta() { if list.path.is_ident("archive_attr") { Some(list.nested) } else { None } } else { None } }) .flatten() } fn derive_archive_impl( mut input: DeriveInput, attributes: &Attributes, ) -> Result { let where_clause = input.generics.make_where_clause(); if let Some(ref bounds) = attributes.archive_bound { add_bounds(bounds, where_clause)?; } let name = &input.ident; let vis = &input.vis; let generics = &input.generics; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let where_clause = where_clause.unwrap(); let default_rkyv_path = parse_quote! { ::rkyv }; let rkyv_path = attributes.rkyv_path.as_ref().unwrap_or(&default_rkyv_path); let with_ty = make_with_ty(rkyv_path); let with_cast = make_with_cast(rkyv_path); let derive_check_bytes = if attributes.check_bytes.is_some() { let bytecheck_path_str = attributes .rkyv_path_str .as_ref() .map(|x| LitStr::new(&format!("{}::bytecheck", x.value()), x.span())) .unwrap_or_else(|| parse_quote!("::rkyv::bytecheck")); vec![ parse_quote! { #[derive(#rkyv_path::bytecheck::CheckBytes)] }, parse_quote! { #[check_bytes(crate = #bytecheck_path_str)] }, ] } else { Vec::new() }; let archive_attrs = derive_check_bytes.into_iter().chain( attributes .attrs .iter() .map::(|d| parse_quote! { #[#d] }), ); if let Some(ref archive_as) = attributes.archive_as { if let Some(ref ident) = attributes.archived { return Err(Error::new_spanned( ident, "archived = \"...\" may not be used with as = \"...\" because no type is generated", )); } if let Some(first) = attributes.attrs.first() { return Err(Error::new_spanned( first, format!( "\ archive_attr(...) may not be used with as = \"...\"\n\ place any attributes on the archived type ({}) instead\ ", archive_as.value(), ), )); } if let Some(span) = attributes .archived_repr .base_repr .map(|(_, s)| s) .or_else(|| attributes.archived_repr.modifier.as_ref().map(|(_, s)| *s)) { return Err(Error::new( span, format!( "\ repr(...) may not be used with as = \"...\"\n\ place the repr attribute on the archived type ({}) instead\ ", archive_as.value() ), )); } } let archived_name = attributes.archived.as_ref().map_or_else( || Ident::new(&format!("Archived{}", strip_raw(name)), name.span()), |value| value.clone(), ); let archived_doc = format!("An archived [`{}`]", name); let archived_type = attributes.archive_as.as_ref().map_or_else( || Ok(parse_quote! 
{ #archived_name #ty_generics }), |lit| lit.parse::(), )?; let resolver = attributes.resolver.as_ref().map_or_else( || Ident::new(&format!("{}Resolver", strip_raw(name)), name.span()), |value| value.clone(), ); let resolver_doc = format!("The resolver for an archived [`{}`]", name); let (archive_types, archive_impls) = match input.data { Data::Struct(ref data) => { let base_repr = if cfg!(feature = "strict") { Some(match attributes.archived_repr.base_repr { // The base repr for structs may not be i*/u* in strict mode Some((BaseRepr::Int(_), span)) => return Err(Error::new( span, "archived structs may only be repr(C) or repr(transparent) in strict mode", )), // The base repr may be C or transparent in strict mode Some((repr @ BaseRepr::C | repr @ BaseRepr::Transparent, span)) => (repr, span), // If unspecified, the base repr is set to C None => (BaseRepr::C, Span::call_site()), }) } else { attributes.archived_repr.base_repr }; let repr = Repr { base_repr, modifier: attributes.archived_repr.modifier.clone(), }; match data.fields { Fields::Named(ref fields) => { let mut archive_where = where_clause.clone(); for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; archive_where .predicates .push(parse_quote! { #ty: #rkyv_path::Archive }); } let resolver_fields = fields.named.iter().map(|f| { let name = &f.ident; let ty = with_ty(f).unwrap(); quote! { #name: #rkyv_path::Resolver<#ty> } }); let archived_def = if attributes.archive_as.is_none() { let archived_fields = fields.named.iter().map(|f| { let field_name = f.ident.as_ref(); let ty = with_ty(f).unwrap(); let vis = &f.vis; let field_doc = format!( "The archived counterpart of [`{}::{}`]", name, field_name.unwrap() ); let archive_attrs = field_archive_attrs(f); quote! { #[doc = #field_doc] #(#[#archive_attrs])* #vis #field_name: #rkyv_path::Archived<#ty> } }); Some(quote! { #[automatically_derived] #[doc = #archived_doc] #(#archive_attrs)* #repr #vis struct #archived_name #generics #archive_where { #(#archived_fields,)* } }) } else { None }; let resolve_fields = fields.named.iter().map(|f| { let name = &f.ident; let field = with_cast(f, parse_quote! { (&self.#name) }).unwrap(); quote! { let (fp, fo) = out_field!(out.#name); #rkyv_path::Archive::resolve(#field, pos + fp, resolver.#name, fo); } }); let mut partial_eq_impl = None; let mut partial_ord_impl = None; if let Some((_, ref compares)) = attributes.compares { for compare in compares { if compare.is_ident("PartialEq") { let mut partial_eq_where = archive_where.clone(); for field in fields.named.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_eq_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialEq<#ty> }, ); } let field_names = fields.named.iter().map(|f| &f.ident); partial_eq_impl = Some(quote! 
{ impl #impl_generics PartialEq<#archived_type> for #name #ty_generics #partial_eq_where { #[inline] fn eq(&self, other: &#archived_type) -> bool { true #(&& other.#field_names.eq(&self.#field_names))* } } impl #impl_generics PartialEq<#name #ty_generics> for #archived_type #partial_eq_where { #[inline] fn eq(&self, other: &#name #ty_generics) -> bool { other.eq(self) } } }); } else if compare.is_ident("PartialOrd") { let mut partial_ord_where = archive_where.clone(); for field in fields.named.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let archived_ty = with_ty(field).unwrap(); partial_ord_where.predicates.push( parse_quote! { Archived<#archived_ty>: PartialOrd<#ty> }, ); } let field_names = fields.named.iter().map(|f| &f.ident); partial_ord_impl = Some(quote! { impl #impl_generics PartialOrd<#archived_type> for #name #ty_generics #partial_ord_where { #[inline] fn partial_cmp(&self, other: &#archived_type) -> Option<::core::cmp::Ordering> { #( match other.#field_names.partial_cmp(&self.#field_names) { Some(::core::cmp::Ordering::Equal) => (), x => return x, } )* Some(::core::cmp::Ordering::Equal) } } impl #impl_generics PartialOrd<#name #ty_generics> for #archived_type #partial_ord_where { #[inline] fn partial_cmp(&self, other: &#name #ty_generics) -> Option<::core::cmp::Ordering> { other.partial_cmp(self) } } }); } else { return Err(Error::new_spanned( compare, "unrecognized compare argument, supported compares are PartialEq and PartialOrd" )); } } } let copy_safe_impl = if cfg!(feature = "copy") && attributes.copy_safe.is_some() { let mut copy_safe_where = where_clause.clone(); for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field).unwrap(); copy_safe_where .predicates .push(parse_quote! { #ty: #rkyv_path::copy::ArchiveCopySafe }); } Some(quote! { unsafe impl #impl_generics #rkyv_path::copy::ArchiveCopySafe for #name #ty_generics #copy_safe_where {} }) } else { None }; ( quote! { #archived_def #[automatically_derived] #[doc = #resolver_doc] #vis struct #resolver #generics #archive_where { #(#resolver_fields,)* } }, quote! { impl #impl_generics Archive for #name #ty_generics #archive_where { type Archived = #archived_type; type Resolver = #resolver #ty_generics; // Some resolvers will be (), this allow is to prevent clippy from complaining #[allow(clippy::unit_arg)] #[inline] unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) { #(#resolve_fields)* } } #partial_eq_impl #partial_ord_impl #copy_safe_impl }, ) } Fields::Unnamed(ref fields) => { let mut archive_where = where_clause.clone(); for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; archive_where .predicates .push(parse_quote! { #ty: #rkyv_path::Archive }); } let resolver_fields = fields.unnamed.iter().map(|f| { let ty = with_ty(f).unwrap(); quote! { #rkyv_path::Resolver<#ty> } }); let archived_def = if attributes.archive_as.is_none() { let archived_fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let ty = with_ty(f).unwrap(); let vis = &f.vis; let field_doc = format!("The archived counterpart of [`{}::{}`]", name, i); let archive_attrs = field_archive_attrs(f); quote! { #[doc = #field_doc] #(#[#archive_attrs])* #vis #rkyv_path::Archived<#ty> } }); Some(quote! 
{ #[automatically_derived] #[doc = #archived_doc] #(#archive_attrs)* #repr #vis struct #archived_name #generics (#(#archived_fields,)*) #archive_where; }) } else { None }; let resolve_fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let index = Index::from(i); let field = with_cast(f, parse_quote! { (&self.#index) }).unwrap(); quote! { let (fp, fo) = out_field!(out.#index); #rkyv_path::Archive::resolve(#field, pos + fp, resolver.#index, fo); } }); let mut partial_eq_impl = None; let mut partial_ord_impl = None; if let Some((_, ref compares)) = attributes.compares { for compare in compares { if compare.is_ident("PartialEq") { let mut partial_eq_where = archive_where.clone(); for field in fields.unnamed.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_eq_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialEq<#ty> }, ); } let field_names = fields .unnamed .iter() .enumerate() .map(|(i, _)| Index::from(i)); partial_eq_impl = Some(quote! { impl #impl_generics PartialEq<#archived_type> for #name #ty_generics #partial_eq_where { #[inline] fn eq(&self, other: &#archived_type) -> bool { true #(&& other.#field_names.eq(&self.#field_names))* } } impl #impl_generics PartialEq<#name #ty_generics> for #archived_type #partial_eq_where { #[inline] fn eq(&self, other: &#name #ty_generics) -> bool { other.eq(self) } } }); } else if compare.is_ident("PartialOrd") { let mut partial_ord_where = archive_where.clone(); for field in fields.unnamed.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_ord_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialOrd<#ty> }, ); } let field_names = fields .unnamed .iter() .enumerate() .map(|(i, _)| Index::from(i)); partial_ord_impl = Some(quote! { impl #impl_generics PartialOrd<#archived_type> for #name #ty_generics #partial_ord_where { #[inline] fn partial_cmp(&self, other: &#archived_type) -> Option<::core::cmp::Ordering> { #( match other.#field_names.partial_cmp(&self.#field_names) { Some(::core::cmp::Ordering::Equal) => (), x => return x, } )* Some(::core::cmp::Ordering::Equal) } } impl #impl_generics PartialOrd<#name #ty_generics> for #archived_type #partial_ord_where { #[inline] fn partial_cmp(&self, other: &#name #ty_generics) -> Option<::core::cmp::Ordering> { other.partial_cmp(self) } } }); } else { return Err(Error::new_spanned(compare, "unrecognized compare argument, supported compares are PartialEq and PartialOrd")); } } } let copy_safe_impl = if cfg!(feature = "copy") && attributes.copy_safe.is_some() { let mut copy_safe_where = where_clause.clone(); for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field).unwrap(); copy_safe_where .predicates .push(parse_quote! { #ty: #rkyv_path::copy::ArchiveCopySafe }); } Some(quote! { unsafe impl #impl_generics #rkyv_path::copy::ArchiveCopySafe for #name #ty_generics #copy_safe_where {} }) } else { None }; ( quote! { #archived_def #[automatically_derived] #[doc = #resolver_doc] #vis struct #resolver #generics (#(#resolver_fields,)*) #archive_where; }, quote! 
{ impl #impl_generics Archive for #name #ty_generics #archive_where { type Archived = #archived_type; type Resolver = #resolver #ty_generics; // Some resolvers will be (), this allow is to prevent clippy from complaining #[allow(clippy::unit_arg)] #[inline] unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) { #(#resolve_fields)* } } #partial_eq_impl #partial_ord_impl #copy_safe_impl }, ) } Fields::Unit => { let archived_def = if attributes.archive_as.is_none() { Some(quote! { #[automatically_derived] #[doc = #archived_doc] #(#archive_attrs)* #repr #vis struct #archived_name #generics #where_clause; }) } else { None }; let mut partial_eq_impl = None; let mut partial_ord_impl = None; if let Some((_, ref compares)) = attributes.compares { for compare in compares { if compare.is_ident("PartialEq") { partial_eq_impl = Some(quote! { impl #impl_generics PartialEq<#archived_type> for #name #ty_generics #where_clause { #[inline] fn eq(&self, _: &#archived_type) -> bool { true } } impl #impl_generics PartialEq<#name #ty_generics> for #archived_type #where_clause { #[inline] fn eq(&self, _: &#name #ty_generics) -> bool { true } } }); } else if compare.is_ident("PartialOrd") { partial_ord_impl = Some(quote! { impl #impl_generics PartialOrd<#archived_type> for #name #ty_generics #where_clause { #[inline] fn partial_cmp(&self, _: &#archived_type) -> Option<::core::cmp::Ordering> { Some(::core::cmp::Ordering::Equal) } } impl #impl_generics PartialOrd<#name #ty_generics> for #archived_type #where_clause { #[inline] fn partial_cmp(&self, _:&#name #ty_generics) -> Option<::core::cmp::Ordering> { Some(::core::cmp::Ordering::Equal) } } }); } else { return Err(Error::new_spanned( compare, "unrecognized compare argument, supported compares are PartialEq and PartialOrd", )); } } } let copy_safe_impl = if cfg!(feature = "copy") && attributes.copy_safe.is_some() { Some(quote! { unsafe impl #impl_generics #rkyv_path::copy::ArchiveCopySafe for #name #ty_generics #where_clause {} }) } else { None }; ( quote! { #archived_def #[automatically_derived] #[doc = #resolver_doc] #vis struct #resolver #generics #where_clause; }, quote! { impl #impl_generics Archive for #name #ty_generics #where_clause { type Archived = #archived_type; type Resolver = #resolver #ty_generics; #[inline] unsafe fn resolve(&self, _: usize, _: Self::Resolver, _: *mut Self::Archived) {} } #partial_eq_impl #partial_ord_impl #copy_safe_impl }, ) } } } Data::Enum(ref data) => { let mut archive_where = where_clause.clone(); for variant in data.variants.iter() { match variant.fields { Fields::Named(ref fields) => { for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; archive_where .predicates .push(parse_quote! { #ty: #rkyv_path::Archive }); } } Fields::Unnamed(ref fields) => { for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; archive_where .predicates .push(parse_quote! { #ty: #rkyv_path::Archive }); } } Fields::Unit => (), } } let resolver_variants = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let fields = fields.named.iter().map(|f| { let field_name = f.ident.as_ref(); let ty = with_ty(f).unwrap(); let field_doc = format!( "The resolver for [`{}::{}::{}`]", name, variant, field_name.unwrap(), ); quote! 
{ #[doc = #field_doc] #field_name: #rkyv_path::Resolver<#ty> } }); let variant_doc = format!("The resolver for [`{}::{}`]", name, variant); quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant { #(#fields,)* } } } Fields::Unnamed(ref fields) => { let fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let ty = with_ty(f).unwrap(); let field_doc = format!("The resolver for [`{}::{}::{}`]", name, variant, i); quote! { #[doc = #field_doc] #rkyv_path::Resolver<#ty> } }); let variant_doc = format!("The resolver for [`{}::{}`]", name, variant); quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant(#(#fields,)*) } } Fields::Unit => { let variant_doc = format!("The resolver for [`{}::{}`]", name, variant); quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant } } } }); let resolve_arms = data.variants.iter().map(|v| { let variant = &v.ident; let archived_variant_name = Ident::new(&format!("ArchivedVariant{}", strip_raw(variant)), v.span()); match v.fields { Fields::Named(ref fields) => { let self_bindings = fields.named.iter().map(|f| { let name = &f.ident; let binding = Ident::new(&format!("self_{}", strip_raw(name.as_ref().unwrap())), name.span()); quote! { #name: #binding } }); let resolver_bindings = fields.named.iter().map(|f| { let name = &f.ident; let binding = Ident::new(&format!("resolver_{}", strip_raw(name.as_ref().unwrap())), name.span()); quote! { #name: #binding } }); let resolves = fields.named.iter().map(|f| { let name = &f.ident; let self_binding = Ident::new(&format!("self_{}", strip_raw(name.as_ref().unwrap())), name.span()); let resolver_binding = Ident::new(&format!("resolver_{}", strip_raw(name.as_ref().unwrap())), name.span()); let value = with_cast(f, parse_quote! { #self_binding }).unwrap(); quote! { let (fp, fo) = out_field!(out.#name); #rkyv_path::Archive::resolve(#value, pos + fp, #resolver_binding, fo); } }); quote! { #resolver::#variant { #(#resolver_bindings,)* } => { match self { #name::#variant { #(#self_bindings,)* } => { let out = out.cast::<#archived_variant_name #ty_generics>(); ::core::ptr::addr_of_mut!((*out).__tag) .write(ArchivedTag::#variant); #(#resolves)* }, #[allow(unreachable_patterns)] _ => ::core::hint::unreachable_unchecked(), } } } } Fields::Unnamed(ref fields) => { let self_bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { let name = Ident::new(&format!("self_{}", i), f.span()); quote! { #name } }); let resolver_bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { let name = Ident::new(&format!("resolver_{}", i), f.span()); quote! { #name } }); let resolves = fields.unnamed.iter().enumerate().map(|(i, f)| { let index = Index::from(i + 1); let self_binding = Ident::new(&format!("self_{}", i), f.span()); let resolver_binding = Ident::new(&format!("resolver_{}", i), f.span()); let value = with_cast(f, parse_quote! { #self_binding }).unwrap(); quote! { let (fp, fo) = out_field!(out.#index); #rkyv_path::Archive::resolve(#value, pos + fp, #resolver_binding, fo); } }); quote! { #resolver::#variant( #(#resolver_bindings,)* ) => { match self { #name::#variant(#(#self_bindings,)*) => { let out = out.cast::<#archived_variant_name #ty_generics>(); ::core::ptr::addr_of_mut!((*out).0).write(ArchivedTag::#variant); #(#resolves)* }, #[allow(unreachable_patterns)] _ => ::core::hint::unreachable_unchecked(), } } } } Fields::Unit => quote! 
{ #resolver::#variant => { out.cast::().write(ArchivedTag::#variant); } } } }); let (int_repr, int_repr_span) = match attributes.archived_repr.base_repr { // The base repr for enums may not be Rust, transparent, or C Some((BaseRepr::Transparent | BaseRepr::C, span)) => { return Err(Error::new(span, "enums may only be repr(i*) or repr(u*)")) } // The base repr for enums may be i*/u* Some((BaseRepr::Int(int_repr), span)) => (int_repr, span), // If unspecified, the base repr is set to u* with the smallest unsigned integer // that can represent the number of variants None => { let int_repr = match data.variants.len() as u128 { 0..=255 => IntRepr::U8, 256..=65_535 => IntRepr::U16, 65_536..=4_294_967_295 => IntRepr::U32, 4_294_967_296..=18_446_744_073_709_551_615 => IntRepr::U64, _ => IntRepr::U128, }; (int_repr, Span::call_site()) } }; let repr = Repr { base_repr: Some((BaseRepr::Int(int_repr), int_repr_span)), modifier: attributes.archived_repr.modifier.clone(), }; let is_fieldless = data .variants .iter() .all(|v| matches!(v.fields, Fields::Unit)); #[cfg(all( not(feature = "arbitrary_enum_discriminant"), any(feature = "archive_le", feature = "archive_be") ))] if !is_fieldless && !matches!(int_repr, IntRepr::U8 | IntRepr::I8) { return Err(Error::new_spanned( name, "\ enums with variant data cannot have multibyte discriminants when using endian-aware features\n\ enabling the `arbitrary_enum_discriminant` feature will allow this behavior\ ", )); } let archived_def = if attributes.archive_as.is_none() { let archived_variants = data.variants.iter().enumerate().map(|(i, v)| { let variant = &v.ident; let discriminant = if is_fieldless || cfg!(feature = "arbitrary_enum_discriminant") { Some(int_repr.enum_discriminant(i)) } else { None }; match v.fields { Fields::Named(ref fields) => { let fields = fields.named.iter().map(|f| { let field_name = f.ident.as_ref(); let ty = with_ty(f).unwrap(); let vis = &f.vis; let field_doc = format!( "The archived counterpart of [`{}::{}::{}`]", name, variant, field_name.unwrap(), ); let archive_attrs = field_archive_attrs(f); quote! { #[doc = #field_doc] #(#[#archive_attrs])* #vis #field_name: #rkyv_path::Archived<#ty> } }); let variant_doc = format!("The archived counterpart of [`{}::{}`]", name, variant); quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant { #(#fields,)* } #discriminant } } Fields::Unnamed(ref fields) => { let fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let ty = with_ty(f).unwrap(); let vis = &f.vis; let field_doc = format!( "The archived counterpart of [`{}::{}::{}`]", name, variant, i, ); let archive_attrs = field_archive_attrs(f); quote! { #[doc = #field_doc] #(#[#archive_attrs])* #vis #rkyv_path::Archived<#ty> } }); let variant_doc = format!("The archived counterpart of [`{}::{}`]", name, variant); quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant(#(#fields,)*) #discriminant } } Fields::Unit => { let variant_doc = format!("The archived counterpart of [`{}::{}`]", name, variant); quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant #discriminant } } } }); Some(quote! { #[automatically_derived] #[doc = #archived_doc] #(#archive_attrs)* #repr #vis enum #archived_name #generics #archive_where { #(#archived_variants,)* } }) } else { None }; let archived_variant_tags = data.variants.iter().enumerate().map(|(i, v)| { let variant = &v.ident; let discriminant = int_repr.enum_discriminant(i); quote! 
{ #variant #discriminant } }); let archived_variant_structs = data.variants.iter().map(|v| { let variant = &v.ident; let archived_variant_name = Ident::new(&format!("ArchivedVariant{}", strip_raw(variant)), v.span()); match v.fields { Fields::Named(ref fields) => { let fields = fields.named.iter().map(|f| { let name = &f.ident; let ty = with_ty(f).unwrap(); quote! { #name: Archived<#ty> } }); quote! { #[repr(C)] struct #archived_variant_name #generics #archive_where { __tag: ArchivedTag, #(#fields,)* __phantom: PhantomData<#name #ty_generics>, } } } Fields::Unnamed(ref fields) => { let fields = fields.unnamed.iter().map(|f| { let ty = with_ty(f).unwrap(); quote! { Archived<#ty> } }); quote! { #[repr(C)] struct #archived_variant_name #generics (ArchivedTag, #(#fields,)* PhantomData<#name #ty_generics>) #archive_where; } } Fields::Unit => quote! {} } }); let mut partial_eq_impl = None; let mut partial_ord_impl = None; if let Some((_, ref compares)) = attributes.compares { for compare in compares { if compare.is_ident("PartialEq") { let mut partial_eq_where = archive_where.clone(); for variant in data.variants.iter() { match variant.fields { Fields::Named(ref fields) => { for field in fields.named.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_eq_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialEq<#ty> }, ); } } Fields::Unnamed(ref fields) => { for field in fields.unnamed.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_eq_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialEq<#ty> }, ); } } Fields::Unit => (), } } let variant_impls = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let field_names = fields.named.iter() .map(|f| &f.ident) .collect::>(); let self_bindings = fields.named.iter().map(|f| { f.ident.as_ref().map(|ident| { Ident::new(&format!("self_{}", strip_raw(ident)), ident.span()) }) }).collect::>(); let other_bindings = fields.named.iter().map(|f| { f.ident.as_ref().map(|ident| { Ident::new(&format!("other_{}", strip_raw(ident)), ident.span()) }) }).collect::>(); quote! { #name::#variant { #(#field_names: #self_bindings,)* } => match other { #archived_name::#variant { #(#field_names: #other_bindings,)* } => true #(&& #other_bindings.eq(#self_bindings))*, #[allow(unreachable_patterns)] _ => false, } } } Fields::Unnamed(ref fields) => { let self_bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { Ident::new(&format!("self_{}", i), f.span()) }).collect::>(); let other_bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { Ident::new(&format!("other_{}", i), f.span()) }).collect::>(); quote! { #name::#variant(#(#self_bindings,)*) => match other { #archived_name::#variant(#(#other_bindings,)*) => true #(&& #other_bindings.eq(#self_bindings))*, #[allow(unreachable_patterns)] _ => false, } } } Fields::Unit => quote! { #name::#variant => match other { #archived_name::#variant => true, #[allow(unreachable_patterns)] _ => false, } } } }); partial_eq_impl = Some(quote! 
{ impl #impl_generics PartialEq<#archived_type> for #name #ty_generics #partial_eq_where { #[inline] fn eq(&self, other: &#archived_type) -> bool { match self { #(#variant_impls,)* } } } impl #impl_generics PartialEq<#name #ty_generics> for #archived_type #partial_eq_where { #[inline] fn eq(&self, other: &#name #ty_generics) -> bool { other.eq(self) } } }); } else if compare.is_ident("PartialOrd") { let mut partial_ord_where = archive_where.clone(); for variant in data.variants.iter() { match variant.fields { Fields::Named(ref fields) => { for field in fields.named.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_ord_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialOrd<#ty> }, ); } } Fields::Unnamed(ref fields) => { for field in fields.unnamed.iter().filter(|f| { !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")) }) { let ty = &field.ty; let wrapped_ty = with_ty(field).unwrap(); partial_ord_where.predicates.push( parse_quote! { Archived<#wrapped_ty>: PartialOrd<#ty> }, ); } } Fields::Unit => (), } } let self_disc = data.variants.iter().enumerate().map(|(i, v)| { let variant = &v.ident; match v.fields { Fields::Named(_) => quote! { #name::#variant { .. } => #i }, Fields::Unnamed(_) => quote! { #name::#variant ( .. ) => #i }, Fields::Unit => quote! { #name::#variant => #i }, } }); let other_disc = data.variants.iter().enumerate().map(|(i, v)| { let variant = &v.ident; match v.fields { Fields::Named(_) => quote! { #archived_name::#variant { .. } => #i }, Fields::Unnamed(_) => quote! { #archived_name::#variant ( .. ) => #i }, Fields::Unit => quote! { #archived_name::#variant => #i }, } }); let variant_impls = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let field_names = fields.named.iter() .map(|f| &f.ident) .collect::>(); let self_bindings = fields.named.iter().map(|f| { f.ident.as_ref().map(|ident| { Ident::new(&format!("self_{}", strip_raw(ident)), ident.span()) }) }).collect::>(); let other_bindings = fields.named.iter().map(|f| { f.ident.as_ref().map(|ident| { Ident::new(&format!("other_{}", strip_raw(ident)), ident.span()) }) }).collect::>(); quote! { #name::#variant { #(#field_names: #self_bindings,)* } => match other { #archived_name::#variant { #(#field_names: #other_bindings,)* } => { #( match #other_bindings.partial_cmp(#self_bindings) { Some(::core::cmp::Ordering::Equal) => (), cmp => return cmp, } )* Some(::core::cmp::Ordering::Equal) } #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } } Fields::Unnamed(ref fields) => { let self_bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { Ident::new(&format!("self_{}", i), f.span()) }).collect::>(); let other_bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { Ident::new(&format!("other_{}", i), f.span()) }).collect::>(); quote! { #name::#variant(#(#self_bindings,)*) => match other { #archived_name::#variant(#(#other_bindings,)*) => { #( match #other_bindings.partial_cmp(#self_bindings) { Some(::core::cmp::Ordering::Equal) => (), cmp => return cmp, } )* Some(::core::cmp::Ordering::Equal) } #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } } Fields::Unit => quote! 
{ #name::#variant => match other { #archived_name::#variant => Some(::core::cmp::Ordering::Equal), #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } } }); partial_ord_impl = Some(quote! { impl #impl_generics PartialOrd<#archived_type> for #name #ty_generics #partial_ord_where { #[inline] fn partial_cmp(&self, other: &#archived_type) -> Option<::core::cmp::Ordering> { let self_disc = match self { #(#self_disc,)* }; let other_disc = match other { #(#other_disc,)* }; if self_disc == other_disc { match self { #(#variant_impls,)* } } else { self_disc.partial_cmp(&other_disc) } } } impl #impl_generics PartialOrd<#name #ty_generics> for #archived_type #partial_ord_where { #[inline] fn partial_cmp(&self, other: &#name #ty_generics) -> Option<::core::cmp::Ordering> { match other.partial_cmp(self) { Some(::core::cmp::Ordering::Less) => Some(::core::cmp::Ordering::Greater), Some(::core::cmp::Ordering::Greater) => Some(::core::cmp::Ordering::Less), cmp => cmp, } } } }); } else { return Err(Error::new_spanned(compare, "unrecognized compare argument, supported compares are PartialEq (PartialOrd is not supported for enums)")); } } } let copy_safe_impl = if cfg!(feature = "copy") && attributes.copy_safe.is_some() { let mut copy_safe_where = where_clause.clone(); for variant in data.variants.iter() { match variant.fields { Fields::Named(ref fields) => { for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field).unwrap(); copy_safe_where .predicates .push(parse_quote! { #ty: #rkyv_path::copy::ArchiveCopySafe }); } } Fields::Unnamed(ref fields) => { for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field).unwrap(); copy_safe_where .predicates .push(parse_quote! { #ty: #rkyv_path::copy::ArchiveCopySafe }); } } Fields::Unit => (), } } Some(quote! { unsafe impl #impl_generics #rkyv_path::copy::ArchiveCopySafe for #name #ty_generics #copy_safe_where {} }) } else { None }; ( quote! { #archived_def #[automatically_derived] #[doc = #resolver_doc] #vis enum #resolver #generics #archive_where { #(#resolver_variants,)* } }, quote! { #[repr(#int_repr)] enum ArchivedTag { #(#archived_variant_tags,)* } #(#archived_variant_structs)* impl #impl_generics Archive for #name #ty_generics #archive_where { type Archived = #archived_type; type Resolver = #resolver #ty_generics; // Some resolvers will be (), this allow is to prevent clippy from complaining #[allow(clippy::unit_arg)] #[inline] unsafe fn resolve(&self, pos: usize, resolver: ::Resolver, out: *mut ::Archived) { match resolver { #(#resolve_arms,)* } } } #partial_eq_impl #partial_ord_impl #copy_safe_impl }, ) } Data::Union(_) => { return Err(Error::new_spanned( input, "Archive cannot be derived for unions", )) } }; Ok(quote! 
{ #archive_types #[automatically_derived] const _: () = { use ::core::marker::PhantomData; use #rkyv_path::{out_field, Archive, Archived}; #archive_impls }; }) } rkyv_derive-0.7.44/src/attributes.rs000064400000000000000000000217600072674642500156320ustar 00000000000000use crate::repr::Repr; use quote::ToTokens; use syn::{AttrStyle, DeriveInput, Error, Ident, Lit, LitStr, Meta, NestedMeta, Path}; #[derive(Default)] pub struct Attributes { pub archive_as: Option, pub archived: Option, pub resolver: Option, pub attrs: Vec, pub archived_repr: Repr, pub compares: Option<(Path, Vec)>, pub archive_bound: Option, pub serialize_bound: Option, pub deserialize_bound: Option, pub check_bytes: Option, pub copy_safe: Option, pub rkyv_path: Option, pub rkyv_path_str: Option, } fn try_set_attribute( attribute: &mut Option, value: T, name: &'static str, ) -> Result<(), Error> { if attribute.is_none() { *attribute = Some(value); Ok(()) } else { Err(Error::new_spanned( value, format!("{} already specified", name), )) } } fn parse_archive_attributes(attributes: &mut Attributes, meta: &Meta) -> Result<(), Error> { match meta { Meta::Path(path) => { if path.is_ident("check_bytes") { try_set_attribute(&mut attributes.check_bytes, path.clone(), "check_bytes") } else if path.is_ident("copy_safe") { try_set_attribute(&mut attributes.copy_safe, path.clone(), "copy_safe") } else { Err(Error::new_spanned(meta, "unrecognized archive argument")) } } Meta::List(list) => { if list.path.is_ident("compare") { if attributes.compares.is_none() { let mut compares = Vec::new(); for compare in list.nested.iter() { if let NestedMeta::Meta(Meta::Path(path)) = compare { compares.push(path.clone()); } else { return Err(Error::new_spanned( compare, "compare arguments must be compare traits to derive", )); } } attributes.compares = Some((list.path.clone(), compares)); Ok(()) } else { Err(Error::new_spanned(list, "compares already specified")) } } else if list.path.is_ident("bound") { for bound in list.nested.iter() { if let NestedMeta::Meta(Meta::NameValue(name_value)) = bound { if let Lit::Str(ref lit_str) = name_value.lit { if name_value.path.is_ident("archive") { try_set_attribute( &mut attributes.archive_bound, lit_str.clone(), "archive bound", )?; } else if name_value.path.is_ident("serialize") { try_set_attribute( &mut attributes.serialize_bound, lit_str.clone(), "serialize bound", )?; } else if name_value.path.is_ident("deserialize") { try_set_attribute( &mut attributes.deserialize_bound, lit_str.clone(), "deserialize bound", )?; } else { return Err(Error::new_spanned( bound, "bound must be either serialize or deserialize", )); } } else { return Err(Error::new_spanned( bound, "bound arguments must be a string", )); } } else { return Err(Error::new_spanned( bound, "bound arguments must be serialize or deserialize bounds to apply", )); } } Ok(()) } else if list.path.is_ident("repr") { // TODO: remove `archive(repr(...))` syntax attributes.archived_repr.parse_args(list.nested.iter()) } else { Err(Error::new_spanned( &list.path, "unrecognized archive argument", )) } } Meta::NameValue(meta) => { if meta.path.is_ident("archived") { if let Lit::Str(ref lit_str) = meta.lit { try_set_attribute( &mut attributes.archived, Ident::new(&lit_str.value(), lit_str.span()), "archived", ) } else { Err(Error::new_spanned(meta, "archived must be a string")) } } else if meta.path.is_ident("resolver") { if let Lit::Str(ref lit_str) = meta.lit { try_set_attribute( &mut attributes.resolver, Ident::new(&lit_str.value(), lit_str.span()), "resolver", ) } 
else { Err(Error::new_spanned(meta, "resolver must be a string")) } } else if meta.path.is_ident("as") { if let Lit::Str(ref lit_str) = meta.lit { try_set_attribute(&mut attributes.archive_as, lit_str.clone(), "archive as") } else { Err(Error::new_spanned(meta, "archive as must be a string")) } } else if meta.path.is_ident("crate") { if let Lit::Str(ref lit_str) = meta.lit { let stream = syn::parse_str(&lit_str.value())?; let tokens = crate::serde::respan::respan(stream, lit_str.span()); let path = syn::parse2(tokens)?; try_set_attribute(&mut attributes.rkyv_path, path, "crate")?; attributes.rkyv_path_str = Some(lit_str.clone()); Ok(()) } else { Err(Error::new_spanned(meta, "crate must be a string")) } } else { Err(Error::new_spanned(meta, "unrecognized archive argument")) } } } } pub fn parse_attributes(input: &DeriveInput) -> Result { let mut result = Attributes::default(); for attr in input.attrs.iter() { if let AttrStyle::Outer = attr.style { if attr.path.is_ident("archive") || attr.path.is_ident("archive_attr") { if let Meta::List(list) = attr.parse_meta()? { if list.path.is_ident("archive") { for nested in list.nested.iter() { if let NestedMeta::Meta(meta) = nested { parse_archive_attributes(&mut result, meta)?; } else { return Err(Error::new_spanned( nested, "archive arguments must be metas", )); } } } else if list.path.is_ident("archive_attr") { for nested in list.nested.iter() { if let NestedMeta::Meta(meta) = nested { if let Meta::List(list) = meta { if list.path.is_ident("repr") { result.archived_repr.parse_args(list.nested.iter())?; } else { result.attrs.push(meta.clone()); } } else { result.attrs.push(meta.clone()); } } else { return Err(Error::new_spanned( nested, "archive_attr arguments must be metas", )); } } } } else { return Err(Error::new_spanned( attr, "archive and archive_attr may only be structured list attributes", )); } } } } Ok(result) } rkyv_derive-0.7.44/src/deserialize.rs000064400000000000000000000255550072674642500157520ustar 00000000000000use crate::{ attributes::{parse_attributes, Attributes}, util::add_bounds, with::{make_with_ty, with_inner}, }; use proc_macro2::TokenStream; use quote::quote; use syn::{ parse_quote, punctuated::Punctuated, spanned::Spanned, Data, DeriveInput, Error, Fields, Generics, Ident, Index, }; pub fn derive(input: DeriveInput) -> Result { let attributes = parse_attributes(&input)?; derive_deserialize_impl(input, &attributes) } fn derive_deserialize_impl( mut input: DeriveInput, attributes: &Attributes, ) -> Result { let where_clause = input.generics.make_where_clause(); if let Some(ref bounds) = attributes.archive_bound { add_bounds(bounds, where_clause)?; } if let Some(ref bounds) = attributes.deserialize_bound { add_bounds(bounds, where_clause)?; } let mut impl_input_params = Punctuated::default(); impl_input_params.push(parse_quote! { __D: Fallible + ?Sized }); for param in input.generics.params.iter() { impl_input_params.push(param.clone()); } let impl_input_generics = Generics { lt_token: Some(Default::default()), params: impl_input_params, gt_token: Some(Default::default()), where_clause: input.generics.where_clause.clone(), }; let default_rkyv_path = parse_quote! 
{ ::rkyv }; let rkyv_path = attributes.rkyv_path.as_ref().unwrap_or(&default_rkyv_path); let with_ty = make_with_ty(rkyv_path); let name = &input.ident; let (impl_generics, _, _) = impl_input_generics.split_for_impl(); let (_, ty_generics, where_clause) = input.generics.split_for_impl(); let where_clause = where_clause.unwrap(); let deserialize_impl = match input.data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { let mut deserialize_where = where_clause.clone(); for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; deserialize_where .predicates .push(parse_quote! { #ty: Archive }); deserialize_where .predicates .push(parse_quote! { Archived<#ty>: Deserialize<#ty, __D> }); } let deserialize_fields = fields.named.iter().map(|f| { let name = &f.ident; let ty = with_ty(f).unwrap(); let value = with_inner( f, parse_quote! { Deserialize::<#ty, __D>::deserialize( &self.#name, deserializer, )? }, ) .unwrap(); quote! { #name: #value } }); quote! { impl #impl_generics Deserialize<#name #ty_generics, __D> for Archived<#name #ty_generics> #deserialize_where { #[inline] fn deserialize(&self, deserializer: &mut __D) -> ::core::result::Result<#name #ty_generics, __D::Error> { Ok(#name { #(#deserialize_fields,)* }) } } } } Fields::Unnamed(ref fields) => { let mut deserialize_where = where_clause.clone(); for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; deserialize_where .predicates .push(parse_quote! { #ty: Archive }); deserialize_where .predicates .push(parse_quote! { Archived<#ty>: Deserialize<#ty, __D> }); } let deserialize_fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let index = Index::from(i); let ty = with_ty(f).unwrap(); let value = with_inner( f, parse_quote! { Deserialize::<#ty, __D>::deserialize( &self.#index, deserializer, )? }, ) .unwrap(); quote! { #value } }); quote! { impl #impl_generics Deserialize<#name #ty_generics, __D> for Archived<#name #ty_generics> #deserialize_where { #[inline] fn deserialize(&self, deserializer: &mut __D) -> ::core::result::Result<#name #ty_generics, __D::Error> { Ok(#name( #(#deserialize_fields,)* )) } } } } Fields::Unit => quote! { impl #impl_generics Deserialize<#name #ty_generics, __D> for Archived<#name #ty_generics> #where_clause { #[inline] fn deserialize(&self, _: &mut __D) -> ::core::result::Result<#name #ty_generics, __D::Error> { Ok(#name) } } }, }, Data::Enum(ref data) => { let mut deserialize_where = where_clause.clone(); for variant in data.variants.iter() { match variant.fields { Fields::Named(ref fields) => { for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; deserialize_where .predicates .push(parse_quote! { #ty: Archive }); deserialize_where .predicates .push(parse_quote! { Archived<#ty>: Deserialize<#ty, __D> }); } } Fields::Unnamed(ref fields) => { for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; deserialize_where .predicates .push(parse_quote! { #ty: Archive }); deserialize_where .predicates .push(parse_quote! 
{ Archived<#ty>: Deserialize<#ty, __D> }); } } Fields::Unit => (), } } let deserialize_variants = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let bindings = fields.named.iter().map(|f| { let name = &f.ident; quote! { #name } }); let fields = fields.named.iter().map(|f| { let name = &f.ident; let ty = with_ty(f).unwrap(); let value = with_inner( f, parse_quote! { Deserialize::<#ty, __D>::deserialize( #name, deserializer, )? }, ) .unwrap(); quote! { #name: #value } }); quote! { Self::#variant { #(#bindings,)* } => #name::#variant { #(#fields,)* } } } Fields::Unnamed(ref fields) => { let bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { let name = Ident::new(&format!("_{}", i), f.span()); quote! { #name } }); let fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let binding = Ident::new(&format!("_{}", i), f.span()); let ty = with_ty(f).unwrap(); let value = with_inner( f, parse_quote! { Deserialize::<#ty, __D>::deserialize( #binding, deserializer, )? }, ) .unwrap(); quote! { #value } }); quote! { Self::#variant( #(#bindings,)* ) => #name::#variant(#(#fields,)*) } } Fields::Unit => { quote! { Self::#variant => #name::#variant } } } }); quote! { impl #impl_generics Deserialize<#name #ty_generics, __D> for Archived<#name #ty_generics> #deserialize_where { #[inline] fn deserialize(&self, deserializer: &mut __D) -> ::core::result::Result<#name #ty_generics, __D::Error> { Ok(match self { #(#deserialize_variants,)* }) } } } } Data::Union(_) => { return Err(Error::new_spanned( input, "Deserialize cannot be derived for unions", )) } }; Ok(quote! { #[automatically_derived] const _: () = { use #rkyv_path::{Archive, Archived, Deserialize, Fallible}; #deserialize_impl }; }) } rkyv_derive-0.7.44/src/lib.rs000064400000000000000000000124730072674642500142130ustar 00000000000000//! Procedural macros for `rkyv`. #![deny( rustdoc::broken_intra_doc_links, missing_docs, rustdoc::missing_crate_level_docs )] mod archive; mod attributes; mod deserialize; mod repr; mod serde; mod serialize; mod util; mod with; extern crate proc_macro; use syn::{parse_macro_input, DeriveInput}; /// Derives `Archive` for the labeled type. /// /// # Attributes /// /// Additional arguments can be specified using the `#[archive(...)]` and `#[archive_attr(...)]` /// attributes. /// /// `#[archive(...)]` takes the following arguments: /// /// - `archived = "..."`: Changes the name of the generated archived type to the given value. By /// default, archived types are named "Archived" + `the name of the type`. /// - `resolver = "..."`: Changes the name of the generated resolver type to the given value. By /// default, resolver types are named `the name of the type` + "Resolver". /// - `repr(...)`: *Deprecated, use `#[archive_attr(repr(...))]` instead.* Sets the representation /// for the archived type to the given representation. Available representation options may vary /// depending on features and type layout. /// - `compare(...)`: Implements common comparison operators between the original and archived /// types. Supported comparisons are `PartialEq` and `PartialOrd` (i.e. /// `#[archive(compare(PartialEq, PartialOrd))]`). /// - `bound(...)`: Adds additional bounds to trait implementations. This can be especially useful /// when dealing with recursive structures, where bounds may need to be omitted to prevent /// recursive type definitions. 
Use `archive = "..."` to specify `Archive` bounds, /// `serialize = "..."` to specify `Serialize` bounds, and `deserialize = "..."` to specify /// `Deserialize` bounds. /// - `check_bytes`: Derive `CheckBytes` on the archived type, in order to enable safe /// deserialization. Requires `validation` feature. Not compatible with `as = "..."`. In that /// case, use `#[derive(CheckBytes)]` on the archived type, and include a `use rkyv::bytecheck` /// statement. /// - `copy_safe`: States that the archived type is tightly packed with no padding bytes. This /// qualifies it for copy optimizations. (requires nightly) /// - `as = "..."`: Instead of generating a separate archived type, this type will archive as the /// named type. This is useful for types which are generic over their parameters. /// - `crate = "..."`: Chooses an alternative crate path to import rkyv from. /// /// `#[archive_attr(...)]` adds the attributes passed as arguments as attributes to the generated /// type. This is commonly used with attributes like `derive(...)` to derive trait implementations /// for the archived type. /// /// # Recursive types /// /// This derive macro automatically adds a type bound `field: Archive` for each field type. This can /// cause an overflow while evaluating trait bounds if the structure eventually references its own /// type, as the implementation of `Archive` for a struct depends on each field type implementing it /// as well. Adding the attribute `#[omit_bounds]` to a field will suppress this trait bound and /// allow recursive structures. This may be too coarse for some types, in which case additional type /// bounds may be required with `bound(...)`. /// /// # Wrappers /// /// Wrappers transparently customize archived types by providing different implementations of core /// traits. For example, references cannot be archived, but the `Inline` wrapper serializes a /// reference as if it were a field of the struct. Wrappers can be applied to fields using the /// `#[with(...)]` attribute. Multiple wrappers can be used, and they are applied in reverse order /// (i.e. `#[with(A, B, C)]` will archive `MyType` as `With, B, A>`). #[proc_macro_derive(Archive, attributes(archive, archive_attr, omit_bounds, with))] pub fn derive_archive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match archive::derive(derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } /// Derives `Serialize` for the labeled type. /// /// This macro also supports the `#[archive]`, `#[omit_bounds]`, and `#[with]` attributes. See /// [`Archive`] for more information. #[proc_macro_derive(Serialize, attributes(archive, omit_bounds, with))] pub fn derive_serialize(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match serialize::derive(derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } /// Derives `Deserialize` for the labeled type. /// /// This macro also supports the `#[archive]`, `#[omit_bounds]`, and `#[with]` attributes. See /// [`Archive`] for more information. 
#[proc_macro_derive(Deserialize, attributes(archive, omit_bounds, with))] pub fn derive_deserialize(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match deserialize::derive(derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } rkyv_derive-0.7.44/src/repr.rs000064400000000000000000000213770072674642500144200ustar 00000000000000use proc_macro2::{Literal, Punct, Spacing, Span, TokenStream}; use quote::{quote, ToTokens, TokenStreamExt}; use syn::{spanned::Spanned, Error, Lit, LitInt, Meta, NestedMeta}; #[derive(Clone, Copy)] pub enum IntRepr { I8, I16, I32, I64, I128, U8, U16, U32, U64, U128, } impl ToTokens for IntRepr { fn to_tokens(&self, tokens: &mut TokenStream) { match self { Self::I8 => tokens.append_all(quote! { i8 }), Self::I16 => tokens.append_all(quote! { i16 }), Self::I32 => tokens.append_all(quote! { i32 }), Self::I64 => tokens.append_all(quote! { i64 }), Self::I128 => tokens.append_all(quote! { i128 }), Self::U8 => tokens.append_all(quote! { u8 }), Self::U16 => tokens.append_all(quote! { u16 }), Self::U32 => tokens.append_all(quote! { u32 }), Self::U64 => tokens.append_all(quote! { u64 }), Self::U128 => tokens.append_all(quote! { u128 }), } } } impl IntRepr { #[inline] #[cfg(not(feature = "arbitrary_enum_discriminant"))] pub fn enum_discriminant(&self, _: usize) -> Option { None } #[inline] #[cfg(feature = "arbitrary_enum_discriminant")] pub fn enum_discriminant(&self, index: usize) -> EnumDiscriminant { #[cfg(not(any( all(target_endian = "little", feature = "archive_be"), all(target_endian = "big", feature = "archive_le"), )))] let value = index as u128; #[cfg(any( all(target_endian = "little", feature = "archive_be"), all(target_endian = "big", feature = "archive_le"), ))] let value = match self { Self::I8 => (index as i8).swap_bytes() as u128, Self::I16 => (index as i16).swap_bytes() as u128, Self::I32 => (index as i32).swap_bytes() as u128, Self::I64 => (index as i64).swap_bytes() as u128, Self::I128 => (index as i128).swap_bytes() as u128, Self::U8 => (index as u8).swap_bytes() as u128, Self::U16 => (index as u16).swap_bytes() as u128, Self::U32 => (index as u32).swap_bytes() as u128, Self::U64 => (index as u64).swap_bytes() as u128, Self::U128 => (index as u128).swap_bytes(), }; EnumDiscriminant { repr: *self, value } } } // None of these variants are constructed unless the arbitrary_enum_discriminant feature is enabled #[allow(dead_code)] pub struct EnumDiscriminant { repr: IntRepr, value: u128, } impl ToTokens for EnumDiscriminant { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append(Punct::new('=', Spacing::Alone)); tokens.append(match self.repr { IntRepr::I8 => Literal::i8_suffixed(self.value as i8), IntRepr::I16 => Literal::i16_suffixed(self.value as i16), IntRepr::I32 => Literal::i32_suffixed(self.value as i32), IntRepr::I64 => Literal::i64_suffixed(self.value as i64), IntRepr::I128 => Literal::i128_suffixed(self.value as i128), IntRepr::U8 => Literal::u8_suffixed(self.value as u8), IntRepr::U16 => Literal::u16_suffixed(self.value as u16), IntRepr::U32 => Literal::u32_suffixed(self.value as u32), IntRepr::U64 => Literal::u64_suffixed(self.value as u64), IntRepr::U128 => Literal::u128_suffixed(self.value), }); } } #[derive(Clone, Copy)] pub enum BaseRepr { C, // structs only Transparent, // enums only Int(IntRepr), } impl ToTokens for BaseRepr { fn to_tokens(&self, tokens: &mut TokenStream) { 
match self { BaseRepr::C => tokens.append_all(quote! { C }), BaseRepr::Transparent => tokens.append_all(quote! { transparent }), BaseRepr::Int(int_repr) => tokens.append_all(quote! { #int_repr }), } } } #[derive(Clone)] pub enum Modifier { // structs only Packed, Align(LitInt), } impl ToTokens for Modifier { fn to_tokens(&self, tokens: &mut TokenStream) { match self { Modifier::Packed => tokens.append_all(quote! { packed }), Modifier::Align(n) => tokens.append_all(quote! { align(#n) }), } } } #[derive(Clone, Default)] pub struct Repr { pub base_repr: Option<(BaseRepr, Span)>, pub modifier: Option<(Modifier, Span)>, } impl Repr { fn try_set_modifier( &mut self, modifier: Modifier, spanned: S, ) -> Result<(), Error> { if self.modifier.is_some() { Err(Error::new_spanned( spanned, "only one repr modifier may be specified", )) } else { self.modifier = Some((modifier, spanned.span())); Ok(()) } } fn try_set_base_repr(&mut self, repr: BaseRepr, spanned: S) -> Result<(), Error> { if self.base_repr.is_some() { Err(Error::new_spanned( spanned, "only one repr may be specified", )) } else { self.base_repr = Some((repr, spanned.span())); Ok(()) } } pub fn parse_args<'a>( &mut self, args: impl Iterator, ) -> Result<(), Error> { for arg in args { if let NestedMeta::Meta(meta) = arg { match meta { Meta::Path(path) => { if path.is_ident("packed") { self.try_set_modifier(Modifier::Packed, path)?; } else { let parsed_repr = if path.is_ident("transparent") { BaseRepr::Transparent } else if path.is_ident("C") { BaseRepr::C } else if path.is_ident("i8") { BaseRepr::Int(IntRepr::I8) } else if path.is_ident("i16") { BaseRepr::Int(IntRepr::I16) } else if path.is_ident("i32") { BaseRepr::Int(IntRepr::I32) } else if path.is_ident("i64") { BaseRepr::Int(IntRepr::I64) } else if path.is_ident("i128") { BaseRepr::Int(IntRepr::I128) } else if path.is_ident("u8") { BaseRepr::Int(IntRepr::U8) } else if path.is_ident("u16") { BaseRepr::Int(IntRepr::U16) } else if path.is_ident("u32") { BaseRepr::Int(IntRepr::U32) } else if path.is_ident("u64") { BaseRepr::Int(IntRepr::U64) } else if path.is_ident("u128") { BaseRepr::Int(IntRepr::U128) } else { return Err(Error::new_spanned( path, "invalid repr, available reprs are transparent, C, i* and u*", )); }; self.try_set_base_repr(parsed_repr, path)?; } } Meta::List(list) => { if list.path.is_ident("align") { if list.nested.len() != 1 { return Err(Error::new_spanned(list, "missing arguments to align")); } else if let Some(NestedMeta::Lit(Lit::Int(alignment))) = list.nested.first() { self.try_set_modifier( Modifier::Align(alignment.clone()), alignment, )?; } } } _ => return Err(Error::new_spanned(meta, "invalid repr argument")), } } else { return Err(Error::new_spanned(arg, "invalid repr argument")); } } Ok(()) } } impl ToTokens for Repr { fn to_tokens(&self, tokens: &mut TokenStream) { let base_repr = self.base_repr.as_ref().map(|(b, _)| b); let base_repr_iter = base_repr.iter(); let modifier = self.modifier.as_ref().map(|(m, _)| m); let modifier_iter = modifier.iter(); tokens.append_all(quote! 
rkyv_derive-0.7.44/src/serde/LICENSE000064400000000000000000000017760072674642500152060ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.rkyv_derive-0.7.44/src/serde/mod.rs000064400000000000000000000000420072674642500153130ustar 00000000000000pub mod receiver; pub mod respan; rkyv_derive-0.7.44/src/serde/receiver.rs000064400000000000000000000236520072674642500163520ustar 00000000000000// Code in this file is taken whole or in part from serde: https://github.com/serde-rs/serde // The original license for this code is included in LICENSE use super::respan::respan; use proc_macro2::Span; use quote::ToTokens; use std::mem; use syn::punctuated::Punctuated; use syn::{ parse_quote, Data, DeriveInput, Expr, ExprPath, GenericArgument, GenericParam, Generics, Macro, Path, PathArguments, QSelf, ReturnType, Token, Type, TypeParamBound, TypePath, WherePredicate, }; pub fn replace_receiver(input: &mut DeriveInput) { let self_ty = { let ident = &input.ident; let ty_generics = input.generics.split_for_impl().1; parse_quote!(#ident #ty_generics) }; let mut visitor = ReplaceReceiver(&self_ty); visitor.visit_generics_mut(&mut input.generics); visitor.visit_data_mut(&mut input.data); } struct ReplaceReceiver<'a>(&'a TypePath); impl ReplaceReceiver<'_> { fn self_ty(&self, span: Span) -> TypePath { let tokens = self.0.to_token_stream(); let respanned = respan(tokens, span); syn::parse2(respanned).unwrap() } fn self_to_qself(&self, qself: &mut Option<QSelf>, path: &mut Path) { if path.leading_colon.is_some() || path.segments[0].ident != "Self" { return; } if path.segments.len() == 1 { self.self_to_expr_path(path); return; } let span = path.segments[0].ident.span(); *qself = Some(QSelf { lt_token: Token![<](span), ty: Box::new(Type::Path(self.self_ty(span))), position: 0, as_token: None, gt_token: Token![>](span), }); path.leading_colon = Some(**path.segments.pairs().next().unwrap().punct().unwrap()); let segments = mem::replace(&mut path.segments, Punctuated::new()); path.segments = segments.into_pairs().skip(1).collect(); } fn self_to_expr_path(&self, path: &mut Path) { let self_ty = self.self_ty(path.segments[0].ident.span()); let variant = mem::replace(path, self_ty.path); for segment in &mut path.segments { if let PathArguments::AngleBracketed(bracketed) = &mut segment.arguments { if bracketed.colon2_token.is_none() && !bracketed.args.is_empty() { bracketed.colon2_token = Some(<Token![::]>::default()); } } } if variant.segments.len() > 1 { path.segments.push_punct(<Token![::]>::default());
path.segments.extend(variant.segments.into_pairs().skip(1)); } } }
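// Worked example (comment only, an illustration rather than crate source): for a
// deriving type `Foo<T>`, the helpers above turn a bare `Self` in type position
// into `Foo<T>`, and give qualified paths an explicit self type, so `Self::Assoc`
// becomes `<Foo<T>>::Assoc` and `Self::new()` becomes `<Foo<T>>::new()`.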
impl ReplaceReceiver<'_> { // `Self` -> `Receiver` fn visit_type_mut(&mut self, ty: &mut Type) { let span = if let Type::Path(node) = ty { if node.qself.is_none() && node.path.is_ident("Self") { node.path.segments[0].ident.span() } else { self.visit_type_path_mut(node); return; } } else { self.visit_type_mut_impl(ty); return; }; *ty = self.self_ty(span).into(); } // `Self::Assoc` -> `<Receiver>::Assoc` fn visit_type_path_mut(&mut self, ty: &mut TypePath) { if ty.qself.is_none() { self.self_to_qself(&mut ty.qself, &mut ty.path); } self.visit_type_path_mut_impl(ty); } // `Self::method` -> `<Receiver>::method` fn visit_expr_path_mut(&mut self, expr: &mut ExprPath) { if expr.qself.is_none() { self.self_to_qself(&mut expr.qself, &mut expr.path); } self.visit_expr_path_mut_impl(expr); } // Everything below is simply traversing the syntax tree. fn visit_type_mut_impl(&mut self, ty: &mut Type) { match ty { Type::Array(ty) => { self.visit_type_mut(&mut ty.elem); self.visit_expr_mut(&mut ty.len); } Type::BareFn(ty) => { for arg in &mut ty.inputs { self.visit_type_mut(&mut arg.ty); } self.visit_return_type_mut(&mut ty.output); } Type::Group(ty) => self.visit_type_mut(&mut ty.elem), Type::ImplTrait(ty) => { for bound in &mut ty.bounds { self.visit_type_param_bound_mut(bound); } } Type::Macro(ty) => self.visit_macro_mut(&mut ty.mac), Type::Paren(ty) => self.visit_type_mut(&mut ty.elem), Type::Path(ty) => { if let Some(qself) = &mut ty.qself { self.visit_type_mut(&mut qself.ty); } self.visit_path_mut(&mut ty.path); } Type::Ptr(ty) => self.visit_type_mut(&mut ty.elem), Type::Reference(ty) => self.visit_type_mut(&mut ty.elem), Type::Slice(ty) => self.visit_type_mut(&mut ty.elem), Type::TraitObject(ty) => { for bound in &mut ty.bounds { self.visit_type_param_bound_mut(bound); } } Type::Tuple(ty) => { for elem in &mut ty.elems { self.visit_type_mut(elem); } } Type::Infer(_) | Type::Never(_) | Type::Verbatim(_) => {} _ => {} } } fn visit_type_path_mut_impl(&mut self, ty: &mut TypePath) { if let Some(qself) = &mut ty.qself { self.visit_type_mut(&mut qself.ty); } self.visit_path_mut(&mut ty.path); } fn visit_expr_path_mut_impl(&mut self, expr: &mut ExprPath) { if let Some(qself) = &mut expr.qself { self.visit_type_mut(&mut qself.ty); } self.visit_path_mut(&mut expr.path); } fn visit_path_mut(&mut self, path: &mut Path) { for segment in &mut path.segments { self.visit_path_arguments_mut(&mut segment.arguments); } } fn visit_path_arguments_mut(&mut self, arguments: &mut PathArguments) { match arguments { PathArguments::None => {} PathArguments::AngleBracketed(arguments) => { for arg in &mut arguments.args { match arg { GenericArgument::Type(arg) => self.visit_type_mut(arg), GenericArgument::Binding(arg) => self.visit_type_mut(&mut arg.ty), GenericArgument::Lifetime(_) | GenericArgument::Constraint(_) | GenericArgument::Const(_) => {} } } } PathArguments::Parenthesized(arguments) => { for argument in &mut arguments.inputs { self.visit_type_mut(argument); } self.visit_return_type_mut(&mut arguments.output); } } } fn visit_return_type_mut(&mut self, return_type: &mut ReturnType) { match return_type { ReturnType::Default => {} ReturnType::Type(_, output) => self.visit_type_mut(output), } } fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) { match bound { TypeParamBound::Trait(bound) => self.visit_path_mut(&mut bound.path), TypeParamBound::Lifetime(_) => {} } } fn visit_generics_mut(&mut self, generics: &mut Generics) { for param in &mut
generics.params { match param { GenericParam::Type(param) => { for bound in &mut param.bounds { self.visit_type_param_bound_mut(bound); } } GenericParam::Lifetime(_) | GenericParam::Const(_) => {} } } if let Some(where_clause) = &mut generics.where_clause { for predicate in &mut where_clause.predicates { match predicate { WherePredicate::Type(predicate) => { self.visit_type_mut(&mut predicate.bounded_ty); for bound in &mut predicate.bounds { self.visit_type_param_bound_mut(bound); } } WherePredicate::Lifetime(_) | WherePredicate::Eq(_) => {} } } } } fn visit_data_mut(&mut self, data: &mut Data) { match data { Data::Struct(data) => { for field in &mut data.fields { self.visit_type_mut(&mut field.ty); } } Data::Enum(data) => { for variant in &mut data.variants { for field in &mut variant.fields { self.visit_type_mut(&mut field.ty); } } } Data::Union(_) => {} } } fn visit_expr_mut(&mut self, expr: &mut Expr) { match expr { Expr::Binary(expr) => { self.visit_expr_mut(&mut expr.left); self.visit_expr_mut(&mut expr.right); } Expr::Call(expr) => { self.visit_expr_mut(&mut expr.func); for arg in &mut expr.args { self.visit_expr_mut(arg); } } Expr::Cast(expr) => { self.visit_expr_mut(&mut expr.expr); self.visit_type_mut(&mut expr.ty); } Expr::Field(expr) => self.visit_expr_mut(&mut expr.base), Expr::Index(expr) => { self.visit_expr_mut(&mut expr.expr); self.visit_expr_mut(&mut expr.index); } Expr::Paren(expr) => self.visit_expr_mut(&mut expr.expr), Expr::Path(expr) => self.visit_expr_path_mut(expr), Expr::Unary(expr) => self.visit_expr_mut(&mut expr.expr), _ => {} } } fn visit_macro_mut(&mut self, _mac: &mut Macro) {} }
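// Hypothetical unit test sketch (not part of the crate source): assuming syn 1.x,
// it shows the effect of `replace_receiver` above: `Self` inside field types is
// rewritten to the concrete deriving type before the derives generate code. The
// module, type, and test names are illustrative only.
#[cfg(test)]
mod replace_receiver_sketch {
    use super::replace_receiver;
    use quote::ToTokens;
    use syn::{parse_quote, Data, DeriveInput};

    #[test]
    fn self_in_field_types_becomes_concrete_type() {
        let mut input: DeriveInput = parse_quote! {
            struct Node {
                next: Option<Box<Self>>,
            }
        };
        replace_receiver(&mut input);

        let field_ty = match &input.data {
            Data::Struct(data) => data
                .fields
                .iter()
                .next()
                .unwrap()
                .ty
                .to_token_stream()
                .to_string(),
            _ => unreachable!(),
        };
        // `Self` is gone; the concrete ident `Node` has been spliced in instead.
        assert!(field_ty.contains("Node"));
        assert!(!field_ty.contains("Self"));
    }
}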
rkyv_derive-0.7.44/src/serde/respan.rs000064400000000000000000000011360072674642500160310ustar 00000000000000// Code in this file is taken whole or in part from serde: https://github.com/serde-rs/serde // The original license for this code is included in LICENSE use proc_macro2::{Group, Span, TokenStream, TokenTree}; pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream { stream .into_iter() .map(|token| respan_token(token, span)) .collect() } fn respan_token(mut token: TokenTree, span: Span) -> TokenTree { if let TokenTree::Group(g) = &mut token { *g = Group::new(g.delimiter(), respan(g.stream(), span)); } token.set_span(span); token } rkyv_derive-0.7.44/src/serialize.rs000064400000000000000000000222610072674642500154300ustar 00000000000000use crate::{ attributes::{parse_attributes, Attributes}, util::{add_bounds, strip_raw}, with::{make_with_cast, make_with_ty}, }; use proc_macro2::TokenStream; use quote::quote; use syn::{ parse_quote, punctuated::Punctuated, spanned::Spanned, Data, DeriveInput, Error, Fields, Generics, Ident, Index, }; pub fn derive(input: DeriveInput) -> Result<TokenStream, Error> { let attributes = parse_attributes(&input)?; derive_serialize_impl(input, &attributes) } fn derive_serialize_impl( mut input: DeriveInput, attributes: &Attributes, ) -> Result<TokenStream, Error> { let where_clause = input.generics.make_where_clause(); if let Some(ref bounds) = attributes.archive_bound { add_bounds(bounds, where_clause)?; } if let Some(ref bounds) = attributes.serialize_bound { add_bounds(bounds, where_clause)?; } let mut impl_input_params = Punctuated::default(); impl_input_params.push(parse_quote!
{ __S: Fallible + ?Sized }); for param in input.generics.params.iter() { impl_input_params.push(param.clone()); } let impl_input_generics = Generics { lt_token: Some(Default::default()), params: impl_input_params, gt_token: Some(Default::default()), where_clause: input.generics.where_clause.clone(), }; let default_rkyv_path = parse_quote! { ::rkyv }; let rkyv_path = attributes.rkyv_path.as_ref().unwrap_or(&default_rkyv_path); let with_ty = make_with_ty(rkyv_path); let with_cast = make_with_cast(rkyv_path); let name = &input.ident; let (impl_generics, _, _) = impl_input_generics.split_for_impl(); let (_, ty_generics, where_clause) = input.generics.split_for_impl(); let where_clause = where_clause.unwrap(); let resolver = attributes.resolver.as_ref().map_or_else( || Ident::new(&format!("{}Resolver", strip_raw(name)), name.span()), |value| value.clone(), ); let serialize_impl = match input.data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { let mut serialize_where = where_clause.clone(); for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; serialize_where .predicates .push(parse_quote! { #ty: Serialize<__S> }); } let resolver_values = fields.named.iter().map(|f| { let name = &f.ident; let field = with_cast(f, parse_quote! { &self.#name }).unwrap(); quote! { #name: Serialize::<__S>::serialize(#field, serializer)? } }); quote! { impl #impl_generics Serialize<__S> for #name #ty_generics #serialize_where { #[inline] fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> { Ok(#resolver { #(#resolver_values,)* }) } } } } Fields::Unnamed(ref fields) => { let mut serialize_where = where_clause.clone(); for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; serialize_where .predicates .push(parse_quote! { #ty: Serialize<__S> }); } let resolver_values = fields.unnamed.iter().enumerate().map(|(i, f)| { let index = Index::from(i); let field = with_cast(f, parse_quote! { &self.#index }).unwrap(); quote! { Serialize::<__S>::serialize(#field, serializer)? } }); quote! { impl #impl_generics Serialize<__S> for #name #ty_generics #serialize_where { #[inline] fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> { Ok(#resolver( #(#resolver_values,)* )) } } } } Fields::Unit => { quote! { impl #impl_generics Serialize<__S> for #name #ty_generics #where_clause { #[inline] fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> { Ok(#resolver) } } } } }, Data::Enum(ref data) => { let mut serialize_where = where_clause.clone(); for variant in data.variants.iter() { match variant.fields { Fields::Named(ref fields) => { for field in fields .named .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; serialize_where .predicates .push(parse_quote! { #ty: Serialize<__S> }); } } Fields::Unnamed(ref fields) => { for field in fields .unnamed .iter() .filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds"))) { let ty = with_ty(field)?; serialize_where .predicates .push(parse_quote! { #ty: Serialize<__S> }); } } Fields::Unit => (), } } let serialize_arms = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let bindings = fields.named.iter().map(|f| { let name = &f.ident; quote!
{ #name } }); let fields = fields.named.iter().map(|f| { let name = &f.ident; let field = with_cast(f, parse_quote! { #name }).unwrap(); quote! { #name: Serialize::<__S>::serialize(#field, serializer)? } }); quote! { Self::#variant { #(#bindings,)* } => #resolver::#variant { #(#fields,)* } } } Fields::Unnamed(ref fields) => { let bindings = fields.unnamed.iter().enumerate().map(|(i, f)| { let name = Ident::new(&format!("_{}", i), f.span()); quote! { #name } }); let fields = fields.unnamed.iter().enumerate().map(|(i, f)| { let binding = Ident::new(&format!("_{}", i), f.span()); let field = with_cast(f, parse_quote! { #binding }).unwrap(); quote! { Serialize::<__S>::serialize(#field, serializer)? } }); quote! { Self::#variant( #(#bindings,)* ) => #resolver::#variant(#(#fields,)*) } } Fields::Unit => { quote! { Self::#variant => #resolver::#variant } } } }); quote! { impl #impl_generics Serialize<__S> for #name #ty_generics #serialize_where { #[inline] fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<<Self as Archive>::Resolver, __S::Error> { Ok(match self { #(#serialize_arms,)* }) } } } } Data::Union(_) => { return Err(Error::new_spanned( input, "Serialize cannot be derived for unions", )) } }; Ok(quote! { #[automatically_derived] const _: () = { use #rkyv_path::{Archive, Fallible, Serialize}; #serialize_impl }; }) } rkyv_derive-0.7.44/src/util.rs000064400000000000000000000011070072674642500144120ustar 00000000000000use proc_macro2::Ident; use syn::{punctuated::Punctuated, Error, LitStr, Token, WhereClause, WherePredicate}; pub fn add_bounds(bounds: &LitStr, where_clause: &mut WhereClause) -> Result<(), Error> { let clauses = bounds.parse_with(Punctuated::<WherePredicate, Token![,]>::parse_terminated)?; for clause in clauses { where_clause.predicates.push(clause); } Ok(()) } pub fn strip_raw(ident: &Ident) -> String { let as_string = ident.to_string(); as_string .strip_prefix("r#") .map(ToString::to_string) .unwrap_or(as_string) } rkyv_derive-0.7.44/src/with.rs000064400000000000000000000024400072674642500144110ustar 00000000000000use syn::{parse_quote, punctuated::Punctuated, token::Comma, Error, Expr, Field, Path, Type}; #[inline] pub fn with<B, F: Fn(B, &Path) -> B>(field: &Field, init: B, f: F) -> Result<B, Error> { let fields = field .attrs .iter() .filter_map(|attr| { if attr.path.is_ident("with") { Some(attr.parse_args_with(Punctuated::<Path, Comma>::parse_separated_nonempty)) } else { None } }) .collect::<Result<Vec<_>, _>>()?; Ok(fields.iter().flatten().rev().fold(init, f)) } #[inline] pub fn make_with_ty(rkyv_path: &Path) -> impl '_ + Fn(&Field) -> Result<Type, Error> { move |field| { with( field, field.ty.clone(), |ty, wrapper| parse_quote! { #rkyv_path::with::With<#ty, #wrapper> }, ) } } #[inline] pub fn make_with_cast(rkyv_path: &Path) -> impl '_ + Fn(&Field, Expr) -> Result<Expr, Error> { move |field, expr| { with( field, expr, |expr, wrapper| parse_quote! { #rkyv_path::with::With::<_, #wrapper>::cast(#expr) }, ) } } #[inline] pub fn with_inner(field: &Field, expr: Expr) -> Result<Expr, Error> { with(field, expr, |expr, _| parse_quote! { #expr.into_inner() }) }
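// Hypothetical unit test sketch (not part of the crate source): assuming syn 1.x,
// it demonstrates the fold in `with` above: `#[with(A, B)]` on a field wraps the
// field type so that the first-listed wrapper ends up outermost, i.e. `u32`
// becomes `::rkyv::with::With<::rkyv::with::With<u32, B>, A>`. Names are
// illustrative only.
#[cfg(test)]
mod with_wrapper_sketch {
    use super::make_with_ty;
    use quote::ToTokens;
    use syn::{parse_quote, Data, DeriveInput, Path, Type};

    #[test]
    fn wrappers_nest_first_listed_outermost() {
        let input: DeriveInput = parse_quote! {
            struct Example {
                #[with(A, B)]
                value: u32,
            }
        };
        let field = match &input.data {
            Data::Struct(data) => data.fields.iter().next().unwrap().clone(),
            _ => unreachable!(),
        };

        let rkyv_path: Path = parse_quote! { ::rkyv };
        let wrapped = make_with_ty(&rkyv_path)(&field).unwrap();
        let expected: Type = parse_quote! {
            ::rkyv::with::With<::rkyv::with::With<u32, B>, A>
        };

        // Compare the rendered tokens with whitespace stripped so the check is
        // insensitive to token-stream formatting.
        let render = |ty: &Type| ty.to_token_stream().to_string().replace(' ', "");
        assert_eq!(render(&wrapped), render(&expected));
    }
}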