rkyv_derive-0.8.9/.cargo_vcs_info.json0000644000000001510000000000100134000ustar
{
  "git": {
    "sha1": "9febe6f809a96c223b318ad67b7c8fed5cb307b0"
  },
  "path_in_vcs": "rkyv_derive"
}
rkyv_derive-0.8.9/Cargo.toml0000644000000027020000000000100114020ustar
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
rust-version = "1.81"
name = "rkyv_derive"
version = "0.8.9"
authors = ["David Koloski"]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Derive macro for rkyv"
readme = "README.md"
keywords = [
    "archive",
    "rkyv",
    "serialization",
    "zero-copy",
    "no_std",
]
categories = [
    "encoding",
    "no-std",
    "no-std::no-alloc",
]
license = "MIT"
repository = "https://github.com/rkyv/rkyv"

[package.metadata.docs.rs]
all-features = true

[lib]
name = "rkyv_derive"
path = "src/lib.rs"
proc-macro = true

[dependencies.proc-macro2]
version = "1"
features = ["proc-macro"]
default-features = false

[dependencies.quote]
version = "1"
features = ["proc-macro"]
default-features = false

[dependencies.syn]
version = "2.0.73"
features = [
    "clone-impls",
    "derive",
    "parsing",
    "printing",
    "proc-macro",
]
default-features = false

[features]
bytecheck = []
default = []
rkyv_derive-0.8.9/Cargo.toml.orig000064400000000000000000000014721046102023000150660ustar 00000000000000
[package]
name = "rkyv_derive"
description = "Derive macro for rkyv"
version.workspace = true
edition.workspace = true
rust-version.workspace = true
authors.workspace = true
license.workspace = true
readme = "../README.md"
repository.workspace = true
keywords = ["archive", "rkyv", "serialization", "zero-copy", "no_std"]
categories = ["encoding", "no-std", "no-std::no-alloc"]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
proc-macro = true

[dependencies]
proc-macro2 = { workspace = true, features = ["proc-macro"] }
syn = { workspace = true, features = ["clone-impls", "derive", "parsing", "printing", "proc-macro"] }
quote = { workspace = true, features = ["proc-macro"] }

[features]
default = []
bytecheck = []

[package.metadata.docs.rs]
all-features = true
rkyv_derive-0.8.9/LICENSE000064400000000000000000000020351046102023000132000ustar 00000000000000
Copyright 2021 David Koloski

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. rkyv_derive-0.8.9/README.md000064400000000000000000000102001046102023000134430ustar 00000000000000

# rkyv

rkyv (archive) is a zero-copy deserialization framework for Rust

Discord · crates.io · docs.rs · MIT license

# Resources

## Learning Materials

- The [rkyv book](https://rkyv.github.io/rkyv) covers the motivation,
  architecture, and major features of rkyv
- The [rkyv discord](https://discord.gg/65F6MdnbQh) is a great place to get
  help with specific issues and meet other people using rkyv

## Documentation

- [rkyv](https://docs.rs/rkyv), the core library
- [rkyv_dyn](https://docs.rs/rkyv_dyn), which adds trait object support to rkyv

## Benchmarks

- The [rust serialization benchmark](https://github.com/djkoloski/rust_serialization_benchmark)
  is a shootout-style benchmark comparing many Rust serialization solutions. It
  includes special benchmarks for zero-copy serialization solutions like rkyv.

## Sister Crates

- [rend](https://github.com/rkyv/rend), which rkyv uses for endian-agnostic features
- [bytecheck](https://github.com/rkyv/bytecheck), which rkyv uses for validation
- [rancor](https://github.com/rkyv/rancor), which rkyv uses for error handling
- [ptr_meta](https://github.com/rkyv/ptr_meta), which rkyv uses for pointer manipulation

# Example

```rust
use rkyv::{deserialize, rancor::Error, Archive, Deserialize, Serialize};

#[derive(Archive, Deserialize, Serialize, Debug, PartialEq)]
#[rkyv(
    // This will generate a PartialEq impl between our unarchived
    // and archived types
    compare(PartialEq),
    // Derives can be passed through to the generated type:
    derive(Debug),
)]
struct Test {
    int: u8,
    string: String,
    option: Option<Vec<i32>>,
}

fn main() {
    let value = Test {
        int: 42,
        string: "hello world".to_string(),
        option: Some(vec![1, 2, 3, 4]),
    };

    // Serializing is as easy as a single function call
    let _bytes = rkyv::to_bytes::<Error>(&value).unwrap();

    // Or you can customize your serialization for better performance or control
    // over resource usage
    use rkyv::{api::high::to_bytes_with_alloc, ser::allocator::Arena};

    let mut arena = Arena::new();
    let bytes =
        to_bytes_with_alloc::<_, Error>(&value, arena.acquire()).unwrap();

    // You can use the safe API for fast zero-copy deserialization
    let archived = rkyv::access::<ArchivedTest, Error>(&bytes[..]).unwrap();
    assert_eq!(archived, &value);

    // Or you can use the unsafe API for maximum performance
    let archived = unsafe { rkyv::access_unchecked::<ArchivedTest>(&bytes[..]) };
    assert_eq!(archived, &value);

    // And you can always deserialize back to the original type
    let deserialized = deserialize::<Test, Error>(archived).unwrap();
    assert_eq!(deserialized, value);
}
```

_Note: the safe API requires the `bytecheck` feature (enabled by default)_

_Read more about [available features](https://docs.rs/rkyv/latest/rkyv/#features)._

# Thanks

Thanks to all the sponsors that keep development sustainable. Special thanks to
the following sponsors for going above and beyond supporting rkyv:

## Bronze Sponsors

Climatiq

> Climatiq provides an embedded carbon intelligence solution that enables developers to automate GHG emission calculations based on verified scientific models. Its suite of products includes the largest dataset of emission factors, and intelligent APIs that integrate with any software platform for real time monitoring of greenhouse gas emissions. rkyv_derive-0.8.9/src/archive/enum.rs000064400000000000000000001000411046102023000157110ustar 00000000000000use proc_macro2::{Span, TokenStream}; use quote::{format_ident, quote}; use syn::{ parse_quote, spanned::Spanned as _, DataEnum, Error, Field, Fields, Generics, Ident, Index, Member, Path, }; use crate::{ archive::{ archived_doc, printing::Printing, resolver_doc, resolver_variant_doc, variant_doc, }, attributes::{Attributes, FieldAttributes}, util::{strip_generics_from_path, strip_raw}, }; pub fn impl_enum( printing: &Printing, generics: &Generics, attributes: &Attributes, data: &DataEnum, ) -> Result { let Printing { rkyv_path, name, archived_type, resolver_name, .. } = printing; if data.variants.len() > 256 { return Err(Error::new_spanned( &printing.name, "enums with more than 256 variants cannot derive Archive", )); } let mut public = TokenStream::new(); let mut private = TokenStream::new(); if attributes.as_type.is_none() { public.extend(generate_archived_type( printing, attributes, generics, data, )?); private.extend(generate_niching_impls( printing, attributes, generics, data, )?); } public.extend(generate_resolver_type( printing, attributes, generics, data, )?); let archived_variant_tags = data.variants.iter().map(|variant| { let ident = &variant.ident; let (eq, expr) = variant .discriminant .as_ref() .map(|(eq, expr)| (eq, expr)) .unzip(); quote! { #ident #eq #expr } }); private.extend(quote! { #[derive(PartialEq, PartialOrd)] #[repr(u8)] enum ArchivedTag { #(#archived_variant_tags,)* } }); private.extend(generate_variant_structs( printing, attributes, generics, data, )?); if let Some(ref compares) = attributes.compares { for compare in compares { if compare.is_ident("PartialEq") { public.extend(generate_partial_eq_impl( printing, attributes, generics, data, )?); } else if compare.is_ident("PartialOrd") { private.extend(generate_partial_ord_impl( printing, attributes, generics, data, )?); } else { return Err(Error::new_spanned( compare, "unrecognized compare argument, supported compares are \ PartialEq (PartialOrd is not supported for enums)", )); } } } let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let archive_impl = if let Some(ref remote) = attributes.remote { let resolve_arms = generate_resolve_arms( printing, attributes, generics, data, &strip_generics_from_path(remote.clone()), )?; quote! { impl #impl_generics #rkyv_path::with::ArchiveWith<#remote> for #name #ty_generics #where_clause { type Archived = #archived_type; type Resolver = #resolver_name #ty_generics; // Some resolvers will be (), this allow is to prevent clippy // from complaining #[allow(clippy::unit_arg)] fn resolve_with( field: &#remote, resolver: Self::Resolver, out: #rkyv_path::Place, ) { let __this = field; match resolver { #resolve_arms } } } } } else { let resolve_arms = generate_resolve_arms( printing, attributes, generics, data, &parse_quote!(#name), )?; quote! 
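// Non-remote derive: emit a plain `Archive` impl whose `resolve` binds
// `__this` to `self` and dispatches on the resolver enum to write the
// chosen variant's tag and fields into `out`.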
{ impl #impl_generics #rkyv_path::Archive for #name #ty_generics #where_clause { type Archived = #archived_type; type Resolver = #resolver_name #ty_generics; // Some resolvers will be (), this allow is to prevent clippy // from complaining #[allow(clippy::unit_arg)] fn resolve( &self, resolver: Self::Resolver, out: #rkyv_path::Place, ) { let __this = self; match resolver { #resolve_arms } } } } }; Ok(quote! { #public const _: () = { #private #archive_impl }; }) } fn generate_archived_type( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, ) -> Result { let Printing { rkyv_path, vis, name, archived_metas, archived_name, .. } = printing; let mut archived_variants = TokenStream::new(); for variant in &data.variants { let variant_name = &variant.ident; let (eq, expr) = variant .discriminant .as_ref() .map(|(eq, expr)| (eq, expr)) .unzip(); let variant_doc = variant_doc(name, variant_name); let mut variant_fields = TokenStream::new(); for field in variant.fields.iter() { let Field { vis, ident, colon_token, .. } = field; let field_attrs = FieldAttributes::parse(attributes, field)?; let field_ty = field_attrs.archived(rkyv_path, field); let field_metas = field_attrs.metas(); variant_fields.extend(quote! { #field_metas #vis #ident #colon_token #field_ty, }); } archived_variants.extend(match variant.fields { Fields::Named(_) => quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant_name { #variant_fields } #eq #expr, }, Fields::Unnamed(_) => quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant_name(#variant_fields) #eq #expr, }, Fields::Unit => quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant_name #eq #expr, }, }); } let where_clause = &generics.where_clause; let archived_doc = archived_doc(name); Ok(quote! { #[automatically_derived] #[doc = #archived_doc] #(#[#archived_metas])* #[repr(u8)] #vis enum #archived_name #generics #where_clause { #archived_variants } }) } fn generate_resolver_type( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, ) -> Result { let Printing { rkyv_path, vis, name, resolver_name, .. } = printing; let mut resolver_variants = TokenStream::new(); for variant in &data.variants { let variant_name = &variant.ident; let variant_doc = resolver_variant_doc(name, variant_name); let mut variant_fields = TokenStream::new(); for field in variant.fields.iter() { let Field { ident, colon_token, .. } = field; let field_attrs = FieldAttributes::parse(attributes, field)?; let field_ty = field_attrs.resolver(rkyv_path, field); variant_fields.extend(quote! { #ident #colon_token #field_ty, }); } resolver_variants.extend(match variant.fields { Fields::Named(_) => quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant_name { #variant_fields }, }, Fields::Unnamed(_) => quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant_name(#variant_fields), }, Fields::Unit => quote! { #[doc = #variant_doc] #[allow(dead_code)] #variant_name, }, }); } let where_clause = &generics.where_clause; let resolver_doc = resolver_doc(name); Ok(quote! { #[automatically_derived] #[doc = #resolver_doc] #vis enum #resolver_name #generics #where_clause { #resolver_variants } }) } fn generate_resolve_arms( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, name: &Path, ) -> Result { let Printing { rkyv_path, resolver_name, .. 
} = printing; let (_, ty_generics, _) = generics.split_for_impl(); let mut result = TokenStream::new(); for variant in &data.variants { let variant_name = &variant.ident; let archived_variant_name = format_ident!("ArchivedVariant{}", strip_raw(variant_name),); let members = variant .fields .members() .map(|member| match member { Member::Named(_) => member, Member::Unnamed(index) => Member::Unnamed(Index { index: index.index + 1, span: index.span, }), }) .collect::>(); let (self_bindings, resolver_bindings) = variant .fields .iter() .enumerate() .map(|(i, field)| { ( Ident::new(&format!("self_{}", i), field.span()), Ident::new(&format!("resolver_{}", i), field.span()), ) }) .unzip::<_, _, Vec<_>, Vec<_>>(); let resolves = variant .fields .iter() .map(|f| { let field_attrs = FieldAttributes::parse(attributes, f)?; Ok(field_attrs.resolve(rkyv_path, f)) }) .collect::, Error>>()?; match variant.fields { Fields::Named(_) => result.extend(quote! { #resolver_name::#variant_name { #(#members: #resolver_bindings,)* } => { match __this { #name::#variant_name { #(#members: #self_bindings,)*.. } => { let out = unsafe { out.cast_unchecked::< #archived_variant_name #ty_generics >() }; let tag_ptr = unsafe { ::core::ptr::addr_of_mut!( (*out.ptr()).__tag ) }; unsafe { tag_ptr.write(ArchivedTag::#variant_name); } #( let field_ptr = unsafe { ::core::ptr::addr_of_mut!( (*out.ptr()).#members ) }; let out_field = unsafe { #rkyv_path::Place::from_field_unchecked( out, field_ptr, ) }; #resolves( #self_bindings, #resolver_bindings, out_field, ); )* }, #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } }), Fields::Unnamed(_) => result.extend(quote! { #resolver_name::#variant_name( #(#resolver_bindings,)* ) => { match __this { #name::#variant_name(#(#self_bindings,)* ..) => { let out = unsafe { out.cast_unchecked::< #archived_variant_name #ty_generics >() }; let tag_ptr = unsafe { ::core::ptr::addr_of_mut!((*out.ptr()).0) }; unsafe { tag_ptr.write(ArchivedTag::#variant_name); } #( let field_ptr = unsafe { ::core::ptr::addr_of_mut!( (*out.ptr()).#members ) }; let out_field = unsafe { #rkyv_path::Place::from_field_unchecked( out, field_ptr, ) }; #resolves( #self_bindings, #resolver_bindings, out_field, ); )* }, #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } }), Fields::Unit => result.extend(quote! { #resolver_name::#variant_name => { let out = unsafe { out.cast_unchecked::() }; // SAFETY: `ArchivedTag` is `repr(u8)` and so is always // initialized. unsafe { out.write_unchecked(ArchivedTag::#variant_name); } } }), } } Ok(result) } fn generate_variant_structs( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, ) -> Result { let Printing { rkyv_path, name, .. } = printing; let (_, ty_generics, _) = generics.split_for_impl(); let where_clause = &generics.where_clause; let mut result = TokenStream::new(); for variant in &data.variants { let archived_variant_name = format_ident!("ArchivedVariant{}", strip_raw(&variant.ident),); let mut archived_fields = TokenStream::new(); for field in variant.fields.iter() { let field_attrs = FieldAttributes::parse(attributes, field)?; let archived = field_attrs.archived(rkyv_path, field); let Field { ident, colon_token, .. } = field; archived_fields.extend(quote! { #ident #colon_token #archived, }); } match variant.fields { Fields::Named(_) => result.extend(quote! 
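// Each variant gets a `#[repr(C)]` mirror struct with the tag as its
// first field, so `resolve` and the niching impls can compute stable
// field offsets with `addr_of_mut!`.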
{ #[repr(C)] struct #archived_variant_name #generics #where_clause { __tag: ArchivedTag, #archived_fields __phantom: ::core::marker::PhantomData< #name #ty_generics >, } }), Fields::Unnamed(_) => result.extend(quote! { #[repr(C)] struct #archived_variant_name #generics ( ArchivedTag, #archived_fields ::core::marker::PhantomData<#name #ty_generics>, ) #where_clause; }), Fields::Unit => (), } } Ok(result) } fn generate_partial_eq_impl( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, ) -> Result { let Printing { archived_name, archived_type, name, .. } = printing; let (impl_generics, ty_generics, _) = generics.split_for_impl(); let mut where_clause = generics.where_clause.clone().unwrap(); for field in data.variants.iter().flat_map(|v| v.fields.iter()) { let field_attrs = FieldAttributes::parse(attributes, field)?; if field_attrs.omit_bounds.is_none() { let field_attrs = FieldAttributes::parse(attributes, field)?; let ty = &field.ty; let archived = field_attrs.archived(&printing.rkyv_path, field); where_clause .predicates .push(parse_quote! { #archived: PartialEq<#ty> }); } } let variant_impls = data.variants.iter().map(|v| { let variant = &v.ident; let (self_fields, other_fields) = v .fields .iter() .enumerate() .map(|(i, f)| { ( Ident::new(&format!("self_{}", i), f.span()), Ident::new(&format!("other_{}", i), f.span()), ) }) .unzip::<_, _, Vec<_>, Vec<_>>(); match v.fields { Fields::Named(ref fields) => { let field_names = fields.named.iter().map(|f| &f.ident).collect::>(); quote! { #name::#variant { #(#field_names: #self_fields,)* } => match other { #archived_name::#variant { #(#field_names: #other_fields,)* } => true #(&& #other_fields.eq(#self_fields))*, #[allow(unreachable_patterns)] _ => false, } } } Fields::Unnamed(_) => { quote! { #name::#variant(#(#self_fields,)*) => match other { #archived_name::#variant(#(#other_fields,)*) => { true #(&& #other_fields.eq(#self_fields))* } #[allow(unreachable_patterns)] _ => false, } } } Fields::Unit => quote! { #name::#variant => match other { #archived_name::#variant => true, #[allow(unreachable_patterns)] _ => false, } }, } }); Ok(quote! { impl #impl_generics PartialEq<#archived_type> for #name #ty_generics #where_clause { fn eq(&self, other: &#archived_type) -> bool { match self { #(#variant_impls,)* } } } impl #impl_generics PartialEq<#name #ty_generics> for #archived_type #where_clause { fn eq(&self, other: &#name #ty_generics) -> bool { other.eq(self) } } }) } fn generate_partial_ord_impl( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, ) -> Result { let Printing { archived_name, archived_type, name, .. } = printing; let (impl_generics, ty_generics, _) = generics.split_for_impl(); let mut where_clause = generics.where_clause.clone().unwrap(); for field in data.variants.iter().flat_map(|v| v.fields.iter()) { let field_attrs = FieldAttributes::parse(attributes, field)?; if field_attrs.omit_bounds.is_none() { let ty = &field.ty; let archived = field_attrs.archived(&printing.rkyv_path, field); where_clause .predicates .push(parse_quote! { #archived: PartialOrd<#ty> }); } } let self_disc = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(_) => quote! { #name::#variant { .. } => ArchivedTag::#variant }, Fields::Unnamed(_) => quote! { #name::#variant ( .. ) => ArchivedTag::#variant }, Fields::Unit => quote! 
{ #name::#variant => ArchivedTag::#variant }, } }); let other_disc = data.variants.iter().map(|v| { let variant = &v.ident; match v.fields { Fields::Named(_) => quote! { #archived_name::#variant { .. } => ArchivedTag::#variant }, Fields::Unnamed(_) => quote! { #archived_name::#variant ( .. ) => ArchivedTag::#variant }, Fields::Unit => quote! { #archived_name::#variant => ArchivedTag::#variant }, } }); let variant_impls = data.variants.iter().map(|v| { let variant = &v.ident; let (self_fields, other_fields) = v .fields .iter() .enumerate() .map(|(i, f)| { ( Ident::new(&format!("self_{}", i), f.span()), Ident::new(&format!("other_{}", i), f.span()), ) }) .unzip::<_, _, Vec<_>, Vec<_>>(); match v.fields { Fields::Named(ref fields) => { let field_names = fields.named.iter().map(|f| &f.ident).collect::>(); quote! { #name::#variant { #(#field_names: #self_fields,)* } => match other { #archived_name::#variant { #(#field_names: #other_fields,)* } => { #( match #other_fields.partial_cmp(#self_fields) { Some(::core::cmp::Ordering::Equal) => (), cmp => return cmp.map( ::core::cmp::Ordering::reverse ), } )* Some(::core::cmp::Ordering::Equal) } #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } } Fields::Unnamed(_) => { quote! { #name::#variant(#(#self_fields,)*) => match other { #archived_name::#variant(#(#other_fields,)*) => { #( match #other_fields.partial_cmp(#self_fields) { Some(::core::cmp::Ordering::Equal) => (), cmp => return cmp.map( ::core::cmp::Ordering::reverse ), } )* Some(::core::cmp::Ordering::Equal) } #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } } } Fields::Unit => quote! { #name::#variant => match other { #archived_name::#variant => { Some(::core::cmp::Ordering::Equal) } #[allow(unreachable_patterns)] _ => unsafe { ::core::hint::unreachable_unchecked() }, } }, } }); Ok(quote! { impl #impl_generics PartialOrd<#archived_type> for #name #ty_generics #where_clause { fn partial_cmp( &self, other: &#archived_type, ) -> Option<::core::cmp::Ordering> { let self_disc = match self { #(#self_disc,)* }; let other_disc = match other { #(#other_disc,)* }; if self_disc == other_disc { match self { #(#variant_impls,)* } } else { self_disc.partial_cmp(&other_disc) } } } impl #impl_generics PartialOrd<#name #ty_generics> for #archived_type #where_clause { fn partial_cmp( &self, other: &#name #ty_generics, ) -> Option<::core::cmp::Ordering> { match other.partial_cmp(self) { Some(::core::cmp::Ordering::Less) => { Some(::core::cmp::Ordering::Greater) } Some(::core::cmp::Ordering::Greater) => { Some(::core::cmp::Ordering::Less) } cmp => cmp, } } } }) } fn generate_niching_impls( printing: &Printing, attributes: &Attributes, generics: &Generics, data: &DataEnum, ) -> Result { let Printing { rkyv_path, archived_type, .. } = printing; let (impl_generics, ty_generics, ..) = generics.split_for_impl(); let mut result = TokenStream::new(); let mut niches = Vec::new(); for variant in data.variants.iter() { let variant_name = &variant.ident; let archived_variant_name = format_ident!("ArchivedVariant{}", strip_raw(variant_name)); for (i, field) in variant.fields.iter().enumerate() { let field_attrs = FieldAttributes::parse(attributes, field)?; let archived_field = field_attrs.archived(rkyv_path, field); for niche in field_attrs.niches { let niche_tokens = niche.to_tokens(rkyv_path); // Best-effort attempt at improving the error message if the // same `Niching` implementor type is being used multiple times. 
// Otherwise, the compiler will inform about conflicting impls // which are not entirely unreasonable but may appear slightly // cryptic. if niches.contains(&niche) { return Err(Error::new_spanned( niche_tokens, "each niching type may be used at most once", )); } let field_member = if let Some(ref name) = field.ident { Member::Named(name.clone()) } else { Member::Unnamed(Index::from(i + 1)) }; let tag_member = if field.ident.is_some() { Member::Named(Ident::new("__tag", Span::call_site())) } else { Member::Unnamed(Index::from(0)) }; let field_niche = quote! { <#niche_tokens as #rkyv_path::niche::niching::Niching< #archived_field> > }; result.extend(quote! { #[automatically_derived] impl #impl_generics #rkyv_path::niche::niching::Niching<#archived_type> for #niche_tokens { unsafe fn is_niched(niched: *const #archived_type) -> bool { let variant = niched .cast::<#archived_variant_name #ty_generics>(); let tag = unsafe { ::core::ptr::addr_of!((*variant).#tag_member) }; if unsafe { &*tag != &ArchivedTag::#variant_name } { return false; } let field = unsafe { ::core::ptr::addr_of!((*variant).#field_member) }; unsafe { #field_niche::is_niched(field) } } fn resolve_niched( out: #rkyv_path::Place<#archived_type> ) { let out = unsafe { out.cast_unchecked::< #archived_variant_name #ty_generics >() }; let tag_ptr = unsafe { ::core::ptr::addr_of_mut!( (*out.ptr()).#tag_member ) }; unsafe { tag_ptr.write(ArchivedTag::#variant_name); } let field_ptr = unsafe { ::core::ptr::addr_of_mut!( (*out.ptr()).#field_member ) }; let out_field = unsafe { #rkyv_path::Place::from_field_unchecked( out, field_ptr, ) }; #field_niche::resolve_niched(out_field); } } }); niches.push(niche); } } } let mut iter = niches.iter(); while let Some(niche1) = iter.next() { let niche1_tokens = niche1.to_tokens(rkyv_path); for niche2 in iter.clone() { let niche2_tokens = niche2.to_tokens(rkyv_path); result.extend(quote! 
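// Each pair of distinct niches used by this enum gets symmetric
// `SharedNiching` impls so both niche types can coexist on the same
// archived value.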
{ #[automatically_derived] unsafe impl #impl_generics #rkyv_path::niche::niching::SharedNiching< #niche1_tokens, #niche2_tokens > for #archived_type {} #[automatically_derived] unsafe impl #impl_generics #rkyv_path::niche::niching::SharedNiching< #niche2_tokens, #niche1_tokens > for #archived_type {} }); } } Ok(result) } rkyv_derive-0.8.9/src/archive/mod.rs000064400000000000000000000064001046102023000155300ustar 00000000000000mod r#enum; pub mod printing; mod r#struct; use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{parse_quote, Data, DataStruct, DeriveInput, Error, Ident}; use crate::{ archive::printing::Printing, attributes::{Attributes, FieldAttributes}, util::iter_fields, }; pub fn derive(input: &mut DeriveInput) -> Result { let attributes = Attributes::parse(input)?; derive_archive_impl(input, &attributes) } fn archived_doc(name: &Ident) -> String { format!("An archived [`{}`]", name) } fn resolver_doc(name: &Ident) -> String { format!("The resolver for an archived [`{}`]", name) } fn variant_doc(name: &Ident, variant_name: &Ident) -> String { format!("The archived counterpart of [`{}::{}`]", name, variant_name) } fn resolver_variant_doc(name: &Ident, variant_name: &Ident) -> String { format!("The resolver for [`{}::{}`]", name, variant_name) } fn derive_archive_impl( input: &mut DeriveInput, attributes: &Attributes, ) -> Result { let printing = Printing::new(input, attributes)?; let where_clause = input.generics.make_where_clause(); if let Some(ref bounds) = attributes.archive_bounds { where_clause.predicates.extend(bounds.iter().cloned()); } for field in iter_fields(&input.data) { let field_attrs = FieldAttributes::parse(attributes, field)?; where_clause .predicates .extend(field_attrs.archive_bound(&printing.rkyv_path, field)); } let mut result = match &input.data { Data::Struct(DataStruct { fields, .. }) => r#struct::impl_struct( &printing, &input.generics, attributes, fields, )?, Data::Enum(enm) => { r#enum::impl_enum(&printing, &input.generics, attributes, enm)? } Data::Union(_) => { return Err(Error::new_spanned( input, "Archive cannot be derived for unions", )) } }; if attributes.as_type.is_none() { result .extend(impl_auto_trait(input, &printing, attributes, "Portable")?); } Ok(result) } fn impl_auto_trait( input: &DeriveInput, printing: &Printing, attributes: &Attributes, trait_name: &str, ) -> Result { let mut generics = input.generics.clone(); let where_clause = generics.make_where_clause(); let rkyv_path = &printing.rkyv_path; let trait_ident = Ident::new(trait_name, Span::call_site()); for field in iter_fields(&input.data) { let field_attrs = FieldAttributes::parse(attributes, field)?; let archived_field_ty = field_attrs.archived(rkyv_path, field); where_clause.predicates.push(parse_quote! { #archived_field_ty: #rkyv_path::traits::#trait_ident }); } let archived_name = &printing.archived_name; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); Ok(quote! { // SAFETY: These pseudo-auto traits are implemented for the archived // type if they are implemented for all of its fields. 
unsafe impl #impl_generics #rkyv_path::traits::#trait_ident for #archived_name #ty_generics #where_clause {} }) } rkyv_derive-0.8.9/src/archive/printing.rs000064400000000000000000000040731046102023000166070ustar 00000000000000use quote::format_ident; use syn::{ parse_quote, DeriveInput, Error, Ident, Meta, Path, Type, Visibility, }; use crate::{attributes::Attributes, util::strip_raw}; pub struct Printing { pub rkyv_path: Path, pub vis: Visibility, pub name: Ident, pub archived_name: Ident, pub archived_type: Type, pub resolver_name: Ident, pub archived_metas: Vec, } impl Printing { pub fn new( input: &DeriveInput, attributes: &Attributes, ) -> Result { let name = input.ident.clone(); let (_, ty_generics, _) = input.generics.split_for_impl(); let rkyv_path = attributes .crate_path .clone() .unwrap_or_else(|| parse_quote! { ::rkyv }); let base_name = strip_raw(&name); let archived_name = attributes .archived .clone() .unwrap_or_else(|| format_ident!("Archived{}", base_name)); let archived_type = attributes .as_type .clone() .unwrap_or_else(|| parse_quote! { #archived_name #ty_generics }); let resolver_name = attributes .resolver .clone() .unwrap_or_else(|| format_ident!("{}Resolver", base_name)); #[cfg(not(feature = "bytecheck"))] let archived_metas = attributes.metas.clone(); #[cfg(feature = "bytecheck")] let archived_metas = { let mut result = attributes.metas.clone(); result.push(parse_quote! { derive(#rkyv_path::bytecheck::CheckBytes) }); result.push(parse_quote! { bytecheck(crate = #rkyv_path::bytecheck) }); if let Some(attrs) = &attributes.bytecheck { result.push(parse_quote! { bytecheck(#attrs) }); } result }; Ok(Self { rkyv_path, vis: input.vis.clone(), name, archived_name, archived_type, resolver_name, archived_metas, }) } } rkyv_derive-0.8.9/src/archive/struct.rs000064400000000000000000000367161046102023000163120ustar 00000000000000use proc_macro2::{Ident, Span, TokenStream}; use quote::quote; use syn::{ parse_quote, punctuated::Punctuated, Error, Field, Fields, Generics, Index, Member, }; use crate::{ archive::{archived_doc, printing::Printing, resolver_doc}, attributes::{Attributes, FieldAttributes}, }; pub fn impl_struct( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result { let Printing { rkyv_path, name, archived_type, resolver_name, .. } = printing; let mut result = TokenStream::new(); if attributes.as_type.is_none() { result.extend(generate_archived_type( printing, generics, attributes, fields, )?); result.extend(generate_niching_impls( printing, generics, attributes, fields, )?); } result.extend(generate_resolver_type( printing, generics, attributes, fields, )?); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let archive_impl = if let Some(ref remote) = attributes.remote { let resolve_statements = generate_resolve_statements( printing, attributes, fields, Ident::new("field", Span::call_site()), )?; quote! { impl #impl_generics #rkyv_path::with::ArchiveWith<#remote> for #name #ty_generics #where_clause { type Archived = #archived_type; type Resolver = #resolver_name #ty_generics; // Some resolvers will be (), this allow is to prevent clippy // from complaining. 
#[allow(clippy::unit_arg)] fn resolve_with( field: &#remote, resolver: Self::Resolver, out: #rkyv_path::Place, ) { #resolve_statements } } } } else { let resolve_statements = generate_resolve_statements( printing, attributes, fields, Ident::new("self", Span::call_site()), )?; let copy_optimization = generate_copy_optimization(printing, generics, attributes, fields)?; quote! { impl #impl_generics #rkyv_path::Archive for #name #ty_generics #where_clause { type Archived = #archived_type; type Resolver = #resolver_name #ty_generics; #copy_optimization // Some resolvers will be (), this allow is to prevent clippy // from complaining. #[allow(clippy::unit_arg)] fn resolve( &self, resolver: Self::Resolver, out: #rkyv_path::Place, ) { #resolve_statements } } } }; result.extend(archive_impl); for compare in attributes.compares.iter().flat_map(Punctuated::iter) { if compare.is_ident("PartialEq") { result.extend(generate_partial_eq_impl( printing, generics, attributes, fields, )?); } else if compare.is_ident("PartialOrd") { result.extend(generate_partial_ord_impl( printing, generics, attributes, fields, )?); } else { return Err(Error::new_spanned( compare, "unrecognized compare argument, supported compares are \ PartialEq and PartialOrd", )); } } Ok(result) } fn generate_resolve_statements( printing: &Printing, attributes: &Attributes, fields: &Fields, this: Ident, ) -> Result { let rkyv_path = &printing.rkyv_path; let mut resolve_statements = TokenStream::new(); for (field, member) in fields.iter().zip(fields.members()) { let field_attrs = FieldAttributes::parse(attributes, field)?; let resolves = field_attrs.resolve(rkyv_path, field); let access_field = field_attrs.access_field(&this, &member); resolve_statements.extend(quote! { let field_ptr = unsafe { ::core::ptr::addr_of_mut!((*out.ptr()).#member) }; let field_out = unsafe { #rkyv_path::Place::from_field_unchecked(out, field_ptr) }; #resolves(#access_field, resolver.#member, field_out); }); } Ok(resolve_statements) } fn generate_archived_type( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result { let Printing { rkyv_path, vis, name, archived_name, archived_metas, .. } = printing; let mut archived_fields = TokenStream::new(); for field in fields { let Field { vis, ident, colon_token, .. } = field; let field_attrs = FieldAttributes::parse(attributes, field)?; let field_metas = field_attrs.metas(); let ty = field_attrs.archived(rkyv_path, field); archived_fields.extend(quote! { #field_metas #vis #ident #colon_token #ty, }); } let where_clause = &generics.where_clause; let body = match fields { Fields::Named(_) => quote! { #where_clause { #archived_fields } }, Fields::Unnamed(_) => quote! { (#archived_fields) #where_clause; }, Fields::Unit => quote! { #where_clause; }, }; let doc_string = archived_doc(name); Ok(quote! { #[automatically_derived] #[doc = #doc_string] #(#[#archived_metas])* #[repr(C)] #vis struct #archived_name #generics #body }) } fn generate_resolver_type( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result { let Printing { rkyv_path, vis, name, resolver_name, .. } = printing; let mut resolver_fields = TokenStream::new(); for field in fields.iter() { let Field { ident, colon_token, .. } = field; let field_attrs = FieldAttributes::parse(attributes, field)?; let ty = field_attrs.resolver(rkyv_path, field); resolver_fields.extend(quote! 
{ #ident #colon_token #ty, }); } let where_clause = &generics.where_clause; let body = match fields { Fields::Named(_) => quote! { #where_clause { #resolver_fields } }, Fields::Unnamed(_) => quote! { (#resolver_fields) #where_clause; }, Fields::Unit => quote! { #where_clause; }, }; let doc_string = resolver_doc(name); Ok(quote! { #[automatically_derived] #[doc = #doc_string] #vis struct #resolver_name #generics #body }) } fn generate_partial_eq_impl( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result { let Printing { rkyv_path, name, archived_type, .. } = printing; let mut where_clause = generics.where_clause.clone().unwrap(); for field in fields.iter() { let field_attrs = FieldAttributes::parse(attributes, field)?; if field_attrs.omit_bounds.is_none() { let ty = &field.ty; let archived_ty = field_attrs.archived(rkyv_path, field); where_clause .predicates .push(parse_quote! { #archived_ty: PartialEq<#ty> }); } } let members = fields.members(); let (impl_generics, ty_generics, _) = generics.split_for_impl(); Ok(quote! { impl #impl_generics PartialEq<#archived_type> for #name #ty_generics #where_clause { fn eq(&self, other: &#archived_type) -> bool { true #(&& other.#members.eq(&self.#members))* } } impl #impl_generics PartialEq<#name #ty_generics> for #archived_type #where_clause { fn eq(&self, other: &#name #ty_generics) -> bool { other.eq(self) } } }) } fn generate_partial_ord_impl( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result { let Printing { rkyv_path, name, archived_type, .. } = printing; let mut where_clause = generics.where_clause.as_ref().unwrap().clone(); for field in fields.iter() { let field_attrs = FieldAttributes::parse(attributes, field)?; if field_attrs.omit_bounds.is_none() { let ty = &field.ty; let archived_ty = field_attrs.archived(rkyv_path, field); where_clause .predicates .push(parse_quote! { #archived_ty: PartialOrd<#ty> }); } } let members = fields.members(); let (impl_generics, ty_generics, _) = generics.split_for_impl(); Ok(quote! { impl #impl_generics PartialOrd<#archived_type> for #name #ty_generics #where_clause { fn partial_cmp( &self, other: &#archived_type, ) -> Option<::core::cmp::Ordering> { #( match other.#members.partial_cmp(&self.#members) { Some(::core::cmp::Ordering::Equal) => (), x => return x.map(::core::cmp::Ordering::reverse), } )* Some(::core::cmp::Ordering::Equal) } } impl #impl_generics PartialOrd<#name #ty_generics> for #archived_type #where_clause { fn partial_cmp( &self, other: &#name #ty_generics, ) -> Option<::core::cmp::Ordering> { other.partial_cmp(self).map(::core::cmp::Ordering::reverse) } } }) } fn generate_copy_optimization( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result, Error> { if !generics.params.is_empty() { return Ok(None); } for f in fields.iter() { if FieldAttributes::parse(attributes, f)?.with.is_some() { return Ok(None); } } let Printing { rkyv_path, name, archived_type, .. } = printing; let field_sizes = fields.iter().map(|f| { let ty = &f.ty; quote! { ::core::mem::size_of::<#ty>() } }); let padding_check = quote! { 0 #(+ #field_sizes)* == ::core::mem::size_of::<#name>() }; let field_checks = fields.iter().zip(fields.members()).map(|(f, m)| { let ty = &f.ty; quote! { <#ty as #rkyv_path::Archive>::COPY_OPTIMIZATION.is_enabled() && ::core::mem::offset_of!(#name, #m) == ::core::mem::offset_of!(#archived_type, #m) } }); Ok(Some(quote! 
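// Enable rkyv's copy optimization only when the original struct has no
// padding (its size equals the sum of its field sizes) and every field
// type opts in and lives at the same offset in the archived struct.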
{ const COPY_OPTIMIZATION: #rkyv_path::traits::CopyOptimization = unsafe { #rkyv_path::traits::CopyOptimization::enable_if( #padding_check #(&& #field_checks)* ) }; })) } fn generate_niching_impls( printing: &Printing, generics: &Generics, attributes: &Attributes, fields: &Fields, ) -> Result { let Printing { rkyv_path, archived_type, .. } = printing; let (impl_generics, ..) = generics.split_for_impl(); let mut result = TokenStream::new(); let mut niches = Vec::new(); for (i, field) in fields.iter().enumerate() { let field_attrs = FieldAttributes::parse(attributes, field)?; let archived_field = field_attrs.archived(rkyv_path, field); for niche in field_attrs.niches { let niche_tokens = niche.to_tokens(rkyv_path); // Best-effort attempt at improving the error message if the same // `Niching` implementor type is being used multiple times. // Otherwise, the compiler will inform about conflicting impls which // are not entirely unreasonable but may appear slightly cryptic. if niches.contains(&niche) { return Err(Error::new_spanned( niche_tokens, "each niching type may be used at most once", )); } let field_member = if let Some(ref name) = field.ident { Member::Named(name.clone()) } else { Member::Unnamed(Index::from(i)) }; let field_niching = quote! { <#niche_tokens as #rkyv_path::niche::niching::Niching< #archived_field> > }; result.extend(quote! { #[automatically_derived] impl #impl_generics #rkyv_path::niche::niching::Niching<#archived_type> for #niche_tokens { unsafe fn is_niched(niched: *const #archived_type) -> bool { let field = unsafe { ::core::ptr::addr_of!((*niched).#field_member) }; unsafe { #field_niching::is_niched(field) } } fn resolve_niched(out: #rkyv_path::Place<#archived_type>) { let field_ptr = unsafe { ::core::ptr::addr_of_mut!( (*out.ptr()).#field_member ) }; let out_field = unsafe { #rkyv_path::Place::from_field_unchecked( out, field_ptr, ) }; #field_niching::resolve_niched(out_field); } } }); niches.push(niche); } } let mut iter = niches.iter(); while let Some(niche1) = iter.next() { let niche1_tokens = niche1.to_tokens(rkyv_path); for niche2 in iter.clone() { let niche2_tokens = niche2.to_tokens(rkyv_path); result.extend(quote! 
{ #[automatically_derived] unsafe impl #impl_generics #rkyv_path::niche::niching::SharedNiching< #niche1_tokens, #niche2_tokens > for #archived_type {} #[automatically_derived] unsafe impl #impl_generics #rkyv_path::niche::niching::SharedNiching< #niche2_tokens, #niche1_tokens > for #archived_type {} }); } } Ok(result) } rkyv_derive-0.8.9/src/attributes.rs000064400000000000000000000372161046102023000155270ustar 00000000000000use proc_macro2::{Span, TokenStream, TokenTree}; use quote::{quote, ToTokens}; use syn::{ meta::ParseNestedMeta, parenthesized, parse::Parse, parse_quote, punctuated::Punctuated, DeriveInput, Error, Field, Fields, Ident, Meta, Path, Token, Type, Variant, WherePredicate, }; fn try_set_attribute( attribute: &mut Option, value: T, name: &'static str, ) -> Result<(), Error> { if attribute.is_none() { *attribute = Some(value); Ok(()) } else { Err(Error::new_spanned( value, format!("{} already specified", name), )) } } #[derive(Default)] pub struct Attributes { pub as_type: Option, pub archived: Option, pub resolver: Option, pub remote: Option, pub metas: Vec, pub compares: Option>, pub archive_bounds: Option>, pub serialize_bounds: Option>, pub deserialize_bounds: Option>, pub bytecheck: Option, pub crate_path: Option, } impl Attributes { fn parse_meta(&mut self, meta: ParseNestedMeta<'_>) -> Result<(), Error> { if meta.path.is_ident("bytecheck") { let tokens = meta.input.step(|cursor| { if let Some((TokenTree::Group(group), rest)) = cursor.token_tree() { Ok((group.stream(), rest)) } else { Err(cursor.error("expected bytecheck attributes")) } })?; if cfg!(feature = "bytecheck") { try_set_attribute(&mut self.bytecheck, tokens, "bytecheck")?; } Ok(()) } else if meta.path.is_ident("compare") { let traits; parenthesized!(traits in meta.input); let traits = traits.parse_terminated(Path::parse, Token![,])?; try_set_attribute(&mut self.compares, traits, "compare") } else if meta.path.is_ident("archive_bounds") { let bounds; parenthesized!(bounds in meta.input); let clauses = bounds.parse_terminated(WherePredicate::parse, Token![,])?; try_set_attribute( &mut self.archive_bounds, clauses, "archive_bounds", ) } else if meta.path.is_ident("serialize_bounds") { let bounds; parenthesized!(bounds in meta.input); let clauses = bounds.parse_terminated(WherePredicate::parse, Token![,])?; try_set_attribute( &mut self.serialize_bounds, clauses, "serialize_bounds", ) } else if meta.path.is_ident("deserialize_bounds") { let bounds; parenthesized!(bounds in meta.input); let clauses = bounds.parse_terminated(WherePredicate::parse, Token![,])?; try_set_attribute( &mut self.deserialize_bounds, clauses, "deserialize_bounds", ) } else if meta.path.is_ident("archived") { try_set_attribute( &mut self.archived, meta.value()?.parse()?, "archived", ) } else if meta.path.is_ident("resolver") { try_set_attribute( &mut self.resolver, meta.value()?.parse()?, "resolver", ) } else if meta.path.is_ident("as") { meta.input.parse::()?; try_set_attribute( &mut self.as_type, meta.input.parse::()?, "as", ) } else if meta.path.is_ident("crate") { if meta.input.parse::().is_ok() { let path = meta.input.parse::()?; try_set_attribute(&mut self.crate_path, path, "crate") } else if meta.input.is_empty() || meta.input.peek(Token![,]) { try_set_attribute( &mut self.crate_path, parse_quote! 
{ crate }, "crate", ) } else { Err(meta.error("expected `crate` or `crate = ...`")) } } else if meta.path.is_ident("derive") { let metas; parenthesized!(metas in meta.input); self.metas.extend( metas .parse_terminated(Meta::parse, Token![,])? .into_iter() .map(|meta| parse_quote! { derive(#meta) }), ); Ok(()) } else if meta.path.is_ident("attr") { let metas; parenthesized!(metas in meta.input); self.metas .extend(metas.parse_terminated(Meta::parse, Token![,])?); Ok(()) } else if meta.path.is_ident("remote") { try_set_attribute( &mut self.remote, meta.value()?.parse()?, "remote", ) } else { Err(meta.error("unrecognized rkyv argument")) } } pub fn parse(input: &DeriveInput) -> Result { let mut result = Self::default(); for attr in input.attrs.iter() { if attr.path().is_ident("rkyv") { attr.parse_nested_meta(|meta| result.parse_meta(meta))?; } } if result.as_type.is_some() { if let Some(ref ident) = result.archived { return Err(Error::new_spanned( ident, "`archived = ...` may not be used with `as = ...` because \ no type is generated", )); } if let Some(first) = result.metas.first() { return Err(Error::new_spanned( first, "attributes may not be used with `as = ...`; place \ attributes on the archived type instead", )); } if result.bytecheck.is_some() { return Err(Error::new_spanned( result.bytecheck.unwrap(), "cannot generate a `CheckBytes` impl because `as = ...` \ does not generate an archived type", )); } } Ok(result) } pub fn crate_path(&self) -> Path { self.crate_path .clone() .unwrap_or_else(|| parse_quote! { ::rkyv }) } } #[derive(Default)] pub struct FieldAttributes { pub attrs: Punctuated, pub omit_bounds: Option, pub with: Option, pub getter: Option, pub niches: Vec, } impl FieldAttributes { fn parse_meta(&mut self, meta: ParseNestedMeta<'_>) -> Result<(), Error> { if meta.path.is_ident("attr") { let content; parenthesized!(content in meta.input); self.attrs = content.parse_terminated(Meta::parse, Token![,])?; Ok(()) } else if meta.path.is_ident("omit_bounds") { self.omit_bounds = Some(meta.path); Ok(()) } else if meta.path.is_ident("with") { meta.input.parse::()?; self.with = Some(meta.input.parse::()?); Ok(()) } else if meta.path.is_ident("getter") { meta.input.parse::()?; self.getter = Some(meta.input.parse::()?); Ok(()) } else if meta.path.is_ident("niche") { let niche = if meta.input.is_empty() { Niche::Default } else { meta.input.parse::()?; Niche::Type(meta.input.parse::()?) }; self.niches.push(niche); Ok(()) } else { Err(meta.error("unrecognized rkyv arguments")) } } pub fn parse( attributes: &Attributes, input: &Field, ) -> Result { let mut result = Self::default(); for attr in input.attrs.iter() { if attr.path().is_ident("rkyv") { attr.parse_nested_meta(|meta| result.parse_meta(meta))?; } } if result.getter.is_some() && attributes.remote.is_none() { return Err(Error::new_spanned( result.getter, "getters may only be used with remote derive", )); } Ok(result) } pub fn archive_bound( &self, rkyv_path: &Path, field: &Field, ) -> Option { if self.omit_bounds.is_some() { return None; } let ty = &field.ty; if let Some(with) = &self.with { Some(parse_quote! { #with: #rkyv_path::with::ArchiveWith<#ty> }) } else { Some(parse_quote! { #ty: #rkyv_path::Archive }) } } pub fn serialize_bound( &self, rkyv_path: &Path, field: &Field, ) -> Option { if self.omit_bounds.is_some() { return None; } let ty = &field.ty; if let Some(with) = &self.with { Some(parse_quote! { #with: #rkyv_path::with::SerializeWith<#ty, __S> }) } else { Some(parse_quote! 
{ #ty: #rkyv_path::Serialize<__S> }) } } pub fn deserialize_bound( &self, rkyv_path: &Path, field: &Field, ) -> Option { if self.omit_bounds.is_some() { return None; } let archived = self.archived(rkyv_path, field); let ty = &field.ty; if let Some(with) = &self.with { Some(parse_quote! { #with: #rkyv_path::with::DeserializeWith<#archived, #ty, __D> }) } else { Some(parse_quote! { #archived: #rkyv_path::Deserialize<#ty, __D> }) } } fn archive_item( &self, rkyv_path: &Path, field: &Field, name: &str, with_name: &str, ) -> TokenStream { let ty = &field.ty; if let Some(with) = &self.with { let ident = Ident::new(with_name, Span::call_site()); quote! { <#with as #rkyv_path::with::ArchiveWith<#ty>>::#ident } } else { let ident = Ident::new(name, Span::call_site()); quote! { <#ty as #rkyv_path::Archive>::#ident } } } pub fn archived(&self, rkyv_path: &Path, field: &Field) -> TokenStream { self.archive_item(rkyv_path, field, "Archived", "Archived") } pub fn resolver(&self, rkyv_path: &Path, field: &Field) -> TokenStream { self.archive_item(rkyv_path, field, "Resolver", "Resolver") } pub fn resolve(&self, rkyv_path: &Path, field: &Field) -> TokenStream { self.archive_item(rkyv_path, field, "resolve", "resolve_with") } pub fn serialize(&self, rkyv_path: &Path, field: &Field) -> TokenStream { let ty = &field.ty; if let Some(with) = &self.with { quote! { < #with as #rkyv_path::with::SerializeWith<#ty, __S> >::serialize_with } } else { quote! { <#ty as #rkyv_path::Serialize<__S>>::serialize } } } pub fn deserialize(&self, rkyv_path: &Path, field: &Field) -> TokenStream { let ty = &field.ty; let archived = self.archived(rkyv_path, field); if let Some(with) = &self.with { quote! { < #with as #rkyv_path::with::DeserializeWith< #archived, #ty, __D, > >::deserialize_with } } else { quote! { <#archived as #rkyv_path::Deserialize<#ty, __D>>::deserialize } } } pub fn access_field( &self, this: &Ident, member: &impl ToTokens, ) -> TokenStream { if let Some(ref getter) = self.getter { quote! { ::core::borrow::Borrow::borrow(&#getter(#this)) } } else { quote! { &#this.#member } } } pub fn metas(&self) -> TokenStream { let mut result = TokenStream::new(); #[cfg(feature = "bytecheck")] if self.omit_bounds.is_some() { result.extend(quote! { #[bytecheck(omit_bounds)] }); } for attr in self.attrs.iter() { result.extend(quote! { #[#attr] }); } result } } #[derive(Default)] pub struct VariantAttributes { pub other: Option, } impl VariantAttributes { fn parse_meta(&mut self, meta: ParseNestedMeta<'_>) -> Result<(), Error> { if meta.path.is_ident("other") { self.other = Some(meta.path); Ok(()) } else { Err(meta.error("unrecognized rkyv arguments")) } } pub fn parse( attributes: &Attributes, input: &Variant, ) -> Result { let mut result = Self::default(); for attr in input.attrs.iter() { if attr.path().is_ident("rkyv") { attr.parse_nested_meta(|meta| result.parse_meta(meta))?; } } if result.other.is_some() { if attributes.remote.is_none() { return Err(Error::new_spanned( result.other, "`#[rkyv(other)]` may only be used with remote derive", )); } else if !matches!(input.fields, Fields::Unit) { return Err(Error::new_spanned( result.other, "`#[rkyv(other)]` may only be used on unit variants", )); } } Ok(result) } } pub enum Niche { Type(Type), Default, } impl Niche { pub fn to_tokens(&self, rkyv_path: &Path) -> TokenStream { match self { Niche::Type(ty) => quote!(#ty), Niche::Default => quote! 
{ #rkyv_path::niche::niching::DefaultNiche }, } } } impl PartialEq for Niche { fn eq(&self, other: &Self) -> bool { match (self, other) { (Niche::Type(Type::Path(ty1)), Niche::Type(Type::Path(ty2))) => { ty1.path.get_ident() == ty2.path.get_ident() } (Niche::Type(_), Niche::Type(_)) => false, (Niche::Type(Type::Path(ty)), Niche::Default) | (Niche::Default, Niche::Type(Type::Path(ty))) => { match ty.path.get_ident() { Some(ident) => ident == "DefaultNiche", None => false, } } (Niche::Type(_), Niche::Default) | (Niche::Default, Niche::Type(_)) => false, (Niche::Default, Niche::Default) => true, } } } rkyv_derive-0.8.9/src/deserialize.rs000064400000000000000000000272321046102023000156360ustar 00000000000000use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{ parse_quote, punctuated::Punctuated, spanned::Spanned, Data, DeriveInput, Error, Fields, Generics, Ident, Index, Path, WhereClause, }; use crate::{ archive::printing::Printing, attributes::{Attributes, FieldAttributes}, }; pub fn derive(input: DeriveInput) -> Result { let attributes = Attributes::parse(&input)?; derive_deserialize_impl(input, &attributes) } fn derive_deserialize_impl( mut input: DeriveInput, attributes: &Attributes, ) -> Result { let rkyv_path = attributes.crate_path(); let where_clause = input.generics.make_where_clause(); if let Some(ref bounds) = attributes.archive_bounds { for bound in bounds { where_clause.predicates.push(bound.clone()); } } if let Some(ref bounds) = attributes.deserialize_bounds { for bound in bounds { where_clause.predicates.push(bound.clone()); } } let mut impl_input_params = Punctuated::default(); impl_input_params .push(parse_quote! { __D: #rkyv_path::rancor::Fallible + ?Sized }); for param in input.generics.params.iter() { impl_input_params.push(param.clone()); } let impl_input_generics = Generics { lt_token: Some(Default::default()), params: impl_input_params, gt_token: Some(Default::default()), where_clause: input.generics.where_clause.clone(), }; let name = &input.ident; let (impl_generics, ..) = impl_input_generics.split_for_impl(); let (_, ty_generics, where_clause) = input.generics.split_for_impl(); let where_clause = where_clause.unwrap(); let mut deserialize_where = where_clause.clone(); if let Some(ref remote) = attributes.remote { let printing = Printing::new(&input, attributes)?; let body = generate_deserialize_body( &input, attributes, &mut deserialize_where, &rkyv_path, printing.archived_name, name, )?; Ok(quote! { #[automatically_derived] impl #impl_generics #rkyv_path::with::DeserializeWith< < #name #ty_generics as #rkyv_path::with::ArchiveWith<#remote> >::Archived, #remote, __D, > for #name #ty_generics #deserialize_where { fn deserialize_with( field: &< #name #ty_generics as #rkyv_path::with::ArchiveWith<#remote> >::Archived, deserializer: &mut __D, ) -> ::core::result::Result< #remote, <__D as #rkyv_path::rancor::Fallible>::Error, > { let __this = field; #body.map(<#remote as From<#name #ty_generics>>::from) } } }) } else { let body = generate_deserialize_body( &input, attributes, &mut deserialize_where, &rkyv_path, Ident::new("Self", Span::call_site()), name, )?; Ok(quote! 
{ #[automatically_derived] impl #impl_generics #rkyv_path::Deserialize<#name #ty_generics, __D> for #rkyv_path::Archived<#name #ty_generics> #deserialize_where { fn deserialize( &self, deserializer: &mut __D, ) -> ::core::result::Result< #name #ty_generics, <__D as #rkyv_path::rancor::Fallible>::Error, > { let __this = self; #body } } }) } } fn generate_deserialize_body( input: &DeriveInput, attributes: &Attributes, deserialize_where: &mut WhereClause, rkyv_path: &Path, self_type: Ident, return_type: &Ident, ) -> Result { let this = Ident::new("__this", Span::call_site()); let body = match input.data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { let deserialize_fields = fields .named .iter() .map(|field| { let field_attrs = FieldAttributes::parse(attributes, field)?; deserialize_where.predicates.extend( field_attrs.archive_bound(rkyv_path, field), ); deserialize_where.predicates.extend( field_attrs.deserialize_bound(rkyv_path, field), ); let name = &field.ident; let deserialize = field_attrs.deserialize(rkyv_path, field); Ok(quote! { #name: #deserialize(&#this.#name, deserializer)? }) }) .collect::, Error>>()?; quote! { #return_type { #(#deserialize_fields,)* } } } Fields::Unnamed(ref fields) => { let deserialize_fields = fields .unnamed .iter() .enumerate() .map(|(i, field)| { let field_attrs = FieldAttributes::parse(attributes, field)?; deserialize_where.predicates.extend( field_attrs.archive_bound(rkyv_path, field), ); deserialize_where.predicates.extend( field_attrs.deserialize_bound(rkyv_path, field), ); let index = Index::from(i); let deserialize = field_attrs.deserialize(rkyv_path, field); Ok(quote! { #deserialize(&#this.#index, deserializer)? }) }) .collect::, Error>>()?; quote! { #return_type(#(#deserialize_fields,)*) } } Fields::Unit => quote! { #return_type }, }, Data::Enum(ref data) => { let deserialize_variants = data .variants .iter() .map(|v| { let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let bindings = fields.named.iter().map(|field| { let name = &field.ident; quote! { #name } }); let fields = fields .named .iter() .map(|field| { let field_attrs = FieldAttributes::parse( attributes, field, )?; deserialize_where.predicates.extend( field_attrs .archive_bound(rkyv_path, field), ); deserialize_where.predicates.extend( field_attrs.deserialize_bound( rkyv_path, field, ), ); let name = &field.ident; let deserialize = field_attrs .deserialize(rkyv_path, field); Ok(quote! { #name: #deserialize( #name, deserializer, )? }) }) .collect::, Error>>()?; Ok(quote! { #self_type::#variant { #(#bindings,)*.. } => #return_type::#variant { #(#fields,)* } }) } Fields::Unnamed(ref fields) => { let bindings = fields.unnamed.iter().enumerate().map( |(i, f)| { Ident::new(&format!("_{}", i), f.span()) }, ); let fields = fields .unnamed .iter() .enumerate() .map(|(i, field)| { let field_attrs = FieldAttributes::parse( attributes, field, )?; deserialize_where.predicates.extend( field_attrs .archive_bound(rkyv_path, field), ); deserialize_where.predicates.extend( field_attrs.deserialize_bound( rkyv_path, field, ), ); let binding = Ident::new( &format!("_{}", i), field.span(), ); let deserialize = field_attrs .deserialize(rkyv_path, field); Ok(quote! { #deserialize( #binding, deserializer, )? }) }) .collect::, Error>>()?; Ok(quote! { #self_type::#variant( #(#bindings,)*.. ) => #return_type::#variant(#(#fields,)*) }) } Fields::Unit => Ok(quote! { #self_type::#variant => #return_type::#variant }), } }) .collect::, Error>>()?; quote! 
{ match __this { #(#deserialize_variants,)* } } } Data::Union(_) => { return Err(Error::new_spanned( input, "Deserialize cannot be derived for unions", )) } }; Ok(quote! { ::core::result::Result::Ok(#body) }) } rkyv_derive-0.8.9/src/lib.rs000064400000000000000000000117451046102023000141060ustar 00000000000000//! Procedural macros for `rkyv`. #![deny( rustdoc::broken_intra_doc_links, missing_docs, rustdoc::missing_crate_level_docs )] mod archive; mod attributes; mod deserialize; mod portable; mod repr; mod serde; mod serialize; mod util; extern crate proc_macro; use syn::{parse_macro_input, DeriveInput}; /// Derives `Portable` for the labeled type. #[proc_macro_derive(Portable, attributes(rkyv))] pub fn derive_portable( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match portable::derive(derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } /// Derives `Archive` for the labeled type. /// /// # Attributes /// /// Additional arguments can be specified using `#[rkyv(..)]`, which accepts /// the following arguments: /// /// ## Types and fields /// /// - `attr(..)`: Passes along attributes to the generated archived type. /// /// ## Types only /// /// - `derive(..)`: Adds the derives passed as arguments to the generated type. /// This is equivalent to `#[rkyv(attr(derive(..)))]`. /// - `crate = ..`: Chooses an alternative crate path to import rkyv from. /// - `compare(..)`: Implements common comparison operators between the original /// and archived types. Supported comparisons are `PartialEq` and `PartialOrd` /// (i.e. `#[rkyv(compare(PartialEq, PartialOrd))]`). /// - `{archive, serialize, deserialize}_bounds(..)`: Adds additional bounds to /// trait implementations. This can be useful for recursive types, where /// bounds may need to be omitted to prevent recursive trait impls. /// - `bytecheck(..)`: Passed through to the underlying `CheckBytes` derive for /// the archived type. /// - `as = ..`: Uses the given archived type instead of generating a new one. /// This is useful for types which are `Portable` and/or generic over their /// parameters. /// - `archived = ..`: Changes the name of the generated archived type. By /// default, archived types are named "Archived" + `the name of the type`. /// - `resolver = ..`: Changes the name of the generated resolver type. By /// default, resolver types are named `the name of the type` + "Resolver". /// - `remote = ..`: Generate a remote derive for the annotated type instead of /// a regular derive. /// /// ## Fields only /// /// - `with = ..`: Applies the given wrapper type to the field. /// - `omit_bounds`: Omits trait bounds for the annotated field in the generated /// impl. /// /// # Recursive types /// /// This derive macro automatically adds a type bound `field: Archive` for each /// field type. This can cause an overflow while evaluating trait bounds if the /// structure eventually references its own type, as the implementation of /// `Archive` for a struct depends on each field type implementing it /// as well. Adding the attribute `#[rkyv(omit_bounds)]` to a field will /// suppress this trait bound and allow recursive structures. This may be too /// coarse for some types, in which case additional type bounds may be required /// with `{archive, serialize, deserialize}_bounds(..)`. 
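///
/// For example, a self-referential type can suppress the automatically
/// generated bounds on its recursive field and add back only the bounds it
/// actually needs. The sketch below is illustrative rather than exhaustive
/// (it is not compiled as a doctest); the exact bounds required depend on the
/// field types and the features enabled:
///
/// ```ignore
/// use rkyv::{Archive, Deserialize, Serialize};
///
/// #[derive(Archive, Serialize, Deserialize)]
/// #[rkyv(
///     // Re-add the bounds that serializing and deserializing `Vec<Node>`
///     // would otherwise have contributed.
///     serialize_bounds(__S: rkyv::ser::Writer + rkyv::ser::Allocator),
///     deserialize_bounds(__D::Error: rkyv::rancor::Source),
/// )]
/// struct Node {
///     value: u32,
///     // Suppress the automatic `Vec<Node>: Archive` (and related) bounds
///     // to keep trait resolution from recursing forever.
///     #[rkyv(omit_bounds)]
///     children: Vec<Node>,
/// }
/// ```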
/// /// # Wrappers /// /// Wrappers transparently customize archived types by providing different /// implementations of core traits. For example, references cannot be archived, /// but the `Inline` wrapper serializes a reference as if it were a field of the /// struct. Wrappers can be applied to fields using the `#[rkyv(with = ..)]` /// attribute. #[proc_macro_derive(Archive, attributes(rkyv))] pub fn derive_archive( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match archive::derive(&mut derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } /// Derives `Serialize` for the labeled type. /// /// This macro also supports the `#[rkyv]` attribute. See [`Archive`] for more /// information. #[proc_macro_derive(Serialize, attributes(rkyv))] pub fn derive_serialize( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match serialize::derive(derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } /// Derives `Deserialize` for the labeled type. /// /// This macro also supports the `#[rkyv]` attribute. See [`Archive`] for more /// information. #[proc_macro_derive(Deserialize, attributes(rkyv))] pub fn derive_deserialize( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut derive_input = parse_macro_input!(input as DeriveInput); serde::receiver::replace_receiver(&mut derive_input); match deserialize::derive(derive_input) { Ok(result) => result.into(), Err(e) => e.to_compile_error().into(), } } rkyv_derive-0.8.9/src/portable.rs000064400000000000000000000030531046102023000151410ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::{parse_quote, Data, DeriveInput, Error}; use crate::{attributes::Attributes, repr::Repr, util::iter_fields}; pub fn derive(mut input: DeriveInput) -> Result<TokenStream, Error> { let repr = Repr::from_attrs(&input.attrs)?; match &input.data { Data::Struct(_) | Data::Union(_) => { if !repr.is_struct_well_defined() { return Err(Error::new_spanned( &input.ident, "structs and unions must be `repr(C)` or \ `repr(transparent)` to implement `Portable`", )); } } Data::Enum(_) => { if !repr.is_enum_well_defined() { return Err(Error::new_spanned( &input.ident, "enums must be `repr(u8/i8)` or `repr(C, u8/i8)` to \ implement `Portable`", )); } } } let attributes = Attributes::parse(&input)?; let rkyv_path = attributes.crate_path(); let where_clause = input.generics.make_where_clause(); for field in iter_fields(&input.data) { let ty = &field.ty; where_clause.predicates.push(parse_quote! { #ty: #rkyv_path::Portable }); } let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); Ok(quote!
{ unsafe impl #impl_generics #rkyv_path::Portable for #name #ty_generics #where_clause {} }) } rkyv_derive-0.8.9/src/repr.rs000064400000000000000000000076571046102023000143170ustar 00000000000000use syn::{parenthesized, token, Attribute, Error, LitInt}; #[derive(Clone, Copy)] pub enum Primitive { I8, I16, I32, I64, Isize, U8, U16, U32, U64, Usize, } impl Primitive { const ALL: [Self; 10] = [ Self::I8, Self::I16, Self::I32, Self::I64, Self::Isize, Self::U8, Self::U16, Self::U32, Self::U64, Self::Usize, ]; pub const fn as_str(&self) -> &'static str { match self { Self::I8 => "i8", Self::I16 => "i16", Self::I32 => "i32", Self::I64 => "i64", Self::Isize => "isize", Self::U8 => "u8", Self::U16 => "u16", Self::U32 => "u32", Self::U64 => "u64", Self::Usize => "usize", } } pub const fn is_well_defined(&self) -> bool { matches!(self, Self::I8 | Self::U8) } } pub enum Modifier { Packed(#[allow(dead_code)] usize), Align(#[allow(dead_code)] usize), } pub enum Repr { Transparent, Primitive(Primitive), C { primitive: Option, #[allow(dead_code)] modifier: Option, }, Rust { #[allow(dead_code)] modifier: Option, }, } impl Repr { pub fn from_attrs(attrs: &[Attribute]) -> Result { let mut c = false; let mut transparent = false; let mut primitive = None; let mut modifier = None; for attr in attrs.iter().filter(|a| a.meta.path().is_ident("repr")) { attr.parse_nested_meta(|meta| { if meta.path.is_ident("C") { c = true; Ok(()) } else if meta.path.is_ident("transparent") { transparent = true; Ok(()) } else if let Some(&p) = Primitive::ALL .iter() .find(|p| meta.path.is_ident(p.as_str())) { primitive = Some(p); Ok(()) } else if meta.path.is_ident("align") { let content; parenthesized!(content in meta.input); let lit = content.parse::()?; let n = lit.base10_parse()?; modifier = Some(Modifier::Align(n)); Ok(()) } else if meta.path.is_ident("packed") { if meta.input.peek(token::Paren) { let content; parenthesized!(content in meta.input); let lit = content.parse::()?; let n = lit.base10_parse()?; modifier = Some(Modifier::Packed(n)); } else { modifier = Some(Modifier::Packed(1)); } Ok(()) } else { Err(Error::new_spanned( meta.path, "unrecognized repr argument", )) } })?; } if c { Ok(Repr::C { primitive, modifier, }) } else if transparent { Ok(Repr::Transparent) } else if let Some(primitive) = primitive { Ok(Repr::Primitive(primitive)) } else { Ok(Repr::Rust { modifier }) } } pub fn is_struct_well_defined(&self) -> bool { !matches!(self, Self::Rust { .. }) } pub fn is_enum_well_defined(&self) -> bool { match self { Self::Rust { .. } | Self::C { primitive: None, .. } => false, Self::C { primitive: Some(p), .. } | Self::Primitive(p) => p.is_well_defined(), _ => false, } } } rkyv_derive-0.8.9/src/serde/LICENSE000064400000000000000000000017761046102023000151040ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.rkyv_derive-0.8.9/src/serde/mod.rs000064400000000000000000000000421046102023000152050ustar 00000000000000pub mod receiver; pub mod respan; rkyv_derive-0.8.9/src/serde/receiver.rs000064400000000000000000000233561046102023000162470ustar 00000000000000// Code in this file is taken whole or in part from serde: // https://github.com/serde-rs/serde // The original license for this code is included in LICENSE use std::mem; use proc_macro2::Span; use quote::ToTokens; use syn::{ parse_quote, punctuated::Punctuated, Data, DeriveInput, Expr, ExprPath, GenericArgument, GenericParam, Generics, Macro, Path, PathArguments, QSelf, ReturnType, Token, Type, TypeParamBound, TypePath, WherePredicate, }; use super::respan::respan; pub fn replace_receiver(input: &mut DeriveInput) { let self_ty = { let ident = &input.ident; let ty_generics = input.generics.split_for_impl().1; parse_quote!(#ident #ty_generics) }; let mut visitor = ReplaceReceiver(&self_ty); visitor.visit_generics_mut(&mut input.generics); visitor.visit_data_mut(&mut input.data); } struct ReplaceReceiver<'a>(&'a TypePath); impl ReplaceReceiver<'_> { fn self_ty(&self, span: Span) -> TypePath { let tokens = self.0.to_token_stream(); let respanned = respan(tokens, span); syn::parse2(respanned).unwrap() } fn self_to_qself(&self, qself: &mut Option, path: &mut Path) { if path.leading_colon.is_some() || path.segments[0].ident != "Self" { return; } if path.segments.len() == 1 { self.self_to_expr_path(path); return; } let span = path.segments[0].ident.span(); *qself = Some(QSelf { lt_token: Token![<](span), ty: Box::new(Type::Path(self.self_ty(span))), position: 0, as_token: None, gt_token: Token![>](span), }); path.leading_colon = Some(**path.segments.pairs().next().unwrap().punct().unwrap()); let segments = mem::replace(&mut path.segments, Punctuated::new()); path.segments = segments.into_pairs().skip(1).collect(); } fn self_to_expr_path(&self, path: &mut Path) { let self_ty = self.self_ty(path.segments[0].ident.span()); let variant = mem::replace(path, self_ty.path); for segment in &mut path.segments { if let PathArguments::AngleBracketed(bracketed) = &mut segment.arguments { if bracketed.colon2_token.is_none() && !bracketed.args.is_empty() { bracketed.colon2_token = Some(::default()); } } } if variant.segments.len() > 1 { path.segments.push_punct(::default()); path.segments.extend(variant.segments.into_pairs().skip(1)); } } } impl ReplaceReceiver<'_> { // `Self` -> `Receiver` fn visit_type_mut(&mut self, ty: &mut Type) { let span = if let Type::Path(node) = ty { if node.qself.is_none() && node.path.is_ident("Self") { node.path.segments[0].ident.span() } else { self.visit_type_path_mut(node); return; } } else { self.visit_type_mut_impl(ty); return; }; *ty = self.self_ty(span).into(); } // `Self::Assoc` -> `::Assoc` fn visit_type_path_mut(&mut self, ty: &mut TypePath) { if ty.qself.is_none() { self.self_to_qself(&mut ty.qself, &mut ty.path); } self.visit_type_path_mut_impl(ty); } // `Self::method` -> `::method` fn visit_expr_path_mut(&mut self, expr: &mut ExprPath) { if expr.qself.is_none() { 
self.self_to_qself(&mut expr.qself, &mut expr.path); } self.visit_expr_path_mut_impl(expr); } // Everything below is simply traversing the syntax tree. fn visit_type_mut_impl(&mut self, ty: &mut Type) { match ty { Type::Array(ty) => { self.visit_type_mut(&mut ty.elem); self.visit_expr_mut(&mut ty.len); } Type::BareFn(ty) => { for arg in &mut ty.inputs { self.visit_type_mut(&mut arg.ty); } self.visit_return_type_mut(&mut ty.output); } Type::Group(ty) => self.visit_type_mut(&mut ty.elem), Type::ImplTrait(ty) => { for bound in &mut ty.bounds { self.visit_type_param_bound_mut(bound); } } Type::Macro(ty) => self.visit_macro_mut(&mut ty.mac), Type::Paren(ty) => self.visit_type_mut(&mut ty.elem), Type::Path(ty) => { if let Some(qself) = &mut ty.qself { self.visit_type_mut(&mut qself.ty); } self.visit_path_mut(&mut ty.path); } Type::Ptr(ty) => self.visit_type_mut(&mut ty.elem), Type::Reference(ty) => self.visit_type_mut(&mut ty.elem), Type::Slice(ty) => self.visit_type_mut(&mut ty.elem), Type::TraitObject(ty) => { for bound in &mut ty.bounds { self.visit_type_param_bound_mut(bound); } } Type::Tuple(ty) => { for elem in &mut ty.elems { self.visit_type_mut(elem); } } Type::Infer(_) | Type::Never(_) | Type::Verbatim(_) => {} _ => {} } } fn visit_type_path_mut_impl(&mut self, ty: &mut TypePath) { if let Some(qself) = &mut ty.qself { self.visit_type_mut(&mut qself.ty); } self.visit_path_mut(&mut ty.path); } fn visit_expr_path_mut_impl(&mut self, expr: &mut ExprPath) { if let Some(qself) = &mut expr.qself { self.visit_type_mut(&mut qself.ty); } self.visit_path_mut(&mut expr.path); } fn visit_path_mut(&mut self, path: &mut Path) { for segment in &mut path.segments { self.visit_path_arguments_mut(&mut segment.arguments); } } fn visit_path_arguments_mut(&mut self, arguments: &mut PathArguments) { match arguments { PathArguments::None => {} PathArguments::AngleBracketed(arguments) => { for arg in &mut arguments.args { match arg { GenericArgument::Type(arg) => self.visit_type_mut(arg), GenericArgument::AssocType(arg) => { self.visit_type_mut(&mut arg.ty) } _ => {} } } } PathArguments::Parenthesized(arguments) => { for argument in &mut arguments.inputs { self.visit_type_mut(argument); } self.visit_return_type_mut(&mut arguments.output); } } } fn visit_return_type_mut(&mut self, return_type: &mut ReturnType) { match return_type { ReturnType::Default => {} ReturnType::Type(_, output) => self.visit_type_mut(output), } } fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) { if let TypeParamBound::Trait(bound) = bound { self.visit_path_mut(&mut bound.path) } } fn visit_generics_mut(&mut self, generics: &mut Generics) { for param in &mut generics.params { match param { GenericParam::Type(param) => { for bound in &mut param.bounds { self.visit_type_param_bound_mut(bound); } } GenericParam::Lifetime(_) | GenericParam::Const(_) => {} } } if let Some(where_clause) = &mut generics.where_clause { for predicate in &mut where_clause.predicates { if let WherePredicate::Type(predicate) = predicate { self.visit_type_mut(&mut predicate.bounded_ty); for bound in &mut predicate.bounds { self.visit_type_param_bound_mut(bound); } } } } } fn visit_data_mut(&mut self, data: &mut Data) { match data { Data::Struct(data) => { for field in &mut data.fields { self.visit_type_mut(&mut field.ty); } } Data::Enum(data) => { for variant in &mut data.variants { for field in &mut variant.fields { self.visit_type_mut(&mut field.ty); } } } Data::Union(_) => {} } } fn visit_expr_mut(&mut self, expr: &mut Expr) { match expr { 
Expr::Binary(expr) => { self.visit_expr_mut(&mut expr.left); self.visit_expr_mut(&mut expr.right); } Expr::Call(expr) => { self.visit_expr_mut(&mut expr.func); for arg in &mut expr.args { self.visit_expr_mut(arg); } } Expr::Cast(expr) => { self.visit_expr_mut(&mut expr.expr); self.visit_type_mut(&mut expr.ty); } Expr::Field(expr) => self.visit_expr_mut(&mut expr.base), Expr::Index(expr) => { self.visit_expr_mut(&mut expr.expr); self.visit_expr_mut(&mut expr.index); } Expr::Paren(expr) => self.visit_expr_mut(&mut expr.expr), Expr::Path(expr) => self.visit_expr_path_mut(expr), Expr::Unary(expr) => self.visit_expr_mut(&mut expr.expr), _ => {} } } fn visit_macro_mut(&mut self, _mac: &mut Macro) {} } rkyv_derive-0.8.9/src/serde/respan.rs000064400000000000000000000011411046102023000157170ustar 00000000000000// Code in this file is taken whole or in part from serde: // https://github.com/serde-rs/serde // The original license for this code is included in LICENSE use proc_macro2::{Group, Span, TokenStream, TokenTree}; pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream { stream .into_iter() .map(|token| respan_token(token, span)) .collect() } fn respan_token(mut token: TokenTree, span: Span) -> TokenTree { if let TokenTree::Group(g) = &mut token { *g = Group::new(g.delimiter(), respan(g.stream(), span)); } token.set_span(span); token } rkyv_derive-0.8.9/src/serialize.rs000064400000000000000000000260141046102023000153220ustar 00000000000000use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{ parse_quote, punctuated::Punctuated, spanned::Spanned, Data, DeriveInput, Error, Fields, Generics, Ident, Index, Path, WhereClause, }; use crate::{ attributes::{Attributes, FieldAttributes, VariantAttributes}, util::{strip_generics_from_path, strip_raw}, }; pub fn derive(input: DeriveInput) -> Result { let attributes = Attributes::parse(&input)?; derive_serialize_impl(input, &attributes) } fn derive_serialize_impl( mut input: DeriveInput, attributes: &Attributes, ) -> Result { let rkyv_path = attributes.crate_path(); let where_clause = input.generics.make_where_clause(); if let Some(ref bounds) = attributes.archive_bounds { for bound in bounds { where_clause.predicates.push(bound.clone()); } } if let Some(ref bounds) = attributes.serialize_bounds { for bound in bounds { where_clause.predicates.push(bound.clone()); } } let mut impl_input_params = Punctuated::default(); impl_input_params .push(parse_quote! { __S: #rkyv_path::rancor::Fallible + ?Sized }); for param in input.generics.params.iter() { impl_input_params.push(param.clone()); } let impl_input_generics = Generics { lt_token: Some(Default::default()), params: impl_input_params, gt_token: Some(Default::default()), where_clause: input.generics.where_clause.clone(), }; let name = &input.ident; let (impl_generics, ..) = impl_input_generics.split_for_impl(); let (_, ty_generics, where_clause) = input.generics.split_for_impl(); let where_clause = where_clause.unwrap(); let resolver = attributes.resolver.as_ref().map_or_else( || Ident::new(&format!("{}Resolver", strip_raw(name)), name.span()), |value| value.clone(), ); let mut serialize_where = where_clause.clone(); if let Some(ref remote) = attributes.remote { let body = generate_serialize_body( &input, attributes, &mut serialize_where, &rkyv_path, resolver, strip_generics_from_path(remote.clone()), )?; Ok(quote! 
{ #[automatically_derived] impl #impl_generics #rkyv_path::with::SerializeWith<#remote, __S> for #name #ty_generics #serialize_where { fn serialize_with( field: &#remote, serializer: &mut __S, ) -> ::core::result::Result< >::Resolver, <__S as #rkyv_path::rancor::Fallible>::Error, > { let __this = field; #body } } }) } else { let body = generate_serialize_body( &input, attributes, &mut serialize_where, &rkyv_path, resolver, parse_quote!(#name), )?; Ok(quote! { #[automatically_derived] impl #impl_generics #rkyv_path::Serialize<__S> for #name #ty_generics #serialize_where { fn serialize( &self, serializer: &mut __S, ) -> ::core::result::Result< ::Resolver, <__S as #rkyv_path::rancor::Fallible>::Error, > { let __this = self; #body } } }) } } fn generate_serialize_body( input: &DeriveInput, attributes: &Attributes, serialize_where: &mut WhereClause, rkyv_path: &Path, resolver: Ident, name: Path, ) -> Result { let this = Ident::new("__this", Span::call_site()); let body = match input.data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { let resolver_values = fields .named .iter() .map(|field| { let field_attrs = FieldAttributes::parse(attributes, field)?; serialize_where.predicates.extend( field_attrs.serialize_bound(rkyv_path, field), ); let name = &field.ident; let access_field = field_attrs.access_field(&this, name); let serialize = field_attrs.serialize(rkyv_path, field); Ok(quote! { #name: #serialize(#access_field, serializer)? }) }) .collect::, Error>>()?; quote! { #resolver { #(#resolver_values,)* } } } Fields::Unnamed(ref fields) => { let resolver_values = fields .unnamed .iter() .enumerate() .map(|(i, field)| { let field_attrs = FieldAttributes::parse(attributes, field)?; serialize_where.predicates.extend( field_attrs.serialize_bound(rkyv_path, field), ); let index = Index::from(i); let access_field = field_attrs.access_field(&this, &index); let serialize = field_attrs.serialize(rkyv_path, field); Ok(quote! { #serialize(#access_field, serializer)? }) }) .collect::, Error>>()?; quote! { #resolver(#(#resolver_values,)*) } } Fields::Unit => quote! { #resolver }, }, Data::Enum(ref data) => { let mut other: Option = None; let serialize_arms = data .variants .iter() .map(|v| { if let Some(ref other) = other { return Err(Error::new_spanned( other, "Only the very last variant may be denoted with \ `#[rkyv(other)]`", )); } let variant_attrs = VariantAttributes::parse(attributes, v)?; let variant = &v.ident; match v.fields { Fields::Named(ref fields) => { let bindings = fields.named.iter().map(|f| &f.ident); let fields = fields .named .iter() .map(|field| { let field_attrs = FieldAttributes::parse( attributes, field, )?; serialize_where.predicates.extend( field_attrs .serialize_bound(rkyv_path, field), ); let name = &field.ident; let serialize = field_attrs.serialize(rkyv_path, field); Ok(quote! { #name: #serialize(#name, serializer)? }) }) .collect::, Error>>()?; Ok(quote! { #name::#variant { #(#bindings,)*.. } => #resolver::#variant { #(#fields,)* } }) } Fields::Unnamed(ref fields) => { let bindings = fields.unnamed.iter().enumerate().map( |(i, f)| { Ident::new(&format!("_{}", i), f.span()) }, ); let fields = fields .unnamed .iter() .enumerate() .map(|(i, field)| { let field_attrs = FieldAttributes::parse( attributes, field, )?; serialize_where.predicates.extend( field_attrs .serialize_bound(rkyv_path, field), ); let binding = Ident::new( &format!("_{}", i), field.span(), ); let serialize = field_attrs.serialize(rkyv_path, field); Ok(quote! 
{ #serialize(#binding, serializer)? }) }) .collect::, Error>>()?; Ok(quote! { #name::#variant( #(#bindings,)*.. ) => #resolver::#variant(#(#fields,)*) }) } Fields::Unit => { if variant_attrs.other.is_some() { other = variant_attrs.other; Ok(quote! { _ => #resolver::#variant }) } else { Ok(quote! { #name::#variant => #resolver::#variant }) } } } }) .collect::, Error>>()?; quote! { match __this { #(#serialize_arms,)* } } } Data::Union(_) => { return Err(Error::new_spanned( input, "Serialize cannot be derived for unions", )) } }; Ok(quote! { ::core::result::Result::Ok(#body) }) } rkyv_derive-0.8.9/src/util.rs000064400000000000000000000027121046102023000143070ustar 00000000000000use core::iter::FlatMap; use proc_macro2::Ident; use syn::{ punctuated::Iter, Data, DataEnum, DataStruct, DataUnion, Field, Path, PathArguments, Variant, }; pub fn strip_raw(ident: &Ident) -> String { let as_string = ident.to_string(); as_string .strip_prefix("r#") .map(ToString::to_string) .unwrap_or(as_string) } type VariantFieldsFn = fn(&Variant) -> Iter<'_, Field>; fn variant_fields(variant: &Variant) -> Iter<'_, Field> { variant.fields.iter() } pub enum FieldsIter<'a> { Struct(Iter<'a, Field>), Enum(FlatMap, Iter<'a, Field>, VariantFieldsFn>), } impl<'a> Iterator for FieldsIter<'a> { type Item = &'a Field; fn next(&mut self) -> Option { match self { Self::Struct(iter) => iter.next(), Self::Enum(iter) => iter.next(), } } } pub fn iter_fields(data: &Data) -> FieldsIter<'_> { match data { Data::Struct(DataStruct { fields, .. }) => { FieldsIter::Struct(fields.iter()) } Data::Enum(DataEnum { variants, .. }) => { FieldsIter::Enum(variants.iter().flat_map(variant_fields)) } Data::Union(DataUnion { fields, .. }) => { FieldsIter::Struct(fields.named.iter()) } } } pub fn strip_generics_from_path(mut path: Path) -> Path { for segment in path.segments.iter_mut() { segment.arguments = PathArguments::None; } path }
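// Illustrative sketch (not part of the upstream crate): a small test module
// showing the intended behavior of the helpers above. The assertions are
// assumptions based on the implementations, not published test cases.
#[cfg(test)]
mod tests {
    use quote::ToTokens;
    use syn::parse_quote;

    use super::{strip_generics_from_path, strip_raw};

    #[test]
    fn strip_raw_removes_raw_identifier_prefix() {
        // Raw identifiers like `r#type` lose their `r#` prefix...
        let raw: proc_macro2::Ident = parse_quote!(r#type);
        assert_eq!(strip_raw(&raw), "type");

        // ...while ordinary identifiers pass through unchanged.
        let plain: proc_macro2::Ident = parse_quote!(example);
        assert_eq!(strip_raw(&plain), "example");
    }

    #[test]
    fn strip_generics_removes_type_arguments() {
        // Angle-bracketed arguments are dropped from every path segment.
        let path: syn::Path = parse_quote!(alloc::vec::Vec<u8>);
        let stripped = strip_generics_from_path(path);
        assert_eq!(
            stripped.to_token_stream().to_string(),
            "alloc :: vec :: Vec",
        );
    }
}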