diesel_derives-2.0.2/Cargo.toml0000644000000027240000000000100120260ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] rust-version = "1.56.0" name = "diesel_derives" version = "2.0.2" include = [ "src/**/*", "LICENSE-*", ] autotests = false description = "You should not use this crate directly, it is internal to Diesel." homepage = "https://diesel.rs" documentation = "https://diesel.rs/guides/" license = "MIT OR Apache-2.0" repository = "https://github.com/diesel-rs/diesel/tree/master/diesel_derives" [lib] proc-macro = true [dependencies.proc-macro-error] version = "1.0.4" [dependencies.proc-macro2] version = "1.0.27" [dependencies.quote] version = "1.0.9" [dependencies.syn] version = "1.0.73" features = [ "derive", "fold", ] [dev-dependencies.cfg-if] version = "1" [dev-dependencies.diesel] version = "~2.0.0" [dev-dependencies.dotenvy] version = "0.15" [features] 128-column-tables = ["64-column-tables"] 32-column-tables = [] 64-column-tables = ["32-column-tables"] default = [] mysql = [] nightly = ["proc-macro2/nightly"] postgres = [] sqlite = [] with-deprecated = [] without-deprecated = [] diesel_derives-2.0.2/Cargo.toml.orig000064400000000000000000000016641046102023000155110ustar 00000000000000[package] name = "diesel_derives" version = "2.0.2" license = "MIT OR Apache-2.0" description = "You should not use this crate directly, it is internal to Diesel." 
documentation = "https://diesel.rs/guides/" homepage = "https://diesel.rs" repository = "https://github.com/diesel-rs/diesel/tree/master/diesel_derives" autotests = false include = ["src/**/*", "LICENSE-*"] rust-version = "1.56.0" [dependencies] syn = { version = "1.0.73", features = ["derive", "fold"] } quote = "1.0.9" proc-macro2 = "1.0.27" proc-macro-error = "1.0.4" [dev-dependencies] cfg-if = "1" dotenvy = "0.15" [dev-dependencies.diesel] version = "~2.0.0" path = "../diesel" [lib] proc-macro = true #[[test]] #name = "tests" [features] default = [] nightly = ["proc-macro2/nightly"] postgres = [] sqlite = [] mysql = [] 32-column-tables = [] 64-column-tables = ["32-column-tables"] 128-column-tables = ["64-column-tables"] without-deprecated = [] with-deprecated = [] diesel_derives-2.0.2/LICENSE-APACHE000064400000000000000000000250461046102023000145460ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS Copyright 2015-2021 Sean Griffin, 2018-2021 Diesel Core Team Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
diesel_derives-2.0.2/LICENSE-MIT000064400000000000000000000021121046102023000142430ustar 00000000000000The MIT License (MIT) 2015-2021 Sean Griffin, 2018-2021 Diesel Core Team Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diesel_derives-2.0.2/src/as_changeset.rs000064400000000000000000000147721046102023000164070ustar 00000000000000use proc_macro2::TokenStream; use syn::{DeriveInput, Expr, Path, Type}; use attrs::AttributeSpanWrapper; use field::Field; use model::Model; use util::{inner_of_option_ty, is_option_ty, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); let struct_name = &item.ident; let table_name = model.table_name(); let fields_for_update = model .fields() .iter() .filter(|f| { !model .primary_key_names .iter() .any(|p| f.column_name() == *p) }) .collect::>(); if fields_for_update.is_empty() { abort_call_site!( "Deriving `AsChangeset` on a structure that only contains primary keys isn't supported."; help = "If you want to change the primary key of a row, you should do so with `.set(table::id.eq(new_id))`."; note = "`#[derive(AsChangeset)]` never changes the primary key of a row."; ) } let treat_none_as_null = model.treat_none_as_null(); let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let mut generate_borrowed_changeset = true; let mut direct_field_ty = Vec::with_capacity(fields_for_update.len()); let mut direct_field_assign = Vec::with_capacity(fields_for_update.len()); let mut ref_field_ty = Vec::with_capacity(fields_for_update.len()); let mut ref_field_assign = Vec::with_capacity(fields_for_update.len()); for field in fields_for_update { match field.serialize_as.as_ref() { Some(AttributeSpanWrapper { item: ty, .. 
}) => { direct_field_ty.push(field_changeset_ty_serialize_as( field, table_name, ty, treat_none_as_null, )); direct_field_assign.push(field_changeset_expr_serialize_as( field, table_name, ty, treat_none_as_null, )); generate_borrowed_changeset = false; // as soon as we hit one field with #[diesel(serialize_as)] there is no point in generating the impl of AsChangeset for borrowed structs } None => { direct_field_ty.push(field_changeset_ty( field, table_name, None, treat_none_as_null, )); direct_field_assign.push(field_changeset_expr( field, table_name, None, treat_none_as_null, )); ref_field_ty.push(field_changeset_ty( field, table_name, Some(quote!(&'update)), treat_none_as_null, )); ref_field_assign.push(field_changeset_expr( field, table_name, Some(quote!(&)), treat_none_as_null, )); } } } let changeset_owned = quote! { impl #impl_generics AsChangeset for #struct_name #ty_generics #where_clause { type Target = #table_name::table; type Changeset = <(#(#direct_field_ty,)*) as AsChangeset>::Changeset; fn as_changeset(self) -> Self::Changeset { (#(#direct_field_assign,)*).as_changeset() } } }; let changeset_borrowed = if generate_borrowed_changeset { let mut impl_generics = item.generics.clone(); impl_generics.params.push(parse_quote!('update)); let (impl_generics, _, _) = impl_generics.split_for_impl(); quote! { impl #impl_generics AsChangeset for &'update #struct_name #ty_generics #where_clause { type Target = #table_name::table; type Changeset = <(#(#ref_field_ty,)*) as AsChangeset>::Changeset; fn as_changeset(self) -> Self::Changeset { (#(#ref_field_assign,)*).as_changeset() } } } } else { quote! 
{} }; wrap_in_dummy_mod(quote!( use diesel::query_builder::AsChangeset; use diesel::prelude::*; #changeset_owned #changeset_borrowed )) } fn field_changeset_ty( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_null: bool, ) -> TokenStream { let column_name = field.column_name(); if !treat_none_as_null && is_option_ty(&field.ty) { let field_ty = inner_of_option_ty(&field.ty); quote!(std::option::Option>) } else { let field_ty = &field.ty; quote!(diesel::dsl::Eq<#table_name::#column_name, #lifetime #field_ty>) } } fn field_changeset_expr( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_null: bool, ) -> TokenStream { let field_name = &field.name; let column_name = field.column_name(); if !treat_none_as_null && is_option_ty(&field.ty) { if lifetime.is_some() { quote!(self.#field_name.as_ref().map(|x| #table_name::#column_name.eq(x))) } else { quote!(self.#field_name.map(|x| #table_name::#column_name.eq(x))) } } else { quote!(#table_name::#column_name.eq(#lifetime self.#field_name)) } } fn field_changeset_ty_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_null: bool, ) -> TokenStream { let column_name = field.column_name(); if !treat_none_as_null && is_option_ty(&field.ty) { let inner_ty = inner_of_option_ty(ty); quote!(std::option::Option>) } else { quote!(diesel::dsl::Eq<#table_name::#column_name, #ty>) } } fn field_changeset_expr_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_null: bool, ) -> TokenStream { let field_name = &field.name; let column_name = field.column_name(); let column: Expr = parse_quote!(#table_name::#column_name); if !treat_none_as_null && is_option_ty(&field.ty) { quote!(self.#field_name.map(|x| #column.eq(::std::convert::Into::<#ty>::into(x)))) } else { quote!(#column.eq(::std::convert::Into::<#ty>::into(self.#field_name))) } } diesel_derives-2.0.2/src/as_expression.rs000064400000000000000000000074431046102023000166420ustar 00000000000000use 
proc_macro2::TokenStream; use syn::DeriveInput; use model::Model; use util::{ty_for_foreign_derive, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, true); if model.sql_types.is_empty() { abort_call_site!( "At least one `sql_type` is needed for deriving `AsExpression` on a structure." ); } let struct_ty = ty_for_foreign_derive(&item, &model); let (impl_generics, ..) = item.generics.split_for_impl(); let lifetimes = item.generics.lifetimes().collect::>(); let ty_params = item.generics.type_params().collect::>(); let const_params = item.generics.const_params().collect::>(); let tokens = model.sql_types.iter().map(|sql_type| { let lifetimes = &lifetimes; let ty_params = &ty_params; let const_params = &const_params; let tokens = quote!( impl<'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression<#sql_type> for &'expr #struct_ty { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression> for &'expr #struct_ty { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<'expr2, 'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression<#sql_type> for &'expr2 &'expr #struct_ty { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<'expr2, 'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression> for &'expr2 &'expr #struct_ty { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl<#(#lifetimes,)* #(#ty_params,)* __DB, #(#const_params,)*> diesel::serialize::ToSql, __DB> for #struct_ty where __DB: diesel::backend::Backend, Self: ToSql<#sql_type, __DB>, { fn to_sql<'__b>(&'__b self, out: &mut Output<'__b, '_, __DB>) -> serialize::Result { ToSql::<#sql_type, __DB>::to_sql(self, out) } } ); if 
model.not_sized { tokens } else { quote!( #tokens impl#impl_generics AsExpression<#sql_type> for #struct_ty { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl#impl_generics AsExpression> for #struct_ty { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } ) } }); wrap_in_dummy_mod(quote! { use diesel::expression::AsExpression; use diesel::internal::derives::as_expression::Bound; use diesel::sql_types::Nullable; use diesel::serialize::{self, ToSql, Output}; #(#tokens)* }) } diesel_derives-2.0.2/src/associations.rs000064400000000000000000000101611046102023000164460ustar 00000000000000use proc_macro2::{Span, TokenStream}; use syn::fold::Fold; use syn::{DeriveInput, Ident, Lifetime}; use model::Model; use parsers::BelongsTo; use util::{camel_to_snake, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); if model.belongs_to.is_empty() { abort_call_site!( "At least one `belongs_to` is needed for deriving `Associations` on a structure." 
); } let tokens = model .belongs_to .iter() .map(|assoc| derive_belongs_to(&item, &model, assoc)); wrap_in_dummy_mod(quote!(#(#tokens)*)) } fn derive_belongs_to(item: &DeriveInput, model: &Model, assoc: &BelongsTo) -> TokenStream { let (_, ty_generics, _) = item.generics.split_for_impl(); let struct_name = &item.ident; let table_name = model.table_name(); let foreign_key = &foreign_key(assoc); let foreign_key_field = model.find_column(foreign_key); let foreign_key_name = &foreign_key_field.name; let foreign_key_ty = &foreign_key_field.ty; let mut generics = item.generics.clone(); let parent_struct = ReplacePathLifetimes::new(|i, span| { let letter = char::from(b'b' + i as u8); let lifetime = Lifetime::new(&format!("'__{}", letter), span); generics.params.push(parse_quote!(#lifetime)); lifetime }) .fold_type_path(assoc.parent.clone()); generics.params.push(parse_quote!(__FK)); { let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); where_clause .predicates .push(parse_quote!(__FK: std::hash::Hash + std::cmp::Eq)); where_clause.predicates.push( parse_quote!(for<'__a> &'__a #foreign_key_ty: std::convert::Into<::std::option::Option<&'__a __FK>>), ); where_clause.predicates.push( parse_quote!(for<'__a> &'__a #parent_struct: diesel::associations::Identifiable), ); } let foreign_key_expr = quote!(std::convert::Into::into(&self.#foreign_key_name)); let foreign_key_ty = quote!(__FK); let (impl_generics, _, where_clause) = generics.split_for_impl(); quote! 
{ impl #impl_generics diesel::associations::BelongsTo<#parent_struct> for #struct_name #ty_generics #where_clause { type ForeignKey = #foreign_key_ty; type ForeignKeyColumn = #table_name::#foreign_key; fn foreign_key(&self) -> std::option::Option<&Self::ForeignKey> { #foreign_key_expr } fn foreign_key_column() -> Self::ForeignKeyColumn { #table_name::#foreign_key } } impl #impl_generics diesel::associations::BelongsTo<&'_ #parent_struct> for #struct_name #ty_generics #where_clause { type ForeignKey = #foreign_key_ty; type ForeignKeyColumn = #table_name::#foreign_key; fn foreign_key(&self) -> std::option::Option<&Self::ForeignKey> { #foreign_key_expr } fn foreign_key_column() -> Self::ForeignKeyColumn { #table_name::#foreign_key } } } } fn foreign_key(assoc: &BelongsTo) -> Ident { let ident = &assoc .parent .path .segments .last() .expect("paths always have at least one segment") .ident; assoc .foreign_key .clone() .unwrap_or_else(|| infer_foreign_key(ident)) } fn infer_foreign_key(name: &Ident) -> Ident { let snake_case = camel_to_snake(&name.to_string()); Ident::new(&format!("{}_id", snake_case), name.span()) } struct ReplacePathLifetimes { count: usize, f: F, } impl ReplacePathLifetimes { fn new(f: F) -> Self { Self { count: 0, f } } } impl Fold for ReplacePathLifetimes where F: FnMut(usize, Span) -> Lifetime, { fn fold_lifetime(&mut self, mut lt: Lifetime) -> Lifetime { if lt.ident == "_" { lt = (self.f)(self.count, lt.span()); self.count += 1; } lt } } diesel_derives-2.0.2/src/attrs.rs000064400000000000000000000227531046102023000151160ustar 00000000000000use std::fmt::{Display, Formatter}; use proc_macro2::{Span, TokenStream}; use proc_macro_error::ResultExt; use quote::ToTokens; use syn::parse::discouraged::Speculative; use syn::parse::{Parse, ParseStream, Parser, Result}; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::token::Comma; use syn::{parenthesized, Attribute, Ident, LitBool, LitStr, Path, Type, TypePath}; use 
deprecated::ParseDeprecated; use parsers::{BelongsTo, MysqlType, PostgresType, SqliteType}; use util::{ parse_eq, parse_paren, unknown_attribute, BELONGS_TO_NOTE, COLUMN_NAME_NOTE, DESERIALIZE_AS_NOTE, MYSQL_TYPE_NOTE, POSTGRES_TYPE_NOTE, SELECT_EXPRESSION_NOTE, SELECT_EXPRESSION_TYPE_NOTE, SERIALIZE_AS_NOTE, SQLITE_TYPE_NOTE, SQL_TYPE_NOTE, TABLE_NAME_NOTE, TREAT_NONE_AS_DEFAULT_VALUE_NOTE, TREAT_NONE_AS_NULL_NOTE, }; use crate::field::SelectExpr; pub struct AttributeSpanWrapper { pub item: T, pub attribute_span: Span, pub ident_span: Span, } pub enum FieldAttr { Embed(Ident), ColumnName(Ident, SqlIdentifier), SqlType(Ident, TypePath), SerializeAs(Ident, TypePath), DeserializeAs(Ident, TypePath), SelectExpression(Ident, SelectExpr), SelectExpressionType(Ident, Type), } #[derive(Clone)] pub struct SqlIdentifier { field_name: String, span: Span, } impl SqlIdentifier { pub fn span(&self) -> Span { self.span } } impl ToTokens for SqlIdentifier { fn to_tokens(&self, tokens: &mut TokenStream) { Ident::new(&self.field_name, self.span).to_tokens(tokens) } } impl Display for SqlIdentifier { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str(&self.field_name) } } impl PartialEq for SqlIdentifier { fn eq(&self, other: &Ident) -> bool { *other == self.field_name } } impl From<&'_ Ident> for SqlIdentifier { fn from(ident: &'_ Ident) -> Self { Self { span: ident.span(), field_name: ident.to_string(), } } } impl Parse for SqlIdentifier { fn parse(input: ParseStream) -> Result { let fork = input.fork(); if let Ok(ident) = fork.parse::() { input.advance_to(&fork); Ok((&ident).into()) } else { let name = input.parse::()?; Ok(Self { field_name: name.value(), span: name.span(), }) } } } impl Parse for FieldAttr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "embed" => Ok(FieldAttr::Embed(name)), "column_name" => Ok(FieldAttr::ColumnName( name, parse_eq(input, COLUMN_NAME_NOTE)?, )), 
"sql_type" => Ok(FieldAttr::SqlType(name, parse_eq(input, SQL_TYPE_NOTE)?)), "serialize_as" => Ok(FieldAttr::SerializeAs( name, parse_eq(input, SERIALIZE_AS_NOTE)?, )), "deserialize_as" => Ok(FieldAttr::DeserializeAs( name, parse_eq(input, DESERIALIZE_AS_NOTE)?, )), "select_expression" => Ok(FieldAttr::SelectExpression( name, parse_eq(input, SELECT_EXPRESSION_NOTE)?, )), "select_expression_type" => Ok(FieldAttr::SelectExpressionType( name, parse_eq(input, SELECT_EXPRESSION_TYPE_NOTE)?, )), _ => unknown_attribute( &name, &[ "embed", "column_name", "sql_type", "serialize_as", "deserialize_as", "select_expression", "select_expression_type", ], ), } } } impl Spanned for FieldAttr { fn span(&self) -> Span { match self { FieldAttr::Embed(ident) | FieldAttr::ColumnName(ident, _) | FieldAttr::SqlType(ident, _) | FieldAttr::SerializeAs(ident, _) | FieldAttr::DeserializeAs(ident, _) | FieldAttr::SelectExpression(ident, _) | FieldAttr::SelectExpressionType(ident, _) => ident.span(), } } } #[allow(clippy::large_enum_variant)] pub enum StructAttr { Aggregate(Ident), NotSized(Ident), ForeignDerive(Ident), TableName(Ident, Path), SqlType(Ident, TypePath), TreatNoneAsDefaultValue(Ident, LitBool), TreatNoneAsNull(Ident, LitBool), BelongsTo(Ident, BelongsTo), MysqlType(Ident, MysqlType), SqliteType(Ident, SqliteType), PostgresType(Ident, PostgresType), PrimaryKey(Ident, Punctuated), } impl Parse for StructAttr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "aggregate" => Ok(StructAttr::Aggregate(name)), "not_sized" => Ok(StructAttr::NotSized(name)), "foreign_derive" => Ok(StructAttr::ForeignDerive(name)), "table_name" => Ok(StructAttr::TableName( name, parse_eq(input, TABLE_NAME_NOTE)?, )), "sql_type" => Ok(StructAttr::SqlType(name, parse_eq(input, SQL_TYPE_NOTE)?)), "treat_none_as_default_value" => Ok(StructAttr::TreatNoneAsDefaultValue( name, parse_eq(input, TREAT_NONE_AS_DEFAULT_VALUE_NOTE)?, )), 
"treat_none_as_null" => Ok(StructAttr::TreatNoneAsNull( name, parse_eq(input, TREAT_NONE_AS_NULL_NOTE)?, )), "belongs_to" => Ok(StructAttr::BelongsTo( name, parse_paren(input, BELONGS_TO_NOTE)?, )), "mysql_type" => Ok(StructAttr::MysqlType( name, parse_paren(input, MYSQL_TYPE_NOTE)?, )), "sqlite_type" => Ok(StructAttr::SqliteType( name, parse_paren(input, SQLITE_TYPE_NOTE)?, )), "postgres_type" => Ok(StructAttr::PostgresType( name, parse_paren(input, POSTGRES_TYPE_NOTE)?, )), "primary_key" => Ok(StructAttr::PrimaryKey(name, { let content; parenthesized!(content in input); content.parse_terminated(Ident::parse)? })), _ => unknown_attribute( &name, &[ "aggregate", "not_sized", "foreign_derive", "table_name", "sql_type", "treat_none_as_default_value", "treat_none_as_null", "belongs_to", "mysql_type", "sqlite_type", "postgres_type", "primary_key", ], ), } } } impl Spanned for StructAttr { fn span(&self) -> Span { match self { StructAttr::Aggregate(ident) | StructAttr::NotSized(ident) | StructAttr::ForeignDerive(ident) | StructAttr::TableName(ident, _) | StructAttr::SqlType(ident, _) | StructAttr::TreatNoneAsDefaultValue(ident, _) | StructAttr::TreatNoneAsNull(ident, _) | StructAttr::BelongsTo(ident, _) | StructAttr::MysqlType(ident, _) | StructAttr::SqliteType(ident, _) | StructAttr::PostgresType(ident, _) | StructAttr::PrimaryKey(ident, _) => ident.span(), } } } pub fn parse_attributes(attrs: &[Attribute]) -> Vec> where T: Parse + ParseDeprecated + Spanned, { attrs .iter() .flat_map(|attr| { if attr.path.is_ident("diesel") { attr.parse_args_with(Punctuated::::parse_terminated) .unwrap_or_abort() .into_iter() .map(|a| AttributeSpanWrapper { ident_span: a.span(), item: a, attribute_span: attr.tokens.span(), }) .collect::>() } else if cfg!(all( not(feature = "without-deprecated"), feature = "with-deprecated" )) { let mut p = Vec::new(); let Attribute { path, tokens, .. 
} = attr; let ident = path.get_ident().map(|f| f.to_string()); if let "sql_type" | "column_name" | "table_name" | "changeset_options" | "primary_key" | "belongs_to" | "sqlite_type" | "mysql_type" | "postgres" = ident.as_deref().unwrap_or_default() { let ts = quote!(#path #tokens).into(); let value = Parser::parse(T::parse_deprecated, ts).unwrap_or_abort(); if let Some(value) = value { p.push(AttributeSpanWrapper { ident_span: value.span(), item: value, attribute_span: attr.tokens.span(), }); } } p } else { Vec::new() } }) .collect() } diesel_derives-2.0.2/src/deprecated/belongs_to.rs000064400000000000000000000025321046102023000202050ustar 00000000000000use syn::parse::{ParseStream, Result}; use syn::token::Comma; use syn::{parenthesized, Ident, LitStr}; use deprecated::utils::parse_eq_and_lit_str; use parsers::BelongsTo; use util::BELONGS_TO_NOTE; pub fn parse_belongs_to(name: Ident, input: ParseStream) -> Result { if input.is_empty() { abort!( name.span(), "unexpected end of input, expected parentheses"; help = "The correct format looks like `#[diesel({})]`", BELONGS_TO_NOTE ); } let content; parenthesized!(content in input); let parent = if content.peek(Ident) { let name: Ident = content.parse()?; if name == "parent" { let lit_str = parse_eq_and_lit_str(name, &content, BELONGS_TO_NOTE)?; lit_str.parse()? } else { LitStr::new(&name.to_string(), name.span()).parse()? } } else { content.parse()? 
}; let mut foreign_key = None; if content.peek(Comma) { content.parse::()?; let name: Ident = content.parse()?; if name != "foreign_key" { abort!(name, "expected `foreign_key`"); } let lit_str = parse_eq_and_lit_str(name, &content, BELONGS_TO_NOTE)?; foreign_key = Some(lit_str.parse()?); } Ok(BelongsTo { parent, foreign_key, }) } diesel_derives-2.0.2/src/deprecated/changeset_options.rs000064400000000000000000000014501046102023000215640ustar 00000000000000use proc_macro_error::ResultExt; use syn::parse::{ParseStream, Result}; use syn::{parenthesized, Ident, LitBool}; use deprecated::utils::parse_eq_and_lit_str; use util::TREAT_NONE_AS_NULL_NOTE; pub fn parse_changeset_options(name: Ident, input: ParseStream) -> Result<(Ident, LitBool)> { if input.is_empty() { abort!(name.span(), "unexpected end of input, expected parentheses"); } let content; parenthesized!(content in input); let name: Ident = content.parse()?; let name_str = name.to_string(); if name_str != "treat_none_as_null" { abort!(name.span(), "expected `treat_none_as_null`"); } Ok((name.clone(), { let lit_str = parse_eq_and_lit_str(name, &content, TREAT_NONE_AS_NULL_NOTE)?; lit_str.parse().unwrap_or_abort() })) } diesel_derives-2.0.2/src/deprecated/mod.rs000064400000000000000000000222111046102023000166250ustar 00000000000000use syn::parse::{ParseStream, Result}; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod belongs_to; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod changeset_options; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod postgres_type; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod primary_key; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod utils; pub trait ParseDeprecated: Sized { fn parse_deprecated(input: ParseStream) -> Result>; } #[cfg(any(feature = "without-deprecated", not(feature = "with-deprecated")))] mod 
not_deprecated { use super::{ParseDeprecated, ParseStream, Result}; use attrs::{FieldAttr, StructAttr}; impl ParseDeprecated for StructAttr { fn parse_deprecated(_input: ParseStream) -> Result> { unimplemented!() } } impl ParseDeprecated for FieldAttr { fn parse_deprecated(_input: ParseStream) -> Result> { unimplemented!() } } } #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod impl_deprecated { use super::{ParseDeprecated, ParseStream, Result}; use attrs::{FieldAttr, StructAttr}; use deprecated::belongs_to::parse_belongs_to; use deprecated::changeset_options::parse_changeset_options; use deprecated::postgres_type::parse_postgres_type; use deprecated::primary_key::parse_primary_key; use deprecated::utils::parse_eq_and_lit_str; use parsers::{MysqlType, PostgresType, SqliteType}; use proc_macro2::Span; use proc_macro_error::ResultExt; use syn::Ident; use util::{ COLUMN_NAME_NOTE, MYSQL_TYPE_NOTE, SQLITE_TYPE_NOTE, SQL_TYPE_NOTE, TABLE_NAME_NOTE, }; macro_rules! 
warn { ($ident: expr, $help: expr) => { warn( $ident.span(), &format!("#[{}] attribute form is deprecated", $ident), $help, ); }; } impl ParseDeprecated for StructAttr { fn parse_deprecated(input: ParseStream) -> Result> { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "table_name" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, TABLE_NAME_NOTE)?; warn!( name, &format!("use `#[diesel(table_name = {})]` instead", lit_str.value()) ); Ok(Some(StructAttr::TableName(name, { lit_str.parse().unwrap_or_abort() }))) } "changeset_options" => { let (ident, value) = parse_changeset_options(name.clone(), input)?; warn!( name, &format!( "use `#[diesel(treat_none_as_null = {})]` instead", value.value ) ); Ok(Some(StructAttr::TreatNoneAsNull(ident, value))) } "sql_type" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, SQL_TYPE_NOTE)?; warn!( name, &format!("use `#[diesel(sql_type = {})]` instead", lit_str.value()) ); Ok(Some(StructAttr::SqlType(name, { lit_str.parse().unwrap_or_abort() }))) } "primary_key" => { let keys = parse_primary_key(name.clone(), input)?; let hint = keys .iter() .map(|i| i.to_string()) .collect::>() .join(", "); warn!( name, &format!("use `#[diesel(primary_key({}))]` instead", hint) ); Ok(Some(StructAttr::PrimaryKey(name, keys))) } "belongs_to" => { let belongs_to = parse_belongs_to(name.clone(), input)?; let parent = belongs_to .parent .path .segments .iter() .map(|s| s.ident.to_string()) .collect::>() .join("::"); if let Some(ref key) = belongs_to.foreign_key { warn!( name, &format!( "use `#[diesel(belongs_to({}, foreign_key = {}))]` instead", parent, key ) ); } else { warn!( name, &format!("use `#[diesel(belongs_to({}))]` instead", parent) ); } Ok(Some(StructAttr::BelongsTo(name, belongs_to))) } "sqlite_type" => { let name_value = parse_eq_and_lit_str(name.clone(), input, SQLITE_TYPE_NOTE)?; warn!( name, &format!( "use `#[diesel(sqlite_type(name = \"{}\"))]` instead", name_value.value() ) ); 
Ok(Some(StructAttr::SqliteType( name, SqliteType { name: name_value }, ))) } "mysql_type" => { let name_value = parse_eq_and_lit_str(name.clone(), input, MYSQL_TYPE_NOTE)?; warn!( name, &format!( "use `#[diesel(mysql_type(name = \"{}\"))]` instead", name_value.value() ) ); Ok(Some(StructAttr::MysqlType( name, MysqlType { name: name_value }, ))) } "postgres" => { let pg_type = parse_postgres_type(name.clone(), input)?; let msg = match &pg_type { PostgresType::Fixed(oid, array_oid) => format!( "use `#[diesel(postgres_type(oid = {}, array_oid = {}))]` instead", oid.base10_parse::()?, array_oid.base10_parse::()? ), PostgresType::Lookup(name, Some(schema)) => format!( "use `#[diesel(postgres_type(name = \"{}\", schema = \"{}\"))]` instead", name.value(), schema.value() ), PostgresType::Lookup(name, None) => format!( "use `#[diesel(postgres_type(name = \"{}\"))]` instead", name.value(), ), }; warn!(name, &msg); Ok(Some(StructAttr::PostgresType(name, pg_type))) } _ => Ok(None), } } } #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] impl ParseDeprecated for FieldAttr { fn parse_deprecated(input: ParseStream) -> Result> { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "column_name" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, COLUMN_NAME_NOTE)?; warn!( name, &format!("use `#[diesel(column_name = {})]` instead", lit_str.value()) ); Ok(Some(FieldAttr::ColumnName(name, { lit_str.parse().unwrap_or_abort() }))) } "sql_type" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, SQL_TYPE_NOTE)?; warn!( name, &format!("use `#[diesel(sql_type = {})]` instead", lit_str.value()) ); Ok(Some(FieldAttr::SqlType(name, { lit_str.parse().unwrap_or_abort() }))) } _ => Ok(None), } } } #[cfg(feature = "nightly")] fn warn(_span: Span, message: &str, help: &str) { emit_warning!(_span, message; help = help); } #[cfg(not(feature = "nightly"))] fn warn(_span: Span, message: &str, help: &str) { 
eprintln!("warning: {}\n = help: {}\n", message, help); } } diesel_derives-2.0.2/src/deprecated/postgres_type.rs000064400000000000000000000040371046102023000207630ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{parenthesized, Ident, LitInt, LitStr}; use deprecated::utils::parse_eq_and_lit_str; use parsers::PostgresType; use util::{unknown_attribute, POSTGRES_TYPE_NOTE}; enum Attr { Oid(Ident, LitInt), ArrayOid(Ident, LitInt), TypeName(Ident, LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "oid" => Ok(Attr::Oid(name.clone(), { let lit_str = parse_eq_and_lit_str(name, input, POSTGRES_TYPE_NOTE)?; lit_str.parse()? })), "array_oid" => Ok(Attr::ArrayOid(name.clone(), { let lit_str = parse_eq_and_lit_str(name, input, POSTGRES_TYPE_NOTE)?; lit_str.parse()? })), "type_name" => Ok(Attr::TypeName( name.clone(), parse_eq_and_lit_str(name, input, POSTGRES_TYPE_NOTE)?, )), _ => unknown_attribute(&name, &["oid", "array_oid", "type_name"]), } } } pub fn parse_postgres_type(name: Ident, input: ParseStream) -> Result { if input.is_empty() { abort!( name.span(), "unexpected end of input, expected parentheses"; help = "The correct format looks like `#[diesel({})]`", POSTGRES_TYPE_NOTE ); } let content; parenthesized!(content in input); let mut oid = None; let mut array_oid = None; let mut type_name = None; for attr in Punctuated::::parse_terminated(&content)? 
{ match attr { Attr::Oid(ident, value) => oid = Some((ident, value)), Attr::ArrayOid(ident, value) => array_oid = Some((ident, value)), Attr::TypeName(ident, value) => type_name = Some((ident, value)), } } PostgresType::validate_and_build(&content, oid, array_oid, type_name, None) } diesel_derives-2.0.2/src/deprecated/primary_key.rs000064400000000000000000000006741046102023000204120ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{parenthesized, Ident}; pub fn parse_primary_key(name: Ident, input: ParseStream) -> Result> { if input.is_empty() { abort!(name.span(), "unexpected end of input, expected parentheses"); } let content; parenthesized!(content in input); content.parse_terminated(Ident::parse) } diesel_derives-2.0.2/src/deprecated/utils.rs000064400000000000000000000006761046102023000172210ustar 00000000000000use syn::parse::{ParseStream, Result}; use syn::token::Eq; use syn::{Ident, LitStr}; pub fn parse_eq_and_lit_str(name: Ident, input: ParseStream, help: &str) -> Result { if input.is_empty() { abort!( name.span(), "unexpected end of input, expected `=`"; help = "The correct format looks like `#[diesel({})]`", help ); } input.parse::()?; input.parse::() } diesel_derives-2.0.2/src/diesel_for_each_tuple.rs000064400000000000000000000023301046102023000202520ustar 00000000000000use proc_macro2::{Ident, Span, TokenStream}; #[cfg(not(feature = "32-column-tables"))] const MAX_TUPLE_SIZE: i32 = 16; #[cfg(all(not(feature = "64-column-tables"), feature = "32-column-tables"))] const MAX_TUPLE_SIZE: i32 = 32; #[cfg(all(not(feature = "128-column-tables"), feature = "64-column-tables"))] const MAX_TUPLE_SIZE: i32 = 64; #[cfg(feature = "128-column-tables")] const MAX_TUPLE_SIZE: i32 = 128; pub(crate) fn expand(input: Ident) -> TokenStream { let call_side = Span::mixed_site(); let pairs = (0..MAX_TUPLE_SIZE as usize) .map(|i| { let t = Ident::new(&format!("T{}", i), call_side); let st = 
Ident::new(&format!("ST{}", i), call_side); let tt = Ident::new(&format!("TT{}", i), call_side); let i = syn::Index::from(i as usize); quote!((#i) -> #t, #st, #tt,) }) .collect::>(); let mut out = Vec::with_capacity(MAX_TUPLE_SIZE as usize); for i in 0..MAX_TUPLE_SIZE { let items = &pairs[0..=i as usize]; let tuple = i + 1; out.push(quote! { #tuple { #(#items)* } }); } quote! { #input! { #(#out)* } } } diesel_derives-2.0.2/src/diesel_numeric_ops.rs000064400000000000000000000061221046102023000176210ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; use util::wrap_in_dummy_mod; pub fn derive(mut item: DeriveInput) -> TokenStream { let struct_name = &item.ident; { let where_clause = item .generics .where_clause .get_or_insert(parse_quote!(where)); where_clause.predicates.push(parse_quote!(Self: Expression)); where_clause.predicates.push_punct(Default::default()); } let (_, ty_generics, where_clause) = item.generics.split_for_impl(); let mut impl_generics = item.generics.clone(); impl_generics.params.push(parse_quote!(__Rhs)); let (impl_generics, _, _) = impl_generics.split_for_impl(); wrap_in_dummy_mod(quote! 
{ use diesel::internal::derives::numeric_ops as ops; use diesel::expression::{Expression, AsExpression}; use diesel::sql_types::ops::{Add, Sub, Mul, Div}; use diesel::sql_types::{SqlType, SingleValue}; impl #impl_generics ::std::ops::Add<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Add, <::SqlType as Add>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Add>::Rhs>, { type Output = ops::Add; fn add(self, rhs: __Rhs) -> Self::Output { ops::Add::new(self, rhs.as_expression()) } } impl #impl_generics ::std::ops::Sub<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Sub, <::SqlType as Sub>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Sub>::Rhs>, { type Output = ops::Sub; fn sub(self, rhs: __Rhs) -> Self::Output { ops::Sub::new(self, rhs.as_expression()) } } impl #impl_generics ::std::ops::Mul<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Mul, <::SqlType as Mul>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Mul>::Rhs>, { type Output = ops::Mul; fn mul(self, rhs: __Rhs) -> Self::Output { ops::Mul::new(self, rhs.as_expression()) } } impl #impl_generics ::std::ops::Div<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Div, <::SqlType as Div>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Div>::Rhs>, { type Output = ops::Div; fn div(self, rhs: __Rhs) -> Self::Output { ops::Div::new(self, rhs.as_expression()) } } }) } diesel_derives-2.0.2/src/diesel_public_if.rs000064400000000000000000000137621046102023000172420ustar 00000000000000use syn::{punctuated::Punctuated, DeriveInput}; pub(crate) fn expand(cfg: CfgInput, item: EntryWithVisibility) -> proc_macro2::TokenStream { item.hide_for_cfg(cfg.cfg, cfg.field_list) } pub struct CfgInput { cfg: syn::Meta, field_list: Vec, } impl syn::parse::Parse for CfgInput { fn parse(input: syn::parse::ParseStream) -> syn::Result { let mut cfg = 
Punctuated::::parse_terminated(input)?; if cfg.len() == 1 { Ok(Self { cfg: cfg .pop() .expect("There is exactly one element") .into_value(), field_list: Vec::new(), }) } else if cfg.len() == 2 { let value_1 = cfg .pop() .expect("There is exactly one element") .into_value(); let value_2 = cfg .pop() .expect("There is exactly one element") .into_value(); let (cfg, fields) = if matches!(&value_1, syn::Meta::List(v) if v.path.is_ident("public_fields")) { (value_2, value_1) } else if matches!(&value_2, syn::Meta::List(v) if v.path.is_ident("public_fields")) { (value_1, value_2) } else { panic!( "Incompatible argument list detected. `__diesel_public_if` \ expects a cfg argument and a optional public_fields" ) }; let field_list = if let syn::Meta::List(v) = fields { v.nested .into_iter() .map(|v| { if let syn::NestedMeta::Meta(syn::Meta::Path(p)) = v { p.get_ident() .expect("Field names need to be idents") .clone() } else { panic!("The field name key requires a list of field names as argument") } }) .collect() } else { unreachable!() }; Ok(Self { cfg, field_list }) } else { panic!( "Incompatible argument list detected. 
`__diesel_public_if` \ expects a cfg argument and a optional public_fields" ) } } } #[derive(Clone)] pub enum EntryWithVisibility { TraitFunction { meta: Vec, tail: proc_macro2::TokenStream, }, Item { meta: Vec, vis: syn::Visibility, tail: proc_macro2::TokenStream, }, Struct { meta: Vec, vis: syn::Visibility, def: syn::DataStruct, ident: syn::Ident, generics: syn::Generics, }, } impl syn::parse::Parse for EntryWithVisibility { fn parse(input: syn::parse::ParseStream) -> syn::Result { let meta = syn::Attribute::parse_outer(input)?; if input.peek(Token![fn]) || input.peek(Token![type]) { let tail = input.parse()?; Ok(Self::TraitFunction { meta, tail }) } else { let vis = input.parse()?; if input.peek(Token![struct]) { let s = DeriveInput::parse(input)?; if let syn::Data::Struct(def) = s.data { Ok(Self::Struct { meta, vis, def, generics: s.generics, ident: s.ident, }) } else { unreachable!() } } else { let tail = input.parse()?; Ok(Self::Item { meta, vis, tail }) } } } } impl EntryWithVisibility { fn hide_for_cfg( &self, cfg: syn::Meta, field_list: Vec, ) -> proc_macro2::TokenStream { match self { EntryWithVisibility::TraitFunction { meta, tail } if field_list.is_empty() => quote! { #(#meta)* #[cfg_attr(not(#cfg), doc(hidden))] #[cfg_attr(doc_cfg, doc(cfg(#cfg)))] #tail }, EntryWithVisibility::Item { meta, vis, tail } if field_list.is_empty() => { quote! { #(#meta)* #[cfg(not(#cfg))] #vis #tail #(#meta)* #[cfg(#cfg)] pub #tail } } EntryWithVisibility::Struct { meta, vis, def, ident, generics, } => { let fields1 = def.fields.iter(); let fields2 = def.fields.iter().map(|f| { let mut ret = f.clone(); if ret .ident .as_ref() .map(|i| field_list.contains(i)) .unwrap_or(false) { ret.vis = syn::Visibility::Public(syn::VisPublic { pub_token: Default::default(), }); } ret }); quote! 
{ #(#meta)* #[cfg(not(#cfg))] #vis struct #ident #generics { #(#fields1,)* } #(#meta)* #[cfg(#cfg)] #[non_exhaustive] pub struct #ident #generics { #(#fields2,)* } } } EntryWithVisibility::TraitFunction { .. } | EntryWithVisibility::Item { .. } => { panic!("Public field list is only supported for structs") } } } } diesel_derives-2.0.2/src/field.rs000064400000000000000000000141471046102023000150420ustar 00000000000000use proc_macro2::{Span, TokenStream}; use syn::spanned::Spanned; use syn::{Field as SynField, Ident, Index, Type}; use attrs::{parse_attributes, AttributeSpanWrapper, FieldAttr, SqlIdentifier}; pub struct Field { pub ty: Type, pub span: Span, pub name: FieldName, column_name: Option>, pub sql_type: Option>, pub serialize_as: Option>, pub deserialize_as: Option>, pub select_expression: Option>, pub select_expression_type: Option>, pub embed: Option>, } impl Field { pub fn from_struct_field(field: &SynField, index: usize) -> Self { let SynField { ident, attrs, ty, .. } = field; let mut column_name = None; let mut sql_type = None; let mut serialize_as = None; let mut deserialize_as = None; let mut embed = None; let mut select_expression = None; let mut select_expression_type = None; for attr in parse_attributes(attrs) { let attribute_span = attr.attribute_span; let ident_span = attr.ident_span; match attr.item { FieldAttr::ColumnName(_, value) => { column_name = Some(AttributeSpanWrapper { item: value, attribute_span, ident_span, }) } FieldAttr::SqlType(_, value) => { sql_type = Some(AttributeSpanWrapper { item: Type::Path(value), attribute_span, ident_span, }) } FieldAttr::SerializeAs(_, value) => { serialize_as = Some(AttributeSpanWrapper { item: Type::Path(value), attribute_span, ident_span, }) } FieldAttr::DeserializeAs(_, value) => { deserialize_as = Some(AttributeSpanWrapper { item: Type::Path(value), attribute_span, ident_span, }) } FieldAttr::SelectExpression(_, value) => { select_expression = Some(AttributeSpanWrapper { item: value, 
attribute_span, ident_span, }) } FieldAttr::SelectExpressionType(_, value) => { select_expression_type = Some(AttributeSpanWrapper { item: value, attribute_span, ident_span, }) } FieldAttr::Embed(_) => { embed = Some(AttributeSpanWrapper { item: true, attribute_span, ident_span, }) } } } let name = match ident.clone() { Some(x) => FieldName::Named(x), None => FieldName::Unnamed(index.into()), }; let span = match name { FieldName::Named(ref ident) => ident.span(), FieldName::Unnamed(_) => ty.span(), }; Self { ty: ty.clone(), span, name, column_name, sql_type, serialize_as, deserialize_as, select_expression, select_expression_type, embed, } } pub fn column_name(&self) -> SqlIdentifier { self.column_name .as_ref() .map(|a| a.item.clone()) .unwrap_or_else(|| match self.name { FieldName::Named(ref x) => x.into(), FieldName::Unnamed(ref x) => { abort!( x, "All fields of tuple structs must be annotated with `#[diesel(column_name)]`" ); } }) } pub fn ty_for_deserialize(&self) -> &Type { if let Some(AttributeSpanWrapper { item: value, .. 
}) = &self.deserialize_as { value } else { &self.ty } } pub(crate) fn embed(&self) -> bool { self.embed.as_ref().map(|a| a.item).unwrap_or(false) } } pub enum FieldName { Named(Ident), Unnamed(Index), } impl quote::ToTokens for FieldName { fn to_tokens(&self, tokens: &mut TokenStream) { match *self { FieldName::Named(ref x) => x.to_tokens(tokens), FieldName::Unnamed(ref x) => x.to_tokens(tokens), } } } /// We use this instead of directly `syn::Expr` to reduce compilation time /// /// `syn::Expr` does not properly support tuples when `syn/full` feature is /// not enabled, and that feature slightly increases compilation time #[allow(clippy::large_enum_variant)] pub enum SelectExpr { Expr(syn::Expr), Tuple { paren_token: syn::token::Paren, content: proc_macro2::TokenStream, }, } impl quote::ToTokens for SelectExpr { fn to_tokens(&self, tokens: &mut TokenStream) { match self { SelectExpr::Expr(ref e) => e.to_tokens(tokens), SelectExpr::Tuple { ref paren_token, ref content, } => paren_token.surround(tokens, |tokens| content.to_tokens(tokens)), } } } impl syn::parse::Parse for SelectExpr { fn parse(input: syn::parse::ParseStream) -> syn::Result { let lookahead = input.lookahead1(); if lookahead.peek(syn::token::Paren) { let content; let paren_token = syn::parenthesized!(content in input); Ok(Self::Tuple { paren_token, content: content.parse()?, }) } else { input.parse::().map(Self::Expr) } } } diesel_derives-2.0.2/src/from_sql_row.rs000064400000000000000000000027371046102023000164720ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; use model::Model; use util::{ty_for_foreign_derive, wrap_in_dummy_mod}; pub fn derive(mut item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, true); let struct_ty = ty_for_foreign_derive(&item, &model); { let where_clause = item .generics .where_clause .get_or_insert(parse_quote!(where)); where_clause .predicates .push(parse_quote!(__DB: diesel::backend::Backend)); where_clause .predicates 
.push(parse_quote!(__ST: diesel::sql_types::SingleValue)); where_clause .predicates .push(parse_quote!(Self: FromSql<__ST, __DB>)); } let (_, _, where_clause) = item.generics.split_for_impl(); let lifetimes = item.generics.lifetimes().collect::>(); let ty_params = item.generics.type_params().collect::>(); let const_params = item.generics.const_params().collect::>(); wrap_in_dummy_mod(quote! { use diesel::deserialize::{self, FromSql, Queryable}; // Need to put __ST and __DB after lifetimes but before const params impl<#(#lifetimes,)* __ST, __DB, #(#ty_params,)* #(#const_params,)*> Queryable<__ST, __DB> for #struct_ty #where_clause { type Row = Self; fn build(row: Self::Row) -> deserialize::Result { Ok(row) } } }) } diesel_derives-2.0.2/src/identifiable.rs000064400000000000000000000030771046102023000163760ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; use model::Model; use util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); let struct_name = &item.ident; let table_name = model.table_name(); let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let mut ref_generics = item.generics.clone(); ref_generics.params.push(parse_quote!('ident)); let (ref_generics, ..) = ref_generics.split_for_impl(); let (field_ty, field_name): (Vec<_>, Vec<_>) = model .primary_key_names .iter() .map(|pk| model.find_column(pk)) .map(|f| (&f.ty, &f.name)) .unzip(); wrap_in_dummy_mod(quote! 
{ use diesel::associations::{HasTable, Identifiable}; impl #impl_generics HasTable for #struct_name #ty_generics #where_clause { type Table = #table_name::table; fn table() -> Self::Table { #table_name::table } } impl #ref_generics Identifiable for &'ident #struct_name #ty_generics #where_clause { type Id = (#(&'ident #field_ty),*); fn id(self) -> Self::Id { (#(&self.#field_name),*) } } impl #ref_generics Identifiable for &'_ &'ident #struct_name #ty_generics #where_clause { type Id = (#(&'ident #field_ty),*); fn id(self) -> Self::Id { (#(&self.#field_name),*) } } }) } diesel_derives-2.0.2/src/insertable.rs000064400000000000000000000175261046102023000161130ustar 00000000000000use attrs::AttributeSpanWrapper; use field::Field; use model::Model; use proc_macro2::TokenStream; use quote::quote_spanned; use syn::{DeriveInput, Expr, Path, Type}; use util::{inner_of_option_ty, is_option_ty, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); let treat_none_as_default_value = model.treat_none_as_default_value(); let table_name = &model.table_name(); let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let mut generate_borrowed_insert = true; let mut direct_field_ty = Vec::with_capacity(model.fields().len()); let mut direct_field_assign = Vec::with_capacity(model.fields().len()); let mut ref_field_ty = Vec::with_capacity(model.fields().len()); let mut ref_field_assign = Vec::with_capacity(model.fields().len()); for field in model.fields() { match (field.serialize_as.as_ref(), field.embed()) { (None, true) => { direct_field_ty.push(field_ty_embed(field, None)); direct_field_assign.push(field_expr_embed(field, None)); ref_field_ty.push(field_ty_embed(field, Some(quote!(&'insert)))); ref_field_assign.push(field_expr_embed(field, Some(quote!(&)))); } (None, false) => { direct_field_ty.push(field_ty( field, table_name, None, treat_none_as_default_value, )); 
direct_field_assign.push(field_expr( field, table_name, None, treat_none_as_default_value, )); ref_field_ty.push(field_ty( field, table_name, Some(quote!(&'insert)), treat_none_as_default_value, )); ref_field_assign.push(field_expr( field, table_name, Some(quote!(&)), treat_none_as_default_value, )); } (Some(AttributeSpanWrapper { item: ty, .. }), false) => { direct_field_ty.push(field_ty_serialize_as( field, table_name, ty, treat_none_as_default_value, )); direct_field_assign.push(field_expr_serialize_as( field, table_name, ty, treat_none_as_default_value, )); generate_borrowed_insert = false; // as soon as we hit one field with #[diesel(serialize_as)] there is no point in generating the impl of Insertable for borrowed structs } (Some(AttributeSpanWrapper { attribute_span, .. }), true) => { abort!( attribute_span, "`#[diesel(embed)]` cannot be combined with `#[diesel(serialize_as)]`" ) } } } let insert_owned = quote! { impl #impl_generics Insertable<#table_name::table> for #struct_name #ty_generics #where_clause { type Values = <(#(#direct_field_ty,)*) as Insertable<#table_name::table>>::Values; fn values(self) -> <(#(#direct_field_ty,)*) as Insertable<#table_name::table>>::Values { (#(#direct_field_assign,)*).values() } } }; let insert_borrowed = if generate_borrowed_insert { let mut impl_generics = item.generics.clone(); impl_generics.params.push(parse_quote!('insert)); let (impl_generics, ..) = impl_generics.split_for_impl(); quote! { impl #impl_generics Insertable<#table_name::table> for &'insert #struct_name #ty_generics #where_clause { type Values = <(#(#ref_field_ty,)*) as Insertable<#table_name::table>>::Values; fn values(self) -> <(#(#ref_field_ty,)*) as Insertable<#table_name::table>>::Values { (#(#ref_field_assign,)*).values() } } } } else { quote! {} }; wrap_in_dummy_mod(quote! 
{ use diesel::insertable::Insertable; use diesel::internal::derives::insertable::UndecoratedInsertRecord; use diesel::prelude::*; #[allow(unused_qualifications)] #insert_owned #[allow(unused_qualifications)] #insert_borrowed impl #impl_generics UndecoratedInsertRecord<#table_name::table> for #struct_name #ty_generics #where_clause { } }) } fn field_ty_embed(field: &Field, lifetime: Option) -> TokenStream { let field_ty = &field.ty; let span = field.span; quote_spanned!(span=> #lifetime #field_ty) } fn field_expr_embed(field: &Field, lifetime: Option) -> TokenStream { let field_name = &field.name; quote!(#lifetime self.#field_name) } fn field_ty_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_default_value: bool, ) -> TokenStream { let column_name = field.column_name(); let span = field.span; if treat_none_as_default_value { let inner_ty = inner_of_option_ty(ty); quote_spanned! {span=> std::option::Option> } } else { quote_spanned! {span=> diesel::dsl::Eq< #table_name::#column_name, #ty, > } } } fn field_expr_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_default_value: bool, ) -> TokenStream { let field_name = &field.name; let column_name = field.column_name(); let column = quote!(#table_name::#column_name); if treat_none_as_default_value { if is_option_ty(ty) { quote!(self.#field_name.map(|x| #column.eq(::std::convert::Into::<#ty>::into(x)))) } else { quote!(std::option::Option::Some(#column.eq(::std::convert::Into::<#ty>::into(self.#field_name)))) } } else { quote!(#column.eq(::std::convert::Into::<#ty>::into(self.#field_name))) } } fn field_ty( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_default_value: bool, ) -> TokenStream { let column_name = field.column_name(); let span = field.span; if treat_none_as_default_value { let inner_ty = inner_of_option_ty(&field.ty); quote_spanned! {span=> std::option::Option> } } else { let inner_ty = &field.ty; quote_spanned! 
{span=> diesel::dsl::Eq< #table_name::#column_name, #lifetime #inner_ty, > } } } fn field_expr( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_default_value: bool, ) -> TokenStream { let field_name = &field.name; let column_name = field.column_name(); let column: Expr = parse_quote!(#table_name::#column_name); if treat_none_as_default_value { if is_option_ty(&field.ty) { if lifetime.is_some() { quote!(self.#field_name.as_ref().map(|x| #column.eq(x))) } else { quote!(self.#field_name.map(|x| #column.eq(x))) } } else { quote!(std::option::Option::Some(#column.eq(#lifetime self.#field_name))) } } else { quote!(#column.eq(#lifetime self.#field_name)) } } diesel_derives-2.0.2/src/lib.rs000064400000000000000000001340641046102023000145260ustar 00000000000000#![recursion_limit = "1024"] // Clippy lints #![allow( clippy::needless_doctest_main, clippy::needless_pass_by_value, clippy::map_unwrap_or )] #![warn( clippy::mut_mut, clippy::non_ascii_literal, clippy::similar_names, clippy::unicode_not_nfc, clippy::if_not_else, clippy::items_after_statements, clippy::used_underscore_binding, missing_copy_implementations )] extern crate proc_macro; extern crate proc_macro2; extern crate quote; #[macro_use] extern crate syn; #[macro_use] extern crate proc_macro_error; use proc_macro::TokenStream; mod attrs; mod deprecated; mod field; mod model; mod parsers; mod util; mod as_changeset; mod as_expression; mod associations; mod diesel_for_each_tuple; mod diesel_numeric_ops; mod diesel_public_if; mod from_sql_row; mod identifiable; mod insertable; mod query_id; mod queryable; mod queryable_by_name; mod selectable; mod sql_function; mod sql_type; mod valid_grouping; /// Implements `AsChangeset` /// /// To implement `AsChangeset` this derive needs to know the corresponding table /// type. By default it uses the `snake_case` type name with an added `s` from /// the current scope. /// It is possible to change this default by using `#[diesel(table_name = something)]`. 
/// /// If a field name of your struct differs /// from the name of the corresponding column, you can annotate the field with /// `#[diesel(column_name = some_column_name)]`. /// /// To provide custom serialization behavior for a field, you can use /// `#[diesel(serialize_as = SomeType)]`. If this attribute is present, Diesel /// will call `.into` on the corresponding field and serialize the instance of `SomeType`, /// rather than the actual field on your struct. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// Normally, Diesel produces two implementations of the `AsChangeset` trait for your /// struct using this derive: one for an owned version and one for a borrowed version. /// Using `#[diesel(serialize_as)]` implies a conversion using `.into` which consumes the underlying value. /// Hence, once you use `#[diesel(serialize_as)]`, Diesel can no longer insert borrowed /// versions of your struct. /// /// By default, any `Option` fields on the struct are skipped if their value is /// `None`. If you would like to assign `NULL` to the field instead, you can /// annotate your struct with `#[diesel(treat_none_as_null = true)]`. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(treat_none_as_null = true)]`, specifies that /// the derive should threat `None` values as `NULL`. By default /// `Option::::None` is just skipped. To insert a `NULL` using default /// behavior use `Option::>::Some(None)` /// * `#[diesel(table_name = path::to::table)]`, specifies a path to the table for which the /// current type is a changeset. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name. /// * `#[diesel(primary_key(id1, id2))]` to specify the struct field that /// that corresponds to the primary key. 
If not used, `id` will be /// assumed as primary key field /// /// ## Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column name /// of the current field to `some_column_name`. By default the field /// name is used as column name. /// * `#[diesel(serialize_as = SomeType)]`, instead of serializing the actual /// field type, Diesel will convert the field into `SomeType` using `.into` and /// serialize that instead. By default this derive will serialize directly using /// the actual field type. #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive( AsChangeset, attributes(diesel, table_name, column_name, primary_key, changeset_options) ) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(AsChangeset, attributes(diesel)) )] pub fn derive_as_changeset(input: TokenStream) -> TokenStream { as_changeset::derive(parse_macro_input!(input)).into() } /// Implements all required variants of `AsExpression` /// /// This derive will generate the following impls: /// /// - `impl AsExpression for YourType` /// - `impl AsExpression> for YourType` /// - `impl AsExpression for &'a YourType` /// - `impl AsExpression> for &'a YourType` /// - `impl AsExpression for &'a &'b YourType` /// - `impl AsExpression> for &'a &'b YourType` /// /// If your type is unsized, /// you can specify this by adding the annotation `#[diesel(not_sized)]` /// as attribute on the type. This will skip the impls for non-reference types. /// /// # Attributes: /// /// ## Required container attributes /// /// * `#[diesel(sql_type = SqlType)]`, to specify the sql type of the /// generated implementations. If the attribute exists multiple times /// impls for each sql type are generated. 
/// /// ## Optional container attributes /// /// * `#[diesel(not_sized)]`, to skip generating impls that require /// that the type is `Sized` #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(AsExpression, attributes(diesel, sql_type)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(AsExpression, attributes(diesel)) )] pub fn derive_as_expression(input: TokenStream) -> TokenStream { as_expression::derive(parse_macro_input!(input)).into() } /// Implement required traits for the associations API /// /// This derive implement support for diesel's associations api. Check the /// module level documentation of the `diesel::associations` module for details. /// /// This derive generates the following impls: /// * `impl BelongsTo for YourType` /// * `impl BelongsTo<&'a Parent> for YourType` /// /// # Attributes /// /// # Required container attributes /// /// * `#[diesel(belongs_to(User))]`, to specify a child-to-parent relation ship /// between the current type and the specified parent type (`User`). /// If this attribute is given multiple times, multiple relation ships /// are generated. `#[diesel(belongs_to(User, foreign_key = mykey))]` variant /// allows to specify the name of the foreign key. If the foreign key /// is not specified explicitly, the remote lower case type name with an /// appended `_id` is used as foreign key name. (`user_id` in this example /// case) /// /// # Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]` specifies a path to the table this /// type belongs to. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name. /// /// # Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column the current /// field maps to to `some_column_name`. 
By default the field name is used /// as column name. #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Associations, attributes(diesel, belongs_to, column_name, table_name)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Associations, attributes(diesel, belongs_to, column_name, table_name)) )] pub fn derive_associations(input: TokenStream) -> TokenStream { associations::derive(parse_macro_input!(input)).into() } /// Implement numeric operators for the current query node #[proc_macro_derive(DieselNumericOps)] pub fn derive_diesel_numeric_ops(input: TokenStream) -> TokenStream { diesel_numeric_ops::derive(parse_macro_input!(input)).into() } /// Implements `Queryable` for primitive types /// /// This derive is mostly useful to implement support deserializing /// into rust types not supported by diesel itself. /// /// There are no options or special considerations needed for this derive. #[proc_macro_error] #[proc_macro_derive(FromSqlRow, attributes(diesel))] pub fn derive_from_sql_row(input: TokenStream) -> TokenStream { from_sql_row::derive(parse_macro_input!(input)).into() } /// Implements `Identifiable` for references of the current type /// /// By default, the primary key field is assumed to be a single field called `id`. /// If it's not, you can put `#[diesel(primary_key(your_id))]` on your struct. /// If you have a composite primary key, the syntax is `#[diesel(primary_key(id1, id2))]`. /// /// By default, `#[derive(Identifiable)]` will assume that your table is /// in scope and its name is the plural form of your struct name. /// Diesel uses very simple pluralization rules. /// It only adds an `s` to the end, and converts `CamelCase` to `snake_case`. /// If your table name does not follow this convention or is not in scope, /// you can specify a path to the table with `#[diesel(table_name = path::to::table)]`. 
/// Our rules for inferring table names is considered public API. /// It will never change without a major version bump. /// /// This derive generates the following impls: /// * `impl Identifiable for &'a YourType` /// * `impl Identifiable for &'_ &'a YourType` /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]` specifies a path to the table this /// type belongs to. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name /// * `#[diesel(primary_key(id1, id2))]` to specify the struct field that /// that corresponds to the primary key. If not used, `id` will be /// assumed as primary key field /// /// # Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column the current /// field maps to to `some_column_name`. By default the field name is used /// as column name. #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Identifiable, attributes(diesel, table_name, column_name, primary_key)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Identifiable, attributes(diesel)) )] pub fn derive_identifiable(input: TokenStream) -> TokenStream { identifiable::derive(parse_macro_input!(input)).into() } /// Implements `Insertable` /// /// To implement `Insertable` this derive needs to know the corresponding table /// type. By default it uses the `snake_case` type name with an added `s` /// from the current scope. /// It is possible to change this default by using `#[diesel(table_name = something)]`. /// /// If a field name of your /// struct differs from the name of the corresponding column, /// you can annotate the field with `#[diesel(column_name = some_column_name)]`. /// /// Your struct can also contain fields which implement `Insertable`. 
This is /// useful when you want to have one field map to more than one column (for /// example, an enum that maps to a label and a value column). Add /// `#[diesel(embed)]` to any such fields. /// /// To provide custom serialization behavior for a field, you can use /// `#[diesel(serialize_as = SomeType)]`. If this attribute is present, Diesel /// will call `.into` on the corresponding field and serialize the instance of `SomeType`, /// rather than the actual field on your struct. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// Using `#[diesel(serialize_as)]` is **incompatible** with `#[diesel(embed)]`. /// Normally, Diesel produces two implementations of the `Insertable` trait for your /// struct using this derive: one for an owned version and one for a borrowed version. /// Using `#[diesel(serialize_as)]` implies a conversion using `.into` which consumes the underlying value. /// Hence, once you use `#[diesel(serialize_as)]`, Diesel can no longer insert borrowed /// versions of your struct. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]`, specifies a path to the table this type /// is insertable into. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name /// * `#[diesel(treat_none_as_default_value = false)]`, specifies that `None` values /// should be converted to `NULL` values on SQL side instead of being treated as `DEFAULT` /// value primitive. *Note*: This option may control if your query is stored in the /// prepared statement cache or not* /// /// ## Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column the current /// field maps to `some_column_name`. 
By default the field name is used /// as column name /// * `#[diesel(embed)]`, specifies that the current field maps not only /// to single database field, but is a struct that implements `Insertable` /// * `#[diesel(serialize_as = SomeType)]`, instead of serializing the actual /// field type, Diesel will convert the field into `SomeType` using `.into` and /// serialize that instead. By default this derive will serialize directly using /// the actual field type. /// /// # Examples /// /// If we want to customize the serialization during insert, we can use `#[diesel(serialize_as)]`. /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use diesel::{prelude::*, serialize::{ToSql, Output, self}, deserialize::{FromSqlRow}, expression::AsExpression, sql_types, backend::Backend}; /// # use schema::users; /// # use std::io::Write; /// # /// #[derive(Debug, FromSqlRow, AsExpression)] /// #[diesel(sql_type = sql_types::Text)] /// struct UppercaseString(pub String); /// /// impl Into for String { /// fn into(self) -> UppercaseString { /// UppercaseString(self.to_uppercase()) /// } /// } /// /// impl ToSql for UppercaseString /// where /// DB: Backend, /// String: ToSql, /// { /// fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result { /// self.0.to_sql(out) /// } /// } /// /// #[derive(Insertable, PartialEq, Debug)] /// #[diesel(table_name = users)] /// struct InsertableUser { /// id: i32, /// #[diesel(serialize_as = UppercaseString)] /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut connection_no_data(); /// # diesel::sql_query("CREATE TABLE users (id INTEGER PRIMARY KEY, name VARCHAR(255) NOT NULL)") /// # .execute(connection) /// # .unwrap(); /// let user = InsertableUser { /// id: 1, /// name: "thomas".to_string(), /// }; /// /// 
diesel::insert_into(users) /// .values(user) /// .execute(connection) /// .unwrap(); /// /// assert_eq!( /// Ok("THOMAS".to_string()), /// users.select(name).first(connection) /// ); /// # Ok(()) /// # } /// ``` #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Insertable, attributes(diesel, table_name, column_name)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Insertable, attributes(diesel)) )] pub fn derive_insertable(input: TokenStream) -> TokenStream { insertable::derive(parse_macro_input!(input)).into() } /// Implements `QueryId` /// /// For example, given this struct: /// /// ```rust /// # extern crate diesel; /// #[derive(diesel::query_builder::QueryId)] /// pub struct And { /// left: Left, /// right: Right, /// } /// ``` /// /// the following implementation will be generated /// /// ```rust /// # extern crate diesel; /// # struct And(Left, Right); /// # use diesel::query_builder::QueryId; /// impl QueryId for And /// where /// Left: QueryId, /// Right: QueryId, /// { /// type QueryId = And; /// /// const HAS_STATIC_QUERY_ID: bool = Left::HAS_STATIC_QUERY_ID && Right::HAS_STATIC_QUERY_ID; /// } /// ``` /// /// If the SQL generated by a struct is not uniquely identifiable by its type, /// meaning that `HAS_STATIC_QUERY_ID` should always be false, /// you should not derive this trait. /// In that case you should implement it manually instead. #[proc_macro_error] #[proc_macro_derive(QueryId)] pub fn derive_query_id(input: TokenStream) -> TokenStream { query_id::derive(parse_macro_input!(input)).into() } /// Implements `Queryable` to load the result of statically typed queries /// /// This trait can only be derived for structs, not enums. /// /// **Note**: When this trait is derived, it will assume that __all fields on /// your struct__ matches __all fields in the query__, including the order and /// count. 
This means that field order is significant if you are using /// `#[derive(Queryable)]`. __Field name has no effect__. /// /// To provide custom deserialization behavior for a field, you can use /// `#[diesel(deserialize_as = SomeType)]`. If this attribute is present, Diesel /// will deserialize the corresponding field into `SomeType`, rather than the /// actual field type on your struct and then call /// [`.try_into`](https://doc.rust-lang.org/stable/std/convert/trait.TryInto.html#tymethod.try_into) /// to convert it to the actual field type. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// (Note: all types that have `Into` automatically implement `TryInto`, /// for cases where your conversion is not faillible.) /// /// # Attributes /// /// ## Optional field attributes /// /// * `#[diesel(deserialize_as = Type)]`, instead of deserializing directly /// into the field type, the implementation will deserialize into `Type`. /// Then `Type` is converted via /// [`.try_into`](https://doc.rust-lang.org/stable/std/convert/trait.TryInto.html#tymethod.try_into) /// into the field type. By default this derive will deserialize directly into the field type /// /// # Examples /// /// If we just want to map a query to our struct, we can use `derive`. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # /// #[derive(Queryable, PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut establish_connection(); /// let first_user = users.first(connection)?; /// let expected = User { id: 1, name: "Sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// If we want to do additional work during deserialization, we can use /// `deserialize_as` to use a different implementation. /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # /// # use schema::users; /// # use diesel::backend::{self, Backend}; /// # use diesel::deserialize::{self, Queryable, FromSql}; /// # use diesel::sql_types::Text; /// # /// struct LowercaseString(String); /// /// impl Into for LowercaseString { /// fn into(self) -> String { /// self.0 /// } /// } /// /// impl Queryable for LowercaseString /// where /// DB: Backend, /// String: FromSql /// { /// /// type Row = String; /// /// fn build(s: String) -> deserialize::Result { /// Ok(LowercaseString(s.to_lowercase())) /// } /// } /// /// #[derive(Queryable, PartialEq, Debug)] /// struct User { /// id: i32, /// #[diesel(deserialize_as = LowercaseString)] /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut establish_connection(); /// let first_user = users.first(connection)?; /// let expected = User { id: 1, name: "sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// Alternatively, we can implement the trait for our struct manually. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # /// use schema::users; /// use diesel::deserialize::{self, Queryable, FromSqlRow}; /// use diesel::row::Row; /// /// # /* /// type DB = diesel::sqlite::Sqlite; /// # */ /// /// #[derive(PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// impl Queryable for User /// where /// (i32, String): FromSqlRow, /// { /// type Row = (i32, String); /// /// fn build((id, name): Self::Row) -> deserialize::Result { /// Ok(User { id, name: name.to_lowercase() }) /// } /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut establish_connection(); /// let first_user = users.first(connection)?; /// let expected = User { id: 1, name: "sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Queryable, attributes(diesel, column_name)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Queryable, attributes(diesel)) )] pub fn derive_queryable(input: TokenStream) -> TokenStream { queryable::derive(parse_macro_input!(input)).into() } /// Implements `QueryableByName` for untyped sql queries, such as that one generated /// by `sql_query` /// /// To derive this trait, Diesel needs to know the SQL type of each field. You /// can do this by either annotating your struct with `#[diesel(table_name = /// some_table)]` (in which case the SQL type will be /// `diesel::dsl::SqlTypeOf`), or by annotating each /// field with `#[diesel(sql_type = SomeType)]`. 
/// /// If the name of a field on your struct is different than the column in your /// `table!` declaration, or if you are deriving this trait on a tuple struct, /// you can annotate the field with `#[diesel(column_name = some_column)]`. For tuple /// structs, all fields must have this annotation. /// /// If a field is another struct which implements `QueryableByName`, /// instead of a column, you can annotate that struct with `#[diesel(embed)]`. /// Then all fields contained by that inner struct are loaded into /// the embedded struct. /// /// To provide custom deserialization behavior for a field, you can use /// `#[diesel(deserialize_as = SomeType)]`. If this attribute is present, Diesel /// will deserialize the corresponding field into `SomeType`, rather than the /// actual field type on your struct and then call `.into` to convert it to the /// actual field type. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]`, to specify that this type contains /// columns for the specified table. The path is relative to the current module. /// If no field attributes are specified the derive will use the sql type of /// the corresponding column. /// /// ## Optional field attributes /// /// * `#[diesel(column_name = some_column)]`, overrides the column name for /// a given field. If not set, the name of the field is used as column /// name. This attribute is required on tuple structs, if /// `#[diesel(table_name = some_table)]` is used, otherwise it's optional. /// * `#[diesel(sql_type = SomeType)]`, assumes `SomeType` as sql type of the /// corresponding field. This attributes has precedence over all other /// variants to specify the sql type. /// * `#[diesel(deserialize_as = Type)]`, instead of deserializing directly /// into the field type, the implementation will deserialize into `Type`. 
/// Then `Type` is converted via `.into()` into the field type. By default /// this derive will deserialize directly into the field type /// * `#[diesel(embed)]`, specifies that the current field maps not only /// single database column, but is a type that implements /// `QueryableByName` on it's own /// /// # Examples /// /// If we just want to map a query to our struct, we can use `derive`. /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use schema::users; /// # use diesel::sql_query; /// # /// #[derive(QueryableByName, PartialEq, Debug)] /// #[diesel(table_name = users)] /// struct User { /// id: i32, /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let connection = &mut establish_connection(); /// let first_user = sql_query("SELECT * FROM users ORDER BY id LIMIT 1") /// .get_result(connection)?; /// let expected = User { id: 1, name: "Sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// If we want to do additional work during deserialization, we can use /// `deserialize_as` to use a different implementation. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use diesel::sql_query; /// # use schema::users; /// # use diesel::backend::{self, Backend}; /// # use diesel::deserialize::{self, FromSql}; /// # /// struct LowercaseString(String); /// /// impl Into for LowercaseString { /// fn into(self) -> String { /// self.0 /// } /// } /// /// impl FromSql for LowercaseString /// where /// DB: Backend, /// String: FromSql, /// { /// fn from_sql(bytes: backend::RawValue) -> deserialize::Result { /// String::from_sql(bytes) /// .map(|s| LowercaseString(s.to_lowercase())) /// } /// } /// /// #[derive(QueryableByName, PartialEq, Debug)] /// #[diesel(table_name = users)] /// struct User { /// id: i32, /// #[diesel(deserialize_as = LowercaseString)] /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let connection = &mut establish_connection(); /// let first_user = sql_query("SELECT * FROM users ORDER BY id LIMIT 1") /// .get_result(connection)?; /// let expected = User { id: 1, name: "sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// The custom derive generates impls similar to the follownig one /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use schema::users; /// # use diesel::sql_query; /// # use diesel::deserialize::{self, QueryableByName, FromSql}; /// # use diesel::row::NamedRow; /// # use diesel::backend::Backend; /// # /// #[derive(PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// impl QueryableByName for User /// where /// DB: Backend, /// i32: FromSql, DB>, /// String: FromSql, DB>, /// { /// fn build<'a>(row: &impl NamedRow<'a, DB>) -> deserialize::Result { /// let id = NamedRow::get::, _>(row, "id")?; /// let name = NamedRow::get::, _>(row, "name")?; /// /// Ok(Self { 
id, name }) /// } /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let connection = &mut establish_connection(); /// let first_user = sql_query("SELECT * FROM users ORDER BY id LIMIT 1") /// .get_result(connection)?; /// let expected = User { id: 1, name: "Sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(QueryableByName, attributes(diesel, table_name, column_name, sql_type)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(QueryableByName, attributes(diesel)) )] pub fn derive_queryable_by_name(input: TokenStream) -> TokenStream { queryable_by_name::derive(parse_macro_input!(input)).into() } /// Implements `Selectable` /// /// To implement `Selectable` this derive needs to know the corresponding table /// type. By default it uses the `snake_case` type name with an added `s`. /// It is possible to change this default by using `#[diesel(table_name = something)]`. /// /// If the name of a field on your struct is different than the column in your /// `table!` declaration, or if you are deriving this trait on a tuple struct, /// you can annotate the field with `#[diesel(column_name = some_column)]`. For tuple /// structs, all fields must have this annotation. /// /// If a field is another struct which implements `Selectable`, /// instead of a column, you can annotate that struct with `#[diesel(embed)]`. /// Then all fields contained by that inner struct are selected as separate tuple. /// Fields from a inner struct can come from a different table, as long as the /// select clause is valid in current query. /// /// The derive enables using the `SelectableHelper::as_select` method to construct /// select clauses, in order to use LoadDsl, you might also check the /// `Queryable` trait and derive. 
/// /// # Attributes /// /// ## Type attributes /// /// * `#[diesel(table_name = path::to::table)]`, specifies a path to the table for which the /// current type is selectable. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name. /// /// ## Field attributes /// /// * `#[diesel(column_name = some_column)]`, overrides the column name for /// a given field. If not set, the name of the field is used as column /// name. /// * `#[diesel(embed)]`, specifies that the current field maps not only /// single database column, but is a type that implements /// `Selectable` on it's own /// * `#[diesel(select_expression = some_custom_select_expression)]`, overrides /// the entire select expression for the given field. It may be used to select with /// custom tuples, or specify `select_expression = my_table::some_field.is_not_null()`, /// or separate tables... /// It should be used in conjunction with `select_expression_type` (described below) /// * `#[diesel(select_expression_type = the_custom_select_expression_type]`, to be used /// in conjunction with `select_expression` (described above). /// For example: `#[diesel(select_expression_type = dsl::IsNotNull)]` #[proc_macro_error] #[proc_macro_derive(Selectable, attributes(diesel))] pub fn derive_selectable(input: TokenStream) -> TokenStream { selectable::derive(parse_macro_input!(input)).into() } /// Implement necessary traits for adding a new sql type /// /// This trait implements all necessary traits to define a /// new sql type. This is useful for adding support for unsupported /// or custom types on sql side. The sql type will be usable for /// all backends you specified via the attributes listed below. /// /// This derive will implement `NotNull`, `HasSqlType` and `SingleValue`. /// When using this derive macro, /// you need to specify how the type is represented on various backends. 
/// You don't need to specify every backend, /// only the ones supported by your type. /// /// For PostgreSQL, add `#[diesel(postgres_type(name = "pg_type_name", schema = "pg_schema_name"))]` /// or `#[diesel(postgres_type(oid = "some_oid", array_oid = "some_oid"))]` for /// builtin types. /// For MySQL, specify which variant of `MysqlType` should be used /// by adding `#[diesel(mysql_type(name = "Variant"))]`. /// For SQLite, specify which variant of `SqliteType` should be used /// by adding `#[diesel(sqlite_type(name = "Variant"))]`. /// /// # Attributes /// /// ## Type attributes /// /// * `#[diesel(postgres_type(name = "TypeName", schema = "public"))]` specifies support for /// a postgresql type with the name `TypeName` in the schema `public`. Prefer this variant /// for types with no stable OID (== everything but the builtin types). It's possible to leaf /// of the `schema` part. In that case diesel defaults to the default postgres search path. /// * `#[diesel(postgres_type(oid = 42, array_oid = 142))]`, specifies support for a /// postgresql type with the given `oid` and `array_oid`. This variant /// should only be used with types that have a stable OID. /// * `#[diesel(sqlite_type(name = "TypeName"))]`, specifies support for a sqlite type /// with the given name. `TypeName` needs to be one of the possible values /// in `SqliteType` /// * `#[diesel(mysql_type(name = "TypeName"))]`, specifies support for a mysql type /// with the given name. 
`TypeName` needs to be one of the possible values /// in `MysqlType` #[proc_macro_error] #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(SqlType, attributes(diesel, postgres, sqlite_type, mysql_type)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(SqlType, attributes(diesel)) )] pub fn derive_sql_type(input: TokenStream) -> TokenStream { sql_type::derive(parse_macro_input!(input)).into() } /// Implements `ValidGrouping` /// /// This trait can be automatically derived for structs with no type parameters /// which are never aggregate, as well as for structs which are `NonAggregate` /// when all type parameters are `NonAggregate`. For example: /// /// ```ignore /// #[derive(ValidGrouping)] /// struct LiteralOne; /// /// #[derive(ValidGrouping)] /// struct Plus(Lhs, Rhs); /// /// // The following impl will be generated: /// /// impl ValidGrouping for LiteralOne { /// type IsAggregate = is_aggregate::Never; /// } /// /// impl ValidGrouping for Plus /// where /// Lhs: ValidGrouping, /// Rhs: ValidGrouping, /// Lhs::IsAggregate: MixedAggregates, /// { /// type IsAggregate = >::Output; /// } /// ``` /// /// For types which are always considered aggregate (such as an aggregate /// function), annotate your struct with `#[diesel(aggregate)]` to set `IsAggregate` /// explicitly to `is_aggregate::Yes`. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(aggregate)]` for cases where the type represents an aggregating /// SQL expression #[proc_macro_error] #[proc_macro_derive(ValidGrouping, attributes(diesel))] pub fn derive_valid_grouping(input: TokenStream) -> TokenStream { valid_grouping::derive(parse_macro_input!(input)).into() } /// Declare a sql function for use in your code. /// /// Diesel only provides support for a very small number of SQL functions. 
/// This macro enables you to add additional functions from the SQL standard, /// as well as any custom functions your application might have. /// /// The syntax for this macro is very similar to that of a normal Rust function, /// except the argument and return types will be the SQL types being used. /// Typically these types will come from [`diesel::sql_types`](../diesel/sql_types/index.html) /// /// This macro will generate two items. A function with the name that you've /// given, and a module with a helper type representing the return type of your /// function. For example, this invocation: /// /// ```ignore /// sql_function!(fn lower(x: Text) -> Text); /// ``` /// /// will generate this code: /// /// ```ignore /// pub fn lower(x: X) -> lower::HelperType { /// ... /// } /// /// pub(crate) mod lower { /// pub type HelperType = ...; /// } /// ``` /// /// If you are using this macro for part of a library, where the function is /// part of your public API, it is highly recommended that you re-export this /// helper type with the same name as your function. This is the standard /// structure: /// /// ```ignore /// pub mod functions { /// use super::types::*; /// use diesel::sql_types::*; /// /// sql_function! { /// /// Represents the Pg `LENGTH` function used with `tsvector`s. /// fn length(x: TsVector) -> Integer; /// } /// } /// /// pub mod helper_types { /// /// The return type of `length(expr)` /// pub type Length = functions::length::HelperType; /// } /// /// pub mod dsl { /// pub use functions::*; /// pub use helper_types::*; /// } /// ``` /// /// Most attributes given to this macro will be put on the generated function /// (including doc comments). /// /// # Adding Doc Comments /// /// ```no_run /// # extern crate diesel; /// # use diesel::*; /// # /// # table! { crates { id -> Integer, name -> VarChar, } } /// # /// use diesel::sql_types::Text; /// /// sql_function! { /// /// Represents the `canon_crate_name` SQL function, created in /// /// migration .... 
/// fn canon_crate_name(a: Text) -> Text; /// } /// /// # fn main() { /// # use self::crates::dsl::*; /// let target_name = "diesel"; /// crates.filter(canon_crate_name(name).eq(canon_crate_name(target_name))); /// // This will generate the following SQL /// // SELECT * FROM crates WHERE canon_crate_name(crates.name) = canon_crate_name($1) /// # } /// ``` /// /// # Special Attributes /// /// There are a handful of special attributes that Diesel will recognize. They /// are: /// /// - `#[aggregate]` /// - Indicates that this is an aggregate function, and that `NonAggregate` /// should not be implemented. /// - `#[sql_name = "name"]` /// - The SQL to be generated is different than the Rust name of the function. /// This can be used to represent functions which can take many argument /// types, or to capitalize function names. /// /// Functions can also be generic. Take the definition of `sum` for an example /// of all of this: /// /// ```no_run /// # extern crate diesel; /// # use diesel::*; /// # /// # table! { crates { id -> Integer, name -> VarChar, } } /// # /// use diesel::sql_types::Foldable; /// /// sql_function! { /// #[aggregate] /// #[sql_name = "SUM"] /// fn sum(expr: ST) -> ST::Sum; /// } /// /// # fn main() { /// # use self::crates::dsl::*; /// crates.select(sum(id)); /// # } /// ``` /// /// # SQL Functions without Arguments /// /// A common example is ordering a query using the `RANDOM()` sql function, /// which can be implemented using `sql_function!` like this: /// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # table! { crates { id -> Integer, name -> VarChar, } } /// # /// sql_function!(fn random() -> Text); /// /// # fn main() { /// # use self::crates::dsl::*; /// crates.order(random()); /// # } /// ``` /// /// # Use with SQLite /// /// On most backends, the implementation of the function is defined in a /// migration using `CREATE FUNCTION`. On SQLite, the function is implemented in /// Rust instead. 
You must call `register_impl` or /// `register_nondeterministic_impl` with every connection before you can use /// the function. /// /// These functions will only be generated if the `sqlite` feature is enabled, /// and the function is not generic. Generic functions and variadic functions /// are not supported on SQLite. /// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # #[cfg(feature = "sqlite")] /// # fn main() { /// # run_test().unwrap(); /// # } /// # /// # #[cfg(not(feature = "sqlite"))] /// # fn main() { /// # } /// # /// use diesel::sql_types::{Integer, Double}; /// sql_function!(fn add_mul(x: Integer, y: Integer, z: Double) -> Double); /// /// # #[cfg(feature = "sqlite")] /// # fn run_test() -> Result<(), Box<::std::error::Error>> { /// let connection = &mut SqliteConnection::establish(":memory:")?; /// /// add_mul::register_impl(connection, |x: i32, y: i32, z: f64| { /// (x + y) as f64 * z /// })?; /// /// let result = select(add_mul(1, 2, 1.5)) /// .get_result::(connection)?; /// assert_eq!(4.5, result); /// # Ok(()) /// # } /// ``` /// /// ## Panics /// /// If an implementation of the custom function panics and unwinding is enabled, the panic is /// caught and the function returns to libsqlite with an error. It cannot propagate the panics due /// to the FFI bounary. /// /// This is is the same for [custom aggregate functions](#custom-aggregate-functions). /// /// ## Custom Aggregate Functions /// /// Custom aggregate functions can be created in SQLite by adding an `#[aggregate]` /// attribute inside of `sql_function`. `register_impl` needs to be called on /// the generated function with a type implementing the /// [SqliteAggregateFunction](../diesel/sqlite/trait.SqliteAggregateFunction.html) /// trait as a type parameter as shown in the examples below. 
/// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # #[cfg(feature = "sqlite")] /// # fn main() { /// # run().unwrap(); /// # } /// # /// # #[cfg(not(feature = "sqlite"))] /// # fn main() { /// # } /// use diesel::sql_types::Integer; /// # #[cfg(feature = "sqlite")] /// use diesel::sqlite::SqliteAggregateFunction; /// /// sql_function! { /// #[aggregate] /// fn my_sum(x: Integer) -> Integer; /// } /// /// #[derive(Default)] /// struct MySum { sum: i32 } /// /// # #[cfg(feature = "sqlite")] /// impl SqliteAggregateFunction for MySum { /// type Output = i32; /// /// fn step(&mut self, expr: i32) { /// self.sum += expr; /// } /// /// fn finalize(aggregator: Option) -> Self::Output { /// aggregator.map(|a| a.sum).unwrap_or_default() /// } /// } /// # table! { /// # players { /// # id -> Integer, /// # score -> Integer, /// # } /// # } /// /// # #[cfg(feature = "sqlite")] /// fn run() -> Result<(), Box> { /// # use self::players::dsl::*; /// let connection = &mut SqliteConnection::establish(":memory:")?; /// # diesel::sql_query("create table players (id integer primary key autoincrement, score integer)") /// # .execute(connection) /// # .unwrap(); /// # diesel::sql_query("insert into players (score) values (10), (20), (30)") /// # .execute(connection) /// # .unwrap(); /// /// my_sum::register_impl::(connection)?; /// /// let total_score = players.select(my_sum(score)) /// .get_result::(connection)?; /// /// println!("The total score of all the players is: {}", total_score); /// /// # assert_eq!(60, total_score); /// Ok(()) /// } /// ``` /// /// With multiple function arguments the arguments are passed as a tuple to `SqliteAggregateFunction` /// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # #[cfg(feature = "sqlite")] /// # fn main() { /// # run().unwrap(); /// # } /// # /// # #[cfg(not(feature = "sqlite"))] /// # fn main() { /// # } /// use diesel::sql_types::{Float, Nullable}; /// # #[cfg(feature = "sqlite")] /// use 
diesel::sqlite::SqliteAggregateFunction; /// /// sql_function! { /// #[aggregate] /// fn range_max(x0: Float, x1: Float) -> Nullable; /// } /// /// #[derive(Default)] /// struct RangeMax { max_value: Option } /// /// # #[cfg(feature = "sqlite")] /// impl SqliteAggregateFunction<(T, T)> for RangeMax { /// type Output = Option; /// /// fn step(&mut self, (x0, x1): (T, T)) { /// # let max = if x0 >= x1 { /// # x0 /// # } else { /// # x1 /// # }; /// # /// # self.max_value = match self.max_value { /// # Some(current_max_value) if max > current_max_value => Some(max), /// # None => Some(max), /// # _ => self.max_value, /// # }; /// // Compare self.max_value to x0 and x1 /// } /// /// fn finalize(aggregator: Option) -> Self::Output { /// aggregator?.max_value /// } /// } /// # table! { /// # student_avgs { /// # id -> Integer, /// # s1_avg -> Float, /// # s2_avg -> Float, /// # } /// # } /// /// # #[cfg(feature = "sqlite")] /// fn run() -> Result<(), Box> { /// # use self::student_avgs::dsl::*; /// let connection = &mut SqliteConnection::establish(":memory:")?; /// # diesel::sql_query("create table student_avgs (id integer primary key autoincrement, s1_avg float, s2_avg float)") /// # .execute(connection) /// # .unwrap(); /// # diesel::sql_query("insert into student_avgs (s1_avg, s2_avg) values (85.5, 90), (79.8, 80.1)") /// # .execute(connection) /// # .unwrap(); /// /// range_max::register_impl::, _, _>(connection)?; /// /// let result = student_avgs.select(range_max(s1_avg, s2_avg)) /// .get_result::>(connection)?; /// /// if let Some(max_semeseter_avg) = result { /// println!("The largest semester average is: {}", max_semeseter_avg); /// } /// /// # assert_eq!(Some(90f32), result); /// Ok(()) /// } /// ``` #[proc_macro] pub fn sql_function_proc(input: TokenStream) -> TokenStream { sql_function::expand(parse_macro_input!(input)).into() } /// This is an internal diesel macro that /// helps to implement all traits for tuples of /// various sizes #[doc(hidden)] 
#[proc_macro] pub fn __diesel_for_each_tuple(input: TokenStream) -> TokenStream { diesel_for_each_tuple::expand(parse_macro_input!(input)).into() } /// This is an internal diesel macro that /// helps to restrict the visibility of an item based /// on a feature flag #[doc(hidden)] #[proc_macro_attribute] pub fn __diesel_public_if(attrs: TokenStream, input: TokenStream) -> TokenStream { diesel_public_if::expand(parse_macro_input!(attrs), parse_macro_input!(input)).into() } diesel_derives-2.0.2/src/model.rs000064400000000000000000000124311046102023000150510ustar 00000000000000use proc_macro2::Span; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{ Data, DataStruct, DeriveInput, Field as SynField, Fields, FieldsNamed, FieldsUnnamed, Ident, LitBool, Path, Type, }; use attrs::{parse_attributes, StructAttr}; use field::Field; use parsers::{BelongsTo, MysqlType, PostgresType, SqliteType}; use util::camel_to_snake; pub struct Model { name: Path, table_name: Option, pub primary_key_names: Vec, treat_none_as_default_value: Option, treat_none_as_null: Option, pub belongs_to: Vec, pub sql_types: Vec, pub aggregate: bool, pub not_sized: bool, pub foreign_derive: bool, pub mysql_type: Option, pub sqlite_type: Option, pub postgres_type: Option, fields: Vec, } impl Model { pub fn from_item(item: &DeriveInput, allow_unit_structs: bool) -> Self { let DeriveInput { data, ident, attrs, .. } = item; let fields = match *data { Data::Struct(DataStruct { fields: Fields::Named(FieldsNamed { ref named, .. }), .. }) => Some(named), Data::Struct(DataStruct { fields: Fields::Unnamed(FieldsUnnamed { ref unnamed, .. }), .. 
}) => Some(unnamed), _ if !allow_unit_structs => { abort_call_site!("This derive can only be used on non-unit structs") } _ => None, }; let mut table_name = None; let mut primary_key_names = vec![Ident::new("id", Span::call_site())]; let mut treat_none_as_default_value = None; let mut treat_none_as_null = None; let mut belongs_to = vec![]; let mut sql_types = vec![]; let mut aggregate = false; let mut not_sized = false; let mut foreign_derive = false; let mut mysql_type = None; let mut sqlite_type = None; let mut postgres_type = None; for attr in parse_attributes(attrs) { match attr.item { StructAttr::SqlType(_, value) => sql_types.push(Type::Path(value)), StructAttr::TableName(_, value) => table_name = Some(value), StructAttr::PrimaryKey(_, keys) => { primary_key_names = keys.into_iter().collect(); } StructAttr::TreatNoneAsDefaultValue(_, val) => { treat_none_as_default_value = Some(val) } StructAttr::TreatNoneAsNull(_, val) => treat_none_as_null = Some(val), StructAttr::BelongsTo(_, val) => belongs_to.push(val), StructAttr::Aggregate(_) => aggregate = true, StructAttr::NotSized(_) => not_sized = true, StructAttr::ForeignDerive(_) => foreign_derive = true, StructAttr::MysqlType(_, val) => mysql_type = Some(val), StructAttr::SqliteType(_, val) => sqlite_type = Some(val), StructAttr::PostgresType(_, val) => postgres_type = Some(val), } } let name = Ident::new(&infer_table_name(&ident.to_string()), ident.span()).into(); Self { name, table_name, primary_key_names, treat_none_as_default_value, treat_none_as_null, belongs_to, sql_types, aggregate, not_sized, foreign_derive, mysql_type, sqlite_type, postgres_type, fields: fields_from_item_data(fields), } } pub fn table_name(&self) -> &Path { self.table_name.as_ref().unwrap_or(&self.name) } pub fn fields(&self) -> &[Field] { &self.fields } pub fn find_column(&self, column_name: &Ident) -> &Field { self.fields() .iter() .find(|f| f.column_name() == *column_name) .unwrap_or_else(|| abort!(column_name, "No field with column 
name {}", column_name)) } pub fn has_table_name_attribute(&self) -> bool { self.table_name.is_some() } pub fn treat_none_as_default_value(&self) -> bool { self.treat_none_as_default_value .as_ref() .map(|v| v.value()) .unwrap_or(true) } pub fn treat_none_as_null(&self) -> bool { self.treat_none_as_null .as_ref() .map(|v| v.value()) .unwrap_or(false) } } fn fields_from_item_data(fields: Option<&Punctuated>) -> Vec { fields .map(|fields| { fields .iter() .enumerate() .map(|(i, f)| Field::from_struct_field(f, i)) .collect::>() }) .unwrap_or_default() } pub fn infer_table_name(name: &str) -> String { let mut result = camel_to_snake(name); result.push('s'); result } #[test] fn infer_table_name_pluralizes_and_downcases() { assert_eq!("foos", &infer_table_name("Foo")); assert_eq!("bars", &infer_table_name("Bar")); } #[test] fn infer_table_name_properly_handles_underscores() { assert_eq!("foo_bars", &infer_table_name("FooBar")); assert_eq!("foo_bar_bazs", &infer_table_name("FooBarBaz")); } diesel_derives-2.0.2/src/parsers/belongs_to.rs000064400000000000000000000022541046102023000175650ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, TypePath}; use util::{parse_eq, unknown_attribute, BELONGS_TO_NOTE}; enum Attr { ForeignKey(Ident, Ident), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "foreign_key" => Ok(Attr::ForeignKey(name, parse_eq(input, BELONGS_TO_NOTE)?)), _ => unknown_attribute(&name, &["foreign_key"]), } } } pub struct BelongsTo { pub parent: TypePath, pub foreign_key: Option, } impl Parse for BelongsTo { fn parse(input: ParseStream) -> Result { let parent = input.parse()?; if !input.is_empty() { input.parse::()?; } let mut foreign_key = None; for attr in Punctuated::::parse_terminated(input)? 
{ match attr { Attr::ForeignKey(_, value) => foreign_key = Some(value), } } Ok(BelongsTo { parent, foreign_key, }) } } diesel_derives-2.0.2/src/parsers/mod.rs000064400000000000000000000003411046102023000162040ustar 00000000000000mod belongs_to; mod mysql_type; mod postgres_type; mod sqlite_type; pub use self::belongs_to::BelongsTo; pub use self::mysql_type::MysqlType; pub use self::postgres_type::PostgresType; pub use self::sqlite_type::SqliteType; diesel_derives-2.0.2/src/parsers/mysql_type.rs000064400000000000000000000022461046102023000176410ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, LitStr}; use util::{parse_eq, unknown_attribute, MYSQL_TYPE_NOTE}; enum Attr { Name(Ident, LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "name" => Ok(Attr::Name(name, parse_eq(input, MYSQL_TYPE_NOTE)?)), _ => unknown_attribute(&name, &["name"]), } } } pub struct MysqlType { pub name: LitStr, } impl Parse for MysqlType { fn parse(input: ParseStream) -> Result { let mut name = None; for attr in Punctuated::::parse_terminated(input)? 
{ match attr { Attr::Name(_, value) => name = Some(value), } } if let Some(name) = name { Ok(MysqlType { name }) } else { abort!( input.span(), "expected attribute `name`"; help = "The correct format looks like #[diesel({})]", MYSQL_TYPE_NOTE ); } } } diesel_derives-2.0.2/src/parsers/postgres_type.rs000064400000000000000000000065761046102023000203540ustar 00000000000000use proc_macro_error::abort; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, LitInt, LitStr}; use util::{parse_eq, unknown_attribute, POSTGRES_TYPE_NOTE, POSTGRES_TYPE_NOTE_ID}; enum Attr { Oid(Ident, LitInt), ArrayOid(Ident, LitInt), Name(Ident, LitStr), Schema(Ident, LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "oid" => Ok(Attr::Oid(name, parse_eq(input, POSTGRES_TYPE_NOTE_ID)?)), "array_oid" => Ok(Attr::ArrayOid( name, parse_eq(input, POSTGRES_TYPE_NOTE_ID)?, )), "name" => Ok(Attr::Name(name, parse_eq(input, POSTGRES_TYPE_NOTE)?)), "schema" => Ok(Attr::Schema(name, parse_eq(input, POSTGRES_TYPE_NOTE)?)), _ => unknown_attribute(&name, &["oid", "array_oid", "name", "schema"]), } } } pub enum PostgresType { Fixed(LitInt, LitInt), Lookup(LitStr, Option), } impl Parse for PostgresType { fn parse(input: ParseStream) -> Result { let mut oid = None; let mut array_oid = None; let mut name = None; let mut schema = None; for attr in Punctuated::::parse_terminated(input)? 
{ match attr { Attr::Oid(ident, value) => oid = Some((ident, value)), Attr::ArrayOid(ident, value) => array_oid = Some((ident, value)), Attr::Name(ident, value) => name = Some((ident, value)), Attr::Schema(ident, value) => schema = Some((ident, value)), } } Self::validate_and_build(input, oid, array_oid, name, schema) } } impl PostgresType { pub fn validate_and_build( input: ParseStream, oid: Option<(Ident, LitInt)>, array_oid: Option<(Ident, LitInt)>, name: Option<(Ident, LitStr)>, schema: Option<(Ident, LitStr)>, ) -> Result { let help = format!( "The correct format looks like either `#[diesel({})]` or `#[diesel({})]`", POSTGRES_TYPE_NOTE, POSTGRES_TYPE_NOTE_ID ); if let Some((_, name)) = name { if let Some((oid, _)) = oid { abort!( oid, "unexpected `oid` when `name` is present"; help = "{}", help ); } else if let Some((array_oid, _)) = array_oid { abort!( array_oid, "unexpected `array_oid` when `name` is present"; help = "{}", help ); } Ok(PostgresType::Lookup(name, schema.map(|s| s.1))) } else if let Some((schema, lit)) = schema { abort!( schema, "expected `name` to be also present"; help = "make sure `name` is present, `#[diesel(postgres_type(name = \"...\", schema = \"{}\"))]`", lit.value() ); } else if let (Some((_, oid)), Some((_, array_oid))) = (oid, array_oid) { Ok(PostgresType::Fixed(oid, array_oid)) } else { abort!( input.span(), "expected `oid` and `array_oid` attribute or `name` attribute"; help = "{}", help ); } } } diesel_derives-2.0.2/src/parsers/sqlite_type.rs000064400000000000000000000022541046102023000177740ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, LitStr}; use util::{parse_eq, unknown_attribute, SQLITE_TYPE_NOTE}; enum Attr { Name(Ident, LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "name" => Ok(Attr::Name(name, parse_eq(input, 
SQLITE_TYPE_NOTE)?)), _ => unknown_attribute(&name, &["name"]), } } } pub struct SqliteType { pub name: LitStr, } impl Parse for SqliteType { fn parse(input: ParseStream) -> Result { let mut name = None; for attr in Punctuated::::parse_terminated(input)? { match attr { Attr::Name(_, value) => name = Some(value), } } if let Some(name) = name { Ok(SqliteType { name }) } else { abort!( input.span(), "expected attribute `name`"; help = "The correct format looks like #[diesel({})]", SQLITE_TYPE_NOTE ); } } } diesel_derives-2.0.2/src/query_id.rs000064400000000000000000000022711046102023000155730ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; use util::wrap_in_dummy_mod; pub fn derive(mut item: DeriveInput) -> TokenStream { for ty_param in item.generics.type_params_mut() { ty_param.bounds.push(parse_quote!(QueryId)); } let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let struct_name = &item.ident; let lifetimes = item.generics.lifetimes(); let ty_params = item .generics .type_params() .map(|ty_param| &ty_param.ident) .collect::>(); let query_id_ty_params = ty_params .iter() .map(|ty_param| quote!(<#ty_param as QueryId>::QueryId)); let has_static_query_id = ty_params .iter() .map(|ty_param| quote!(<#ty_param as QueryId>::HAS_STATIC_QUERY_ID)); wrap_in_dummy_mod(quote! 
{ use diesel::query_builder::QueryId; #[allow(non_camel_case_types)] impl #impl_generics QueryId for #struct_name #ty_generics #where_clause { type QueryId = #struct_name<#(#lifetimes,)* #(#query_id_ty_params,)*>; const HAS_STATIC_QUERY_ID: bool = #(#has_static_query_id &&)* true; } }) } diesel_derives-2.0.2/src/queryable.rs000064400000000000000000000037311046102023000157450ustar 00000000000000use proc_macro2::{Span, TokenStream}; use syn::{DeriveInput, Ident, Index}; use field::Field; use model::Model; use util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); let struct_name = &item.ident; let field_ty = &model .fields() .iter() .map(Field::ty_for_deserialize) .collect::>(); let build_expr = model.fields().iter().enumerate().map(|(i, f)| { let field_name = &f.name; let i = Index::from(i); quote!(#field_name: row.#i.try_into()?) }); let sql_type = &(0..model.fields().len()) .map(|i| { let i = Ident::new(&format!("__ST{}", i), Span::call_site()); quote!(#i) }) .collect::>(); let (_, ty_generics, _) = item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics .params .push(parse_quote!(__DB: diesel::backend::Backend)); for id in 0..model.fields().len() { let ident = Ident::new(&format!("__ST{}", id), Span::call_site()); generics.params.push(parse_quote!(#ident)); } { let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); where_clause .predicates .push(parse_quote!((#(#field_ty,)*): FromStaticSqlRow<(#(#sql_type,)*), __DB>)); } let (impl_generics, _, where_clause) = generics.split_for_impl(); wrap_in_dummy_mod(quote! 
{ use diesel::deserialize::{self, FromStaticSqlRow, Queryable}; use diesel::row::{Row, Field}; use std::convert::TryInto; impl #impl_generics Queryable<(#(#sql_type,)*), __DB> for #struct_name #ty_generics #where_clause { type Row = (#(#field_ty,)*); fn build(row: Self::Row) -> deserialize::Result { Ok(Self { #(#build_expr,)* }) } } }) } diesel_derives-2.0.2/src/queryable_by_name.rs000064400000000000000000000071541046102023000174420ustar 00000000000000use proc_macro2::{Span, TokenStream}; use syn::{DeriveInput, Ident, LitStr, Type}; use attrs::AttributeSpanWrapper; use field::{Field, FieldName}; use model::Model; use util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); let struct_name = &item.ident; let fields = &model.fields().iter().map(get_ident).collect::>(); let field_names = model.fields().iter().map(|f| &f.name); let initial_field_expr = model.fields().iter().map(|f| { let field_ty = &f.ty; if f.embed() { quote!(<#field_ty as QueryableByName<__DB>>::build(row)?) } else { let deserialize_ty = f.ty_for_deserialize(); let name = f.column_name(); let name = LitStr::new(&name.to_string(), name.span()); quote!( { let field = diesel::row::NamedRow::get(row, #name)?; <#deserialize_ty as Into<#field_ty>>::into(field) } ) } }); let (_, ty_generics, ..) = item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics .params .push(parse_quote!(__DB: diesel::backend::Backend)); for field in model.fields() { let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); let field_ty = field.ty_for_deserialize(); if field.embed() { where_clause .predicates .push(parse_quote!(#field_ty: QueryableByName<__DB>)); } else { let st = sql_type(field, &model); where_clause .predicates .push(parse_quote!(#field_ty: diesel::deserialize::FromSql<#st, __DB>)); } } let (impl_generics, _, where_clause) = generics.split_for_impl(); wrap_in_dummy_mod(quote! 
{ use diesel::deserialize::{self, QueryableByName}; use diesel::row::{NamedRow}; use diesel::sql_types::Untyped; impl #impl_generics QueryableByName<__DB> for #struct_name #ty_generics #where_clause { fn build<'__a>(row: &impl NamedRow<'__a, __DB>) -> deserialize::Result { #( let mut #fields = #initial_field_expr; )* deserialize::Result::Ok(Self { #( #field_names: #fields, )* }) } } }) } fn get_ident(field: &Field) -> Ident { match &field.name { FieldName::Named(n) => n.clone(), FieldName::Unnamed(i) => Ident::new(&format!("field_{}", i.index), i.span), } } fn sql_type(field: &Field, model: &Model) -> Type { let table_name = model.table_name(); match field.sql_type { Some(AttributeSpanWrapper { item: ref st, .. }) => st.clone(), None => { if model.has_table_name_attribute() { let column_name = field.column_name(); parse_quote!(diesel::dsl::SqlTypeOf<#table_name::#column_name>) } else { let field_name = match field.name { FieldName::Named(ref x) => x.clone(), _ => Ident::new("field", Span::call_site()), }; abort!( field.span, "Cannot determine the SQL type of {}", field_name; help = "Your struct must either be annotated with `#[diesel(table_name = foo)]` or have this field annotated with `#[diesel(sql_type = ...)]`"; ); } } } } diesel_derives-2.0.2/src/selectable.rs000064400000000000000000000046551046102023000160650ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; use field::Field; use model::Model; use util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, false); let (_, ty_generics, _) = item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics .params .push(parse_quote!(__DB: diesel::backend::Backend)); for embed_field in model.fields().iter().filter(|f| f.embed()) { let embed_ty = &embed_field.ty; generics .where_clause .get_or_insert_with(|| parse_quote!(where)) .predicates .push(parse_quote!(#embed_ty: Selectable<__DB>)); } let (impl_generics, _, 
where_clause) = generics.split_for_impl(); let struct_name = &item.ident; let field_columns_ty = model.fields().iter().map(|f| field_column_ty(f, &model)); let field_columns_inst = model.fields().iter().map(|f| field_column_inst(f, &model)); wrap_in_dummy_mod(quote! { use diesel::expression::Selectable; impl #impl_generics Selectable<__DB> for #struct_name #ty_generics #where_clause { type SelectExpression = (#(#field_columns_ty,)*); fn construct_selection() -> Self::SelectExpression { (#(#field_columns_inst,)*) } } }) } fn field_column_ty(field: &Field, model: &Model) -> TokenStream { if let Some(ref select_expression_type) = field.select_expression_type { let ty = &select_expression_type.item; quote!(#ty) } else if field.embed() { let embed_ty = &field.ty; quote!(<#embed_ty as Selectable<__DB>>::SelectExpression) } else { let table_name = model.table_name(); let column_name = field.column_name(); quote!(#table_name::#column_name) } } fn field_column_inst(field: &Field, model: &Model) -> TokenStream { use syn::spanned::Spanned; if let Some(ref select_expression) = field.select_expression { let expr = &select_expression.item; let span = expr.span(); quote::quote_spanned!(span => #expr) } else if field.embed() { let embed_ty = &field.ty; quote!(<#embed_ty as Selectable<__DB>>::construct_selection()) } else { let table_name = model.table_name(); let column_name = field.column_name(); quote!(#table_name::#column_name) } } diesel_derives-2.0.2/src/sql_function.rs000064400000000000000000000447131046102023000164650ustar 00000000000000use proc_macro2::TokenStream; use quote::ToTokens; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::{ Attribute, GenericArgument, Generics, Ident, Lit, Meta, MetaNameValue, PathArguments, Type, }; pub(crate) fn expand(input: SqlFunctionDecl) -> TokenStream { let SqlFunctionDecl { mut attributes, fn_token, fn_name, mut generics, args, return_type, } = input; let sql_name = attributes .iter() .find(|attr| 
{ attr.parse_meta() .map(|m| m.path().is_ident("sql_name")) .unwrap_or(false) }) .and_then(|attr| { if let Ok(Meta::NameValue(MetaNameValue { lit: Lit::Str(lit), .. })) = attr.parse_meta() { Some(lit.value()) } else { None } }) .unwrap_or_else(|| fn_name.to_string()); let is_aggregate = attributes.iter().any(|attr| { attr.parse_meta() .map(|m| m.path().is_ident("aggregate")) .unwrap_or(false) }); attributes.retain(|attr| { attr.parse_meta() .map(|m| !m.path().is_ident("sql_name") && !m.path().is_ident("aggregate")) .unwrap_or(true) }); let args = &args; let (ref arg_name, ref arg_type): (Vec<_>, Vec<_>) = args .iter() .map(|StrictFnArg { name, ty, .. }| (name, ty)) .unzip(); let arg_struct_assign = args.iter().map( |StrictFnArg { name, colon_token, .. }| { let name2 = name.clone(); quote!(#name #colon_token #name2.as_expression()) }, ); let type_args = &generics .type_params() .map(|type_param| type_param.ident.clone()) .collect::>(); for StrictFnArg { name, .. } in args { generics.params.push(parse_quote!(#name)); } let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); // Even if we force an empty where clause, it still won't print the where // token with no bounds. let where_clause = where_clause .map(|w| quote!(#w)) .unwrap_or_else(|| quote!(where)); let mut generics_with_internal = generics.clone(); generics_with_internal .params .push(parse_quote!(__DieselInternal)); let (impl_generics_internal, _, _) = generics_with_internal.split_for_impl(); let sql_type; let numeric_derive; if arg_name.is_empty() { sql_type = None; // FIXME: We can always derive once trivial bounds are stable numeric_derive = None; } else { sql_type = Some(quote!((#(#arg_name),*): Expression,)); numeric_derive = Some(quote!(#[derive(diesel::sql_types::DieselNumericOps)])); } let args_iter = args.iter(); let mut tokens = quote! 
{ use diesel::{self, QueryResult}; use diesel::expression::{AsExpression, Expression, SelectableExpression, AppearsOnTable, ValidGrouping}; use diesel::query_builder::{QueryFragment, AstPass}; use diesel::sql_types::*; use super::*; #[derive(Debug, Clone, Copy, diesel::query_builder::QueryId)] #numeric_derive pub struct #fn_name #ty_generics { #(pub(in super) #args_iter,)* #(pub(in super) #type_args: ::std::marker::PhantomData<#type_args>,)* } pub type HelperType #ty_generics = #fn_name < #(#type_args,)* #(<#arg_name as AsExpression<#arg_type>>::Expression,)* >; impl #impl_generics Expression for #fn_name #ty_generics #where_clause #sql_type { type SqlType = #return_type; } // __DieselInternal is what we call QS normally impl #impl_generics_internal SelectableExpression<__DieselInternal> for #fn_name #ty_generics #where_clause #(#arg_name: SelectableExpression<__DieselInternal>,)* Self: AppearsOnTable<__DieselInternal>, { } // __DieselInternal is what we call QS normally impl #impl_generics_internal AppearsOnTable<__DieselInternal> for #fn_name #ty_generics #where_clause #(#arg_name: AppearsOnTable<__DieselInternal>,)* Self: Expression, { } // __DieselInternal is what we call DB normally impl #impl_generics_internal QueryFragment<__DieselInternal> for #fn_name #ty_generics where __DieselInternal: diesel::backend::Backend, #(#arg_name: QueryFragment<__DieselInternal>,)* { #[allow(unused_assignments)] fn walk_ast<'__b>(&'__b self, mut out: AstPass<'_, '__b, __DieselInternal>) -> QueryResult<()>{ out.push_sql(concat!(#sql_name, "(")); // we unroll the arguments manually here, to prevent borrow check issues let mut needs_comma = false; #( if !self.#arg_name.is_noop(out.backend())? 
{ if needs_comma { out.push_sql(", "); } self.#arg_name.walk_ast(out.reborrow())?; needs_comma = true; } )* out.push_sql(")"); Ok(()) } } }; let is_supported_on_sqlite = cfg!(feature = "sqlite") && type_args.is_empty() && is_sqlite_type(&return_type) && arg_type.iter().all(|a| is_sqlite_type(a)); if is_aggregate { tokens = quote! { #tokens impl #impl_generics_internal ValidGrouping<__DieselInternal> for #fn_name #ty_generics { type IsAggregate = diesel::expression::is_aggregate::Yes; } }; if is_supported_on_sqlite { tokens = quote! { #tokens use diesel::sqlite::{Sqlite, SqliteConnection}; use diesel::serialize::ToSql; use diesel::deserialize::{FromSqlRow, StaticallySizedRow}; use diesel::sqlite::SqliteAggregateFunction; use diesel::sql_types::IntoNullable; }; match arg_name.len() { x if x > 1 => { tokens = quote! { #tokens #[allow(dead_code)] /// Registers an implementation for this aggregate function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). pub fn register_impl( conn: &mut SqliteConnection ) -> QueryResult<()> where A: SqliteAggregateFunction<(#(#arg_name,)*)> + Send + 'static + ::std::panic::UnwindSafe + ::std::panic::RefUnwindSafe, A::Output: ToSql<#return_type, Sqlite>, (#(#arg_name,)*): FromSqlRow<(#(#arg_type,)*), Sqlite> + StaticallySizedRow<(#(#arg_type,)*), Sqlite> + ::std::panic::UnwindSafe, { conn.register_aggregate_function::<(#(#arg_type,)*), #return_type, _, _, A>(#sql_name) } }; } x if x == 1 => { let arg_name = arg_name[0]; let arg_type = arg_type[0]; tokens = quote! { #tokens #[allow(dead_code)] /// Registers an implementation for this aggregate function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. 
The implementation must be /// deterministic (returns the same result given the same arguments). pub fn register_impl( conn: &mut SqliteConnection ) -> QueryResult<()> where A: SqliteAggregateFunction<#arg_name> + Send + 'static + std::panic::UnwindSafe + std::panic::RefUnwindSafe, A::Output: ToSql<#return_type, Sqlite>, #arg_name: FromSqlRow<#arg_type, Sqlite> + StaticallySizedRow<#arg_type, Sqlite> + ::std::panic::UnwindSafe, { conn.register_aggregate_function::<#arg_type, #return_type, _, _, A>(#sql_name) } }; } _ => (), } } } else { tokens = quote! { #tokens #[derive(ValidGrouping)] pub struct __Derived<#(#arg_name,)*>(#(#arg_name,)*); impl #impl_generics_internal ValidGrouping<__DieselInternal> for #fn_name #ty_generics where __Derived<#(#arg_name,)*>: ValidGrouping<__DieselInternal>, { type IsAggregate = <__Derived<#(#arg_name,)*> as ValidGrouping<__DieselInternal>>::IsAggregate; } }; if is_supported_on_sqlite && !arg_name.is_empty() { tokens = quote! { #tokens use diesel::sqlite::{Sqlite, SqliteConnection}; use diesel::serialize::ToSql; use diesel::deserialize::{FromSqlRow, StaticallySizedRow}; #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). If /// the function is nondeterministic, call /// `register_nondeterministic_impl` instead. 
pub fn register_impl( conn: &mut SqliteConnection, f: F, ) -> QueryResult<()> where F: Fn(#(#arg_name,)*) -> Ret + std::panic::UnwindSafe + Send + 'static, (#(#arg_name,)*): FromSqlRow<(#(#arg_type,)*), Sqlite> + StaticallySizedRow<(#(#arg_type,)*), Sqlite>, Ret: ToSql<#return_type, Sqlite>, { conn.register_sql_function::<(#(#arg_type,)*), #return_type, _, _, _>( #sql_name, true, move |(#(#arg_name,)*)| f(#(#arg_name,)*), ) } #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. /// `register_nondeterministic_impl` should only be used if your /// function can return different results with the same arguments (e.g. /// `random`). If your function is deterministic, you should call /// `register_impl` instead. pub fn register_nondeterministic_impl( conn: &mut SqliteConnection, mut f: F, ) -> QueryResult<()> where F: FnMut(#(#arg_name,)*) -> Ret + std::panic::UnwindSafe + Send + 'static, (#(#arg_name,)*): FromSqlRow<(#(#arg_type,)*), Sqlite> + StaticallySizedRow<(#(#arg_type,)*), Sqlite>, Ret: ToSql<#return_type, Sqlite>, { conn.register_sql_function::<(#(#arg_type,)*), #return_type, _, _, _>( #sql_name, false, move |(#(#arg_name,)*)| f(#(#arg_name,)*), ) } }; } if is_supported_on_sqlite && arg_name.is_empty() { tokens = quote! { #tokens use diesel::sqlite::{Sqlite, SqliteConnection}; use diesel::serialize::ToSql; #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). If /// the function is nondeterministic, call /// `register_nondeterministic_impl` instead. 
pub fn register_impl( conn: &SqliteConnection, f: F, ) -> QueryResult<()> where F: Fn() -> Ret + std::panic::UnwindSafe + Send + 'static, Ret: ToSql<#return_type, Sqlite>, { conn.register_noarg_sql_function::<#return_type, _, _>( #sql_name, true, f, ) } #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. /// `register_nondeterministic_impl` should only be used if your /// function can return different results with the same arguments (e.g. /// `random`). If your function is deterministic, you should call /// `register_impl` instead. pub fn register_nondeterministic_impl( conn: &SqliteConnection, mut f: F, ) -> QueryResult<()> where F: FnMut() -> Ret + std::panic::UnwindSafe + Send + 'static, Ret: ToSql<#return_type, Sqlite>, { conn.register_noarg_sql_function::<#return_type, _, _>( #sql_name, false, f, ) } }; } } let args_iter = args.iter(); quote! 
{
    #(#attributes)*
    #[allow(non_camel_case_types)]
    pub #fn_token #fn_name #impl_generics (#(#args_iter,)*) -> #fn_name::HelperType #ty_generics
    #where_clause
    #(#arg_name: ::diesel::expression::AsExpression<#arg_type>,)*
    {
        #fn_name::#fn_name {
            #(#arg_struct_assign,)*
            #(#type_args: ::std::marker::PhantomData,)*
        }
    }

    // All the generated impls live in a hidden module named after the
    // function, so they cannot collide with user code.
    #[doc(hidden)]
    #[allow(non_camel_case_types, non_snake_case, unused_imports)]
    pub(crate) mod #fn_name {
        #tokens
    }
}
}

/// Parsed form of a `sql_function!` item: its outer attributes, the `fn`
/// keyword, the function name, generics, the parenthesized argument list
/// and the declared return type.
pub(crate) struct SqlFunctionDecl {
    attributes: Vec<Attribute>,
    fn_token: Token![fn],
    fn_name: Ident,
    generics: Generics,
    args: Punctuated<StrictFnArg, Token![,]>,
    return_type: Type,
}

impl Parse for SqlFunctionDecl {
    fn parse(input: ParseStream) -> Result<Self> {
        let attributes = Attribute::parse_outer(input)?;
        let fn_token: Token![fn] = input.parse()?;
        let fn_name = Ident::parse(input)?;
        let generics = Generics::parse(input)?;
        let args;
        let _paren = parenthesized!(args in input);
        let args = args.parse_terminated::<_, Token![,]>(StrictFnArg::parse)?;
        // The return type is optional; when no `-> Ret` is given the `else`
        // branch below substitutes `NotSelectable` as a default.
        let return_type = if Option::<Token![->]>::parse(input)?.is_some() {
            Type::parse(input)?
} else { parse_quote!(diesel::expression::expression_types::NotSelectable) }; let _semi = Option::::parse(input)?; Ok(Self { attributes, fn_token, fn_name, generics, args, return_type, }) } } /// Essentially the same as ArgCaptured, but only allowing ident patterns struct StrictFnArg { name: Ident, colon_token: Token![:], ty: Type, } impl Parse for StrictFnArg { fn parse(input: ParseStream) -> Result { let name = input.parse()?; let colon_token = input.parse()?; let ty = input.parse()?; Ok(Self { name, colon_token, ty, }) } } impl ToTokens for StrictFnArg { fn to_tokens(&self, tokens: &mut TokenStream) { self.name.to_tokens(tokens); self.colon_token.to_tokens(tokens); self.name.to_tokens(tokens); } } fn is_sqlite_type(ty: &Type) -> bool { let last_segment = if let Type::Path(tp) = ty { if let Some(segment) = tp.path.segments.last() { segment } else { return false; } } else { return false; }; let ident = last_segment.ident.to_string(); if ident == "Nullable" { if let PathArguments::AngleBracketed(ref ab) = last_segment.arguments { if let Some(GenericArgument::Type(ty)) = ab.args.first() { return is_sqlite_type(ty); } } return false; } [ "BigInt", "Binary", "Bool", "Date", "Double", "Float", "Integer", "Numeric", "SmallInt", "Text", "Time", "Timestamp", ] .contains(&ident.as_str()) } diesel_derives-2.0.2/src/sql_type.rs000064400000000000000000000100521046102023000156060ustar 00000000000000use proc_macro2::{Span, TokenStream}; use syn::{DeriveInput, Ident}; use model::Model; use parsers::PostgresType; use util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, true); let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let sqlite_tokens = sqlite_tokens(&item, &model); let mysql_tokens = mysql_tokens(&item, &model); let pg_tokens = pg_tokens(&item, &model); wrap_in_dummy_mod(quote! 
{
        impl #impl_generics diesel::sql_types::SqlType for #struct_name #ty_generics
        #where_clause
        {
            type IsNull = diesel::sql_types::is_nullable::NotNull;
        }

        impl #impl_generics diesel::sql_types::SingleValue for #struct_name #ty_generics
        #where_clause
        {
        }

        #sqlite_tokens
        #mysql_tokens
        #pg_tokens
    })
}

/// Emits the SQLite `HasSqlType` impl when a `sqlite_type` attribute is
/// present; returns `None` when the attribute is absent or the `sqlite`
/// feature is disabled.
fn sqlite_tokens(item: &DeriveInput, model: &Model) -> Option<TokenStream> {
    model
        .sqlite_type
        .as_ref()
        .map(|sqlite_type| Ident::new(&sqlite_type.name.value(), Span::call_site()))
        .and_then(|ty| {
            if cfg!(not(feature = "sqlite")) {
                return None;
            }

            let struct_name = &item.ident;
            let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();

            Some(quote! {
                impl #impl_generics diesel::sql_types::HasSqlType<#struct_name #ty_generics>
                    for diesel::sqlite::Sqlite
                #where_clause
                {
                    fn metadata(_: &mut ()) -> diesel::sqlite::SqliteType {
                        diesel::sqlite::SqliteType::#ty
                    }
                }
            })
        })
}

/// Emits the MySQL `HasSqlType` impl when a `mysql_type` attribute is
/// present; returns `None` when the attribute is absent or the `mysql`
/// feature is disabled.
fn mysql_tokens(item: &DeriveInput, model: &Model) -> Option<TokenStream> {
    model
        .mysql_type
        .as_ref()
        .map(|mysql_type| Ident::new(&mysql_type.name.value(), Span::call_site()))
        .and_then(|ty| {
            if cfg!(not(feature = "mysql")) {
                return None;
            }

            let struct_name = &item.ident;
            let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();

            Some(quote! {
                impl #impl_generics diesel::sql_types::HasSqlType<#struct_name #ty_generics>
                    for diesel::mysql::Mysql
                #where_clause
                {
                    fn metadata(_: &mut ()) -> diesel::mysql::MysqlType {
                        diesel::mysql::MysqlType::#ty
                    }
                }
            })
        })
}

/// Emits the PostgreSQL `HasSqlType` impl when a `postgres_type` attribute is
/// present. Fixed OIDs are embedded directly; named types are resolved at
/// runtime through the connection's metadata lookup.
fn pg_tokens(item: &DeriveInput, model: &Model) -> Option<TokenStream> {
    model.postgres_type.as_ref().and_then(|ty| {
        if cfg!(not(feature = "postgres")) {
            return None;
        }

        let struct_name = &item.ident;
        let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();

        let metadata_fn = match ty {
            PostgresType::Fixed(oid, array_oid) => quote!(
                fn metadata(_: &mut Self::MetadataLookup) -> PgTypeMetadata {
                    PgTypeMetadata::new(#oid, #array_oid)
                }
            ),
            PostgresType::Lookup(type_name, Some(type_schema)) => quote!(
                fn metadata(lookup: &mut Self::MetadataLookup) -> PgTypeMetadata {
                    lookup.lookup_type(#type_name, Some(#type_schema))
                }
            ),
            PostgresType::Lookup(type_name, None) => quote!(
                fn metadata(lookup: &mut Self::MetadataLookup) -> PgTypeMetadata {
                    lookup.lookup_type(#type_name, None)
                }
            ),
        };

        Some(quote! {
            use diesel::pg::{PgMetadataLookup, PgTypeMetadata};

            impl #impl_generics diesel::sql_types::HasSqlType<#struct_name #ty_generics>
                for diesel::pg::Pg
            #where_clause
            {
                #metadata_fn
            }
        })
    })
}
diesel_derives-2.0.2/src/util.rs000064400000000000000000000110441046102023000147250ustar 00000000000000use proc_macro2::TokenStream;
use syn::parse::{Parse, ParseStream, Result};
use syn::token::Eq;
use syn::{parenthesized, Data, DeriveInput, GenericArgument, Ident, Type};

use model::Model;

// Help strings shown in compile errors when a `#[diesel(...)]` attribute is
// malformed; each one spells out the expected syntax.
pub const COLUMN_NAME_NOTE: &str = "column_name = foo";
pub const SQL_TYPE_NOTE: &str = "sql_type = Foo";
pub const SERIALIZE_AS_NOTE: &str = "serialize_as = Foo";
pub const DESERIALIZE_AS_NOTE: &str = "deserialize_as = Foo";
pub const TABLE_NAME_NOTE: &str = "table_name = foo";
pub const TREAT_NONE_AS_DEFAULT_VALUE_NOTE: &str = "treat_none_as_default_value = true";
pub const TREAT_NONE_AS_NULL_NOTE: &str = "treat_none_as_null = true";
pub const BELONGS_TO_NOTE: &str = "belongs_to(Foo, foreign_key = foo_id)";
pub const MYSQL_TYPE_NOTE: &str = "mysql_type(name = \"foo\")";
pub const SQLITE_TYPE_NOTE: &str = "sqlite_type(name = \"foo\")";
pub const POSTGRES_TYPE_NOTE: &str = "postgres_type(name = \"foo\", schema = \"public\")";
pub const POSTGRES_TYPE_NOTE_ID: &str = "postgres_type(oid = 37, array_oid = 54)";
pub const SELECT_EXPRESSION_NOTE: &str =
    "select_expression = schema::table_name::column_name.is_not_null()";
pub const SELECT_EXPRESSION_TYPE_NOTE: &str = "select_expression_type = dsl::IsNotNull";

/// Aborts macro expansion with a spanned "unknown attribute" error listing
/// the attribute names that would have been accepted.
pub fn unknown_attribute(name: &Ident, valid: &[&str]) -> !
{
    // Pluralize the message only when several attributes are acceptable.
    let prefix = if valid.len() == 1 { "" } else { " one of" };

    abort!(
        name,
        "unknown attribute, expected{} `{}`",
        prefix,
        valid.join("`, `")
    )
}

/// Parses `= <value>` inside a `#[diesel(...)]` attribute, aborting with a
/// help message (`help`) describing the expected syntax when the input ends
/// before the `=`.
pub fn parse_eq<T: Parse>(input: ParseStream, help: &str) -> Result<T> {
    if input.is_empty() {
        abort!(
            input.span(),
            "unexpected end of input, expected `=`";
            help = "The correct format looks like `#[diesel({})]`", help
        );
    }

    input.parse::<Eq>()?;
    input.parse()
}

/// Parses `( <value> )` inside a `#[diesel(...)]` attribute, aborting with a
/// help message (`help`) describing the expected syntax when the input ends
/// before the parentheses.
pub fn parse_paren<T: Parse>(input: ParseStream, help: &str) -> Result<T> {
    if input.is_empty() {
        abort!(
            input.span(),
            "unexpected end of input, expected parentheses";
            help = "The correct format looks like `#[diesel({})]`", help
        );
    }

    let content;
    parenthesized!(content in input);
    content.parse()
}

/// Wraps generated impls in an anonymous `const _: () = { ... }` block so
/// their imports cannot leak into or clash with the user's namespace.
pub fn wrap_in_dummy_mod(item: TokenStream) -> TokenStream {
    quote! {
        #[allow(unused_imports)]
        const _: () = {
            // This import is not actually redundant. When using diesel_derives
            // inside of diesel, `diesel` doesn't exist as an extern crate, and
            // to work around that it contains a private
            // `mod diesel { pub use super::*; }` that this import will then
            // refer to. In all other cases, this imports refers to the extern
            // crate diesel.
use diesel; #item }; } } pub fn inner_of_option_ty(ty: &Type) -> &Type { option_ty_arg(ty).unwrap_or(ty) } pub fn is_option_ty(ty: &Type) -> bool { option_ty_arg(ty).is_some() } fn option_ty_arg(ty: &Type) -> Option<&Type> { use syn::PathArguments::AngleBracketed; match *ty { Type::Path(ref ty) => { let last_segment = ty.path.segments.iter().last().unwrap(); match last_segment.arguments { AngleBracketed(ref args) if last_segment.ident == "Option" => { match args.args.iter().last() { Some(&GenericArgument::Type(ref ty)) => Some(ty), _ => None, } } _ => None, } } _ => None, } } pub fn ty_for_foreign_derive(item: &DeriveInput, model: &Model) -> Type { if model.foreign_derive { match item.data { Data::Struct(ref body) => match body.fields.iter().next() { Some(field) => field.ty.clone(), None => abort_call_site!("foreign_derive requires at least one field"), }, _ => abort_call_site!("foreign_derive can only be used with structs"), } } else { let ident = &item.ident; let (_, ty_generics, ..) = item.generics.split_for_impl(); parse_quote!(#ident #ty_generics) } } pub fn camel_to_snake(name: &str) -> String { let mut result = String::with_capacity(name.len()); result.push_str(&name[..1].to_lowercase()); for character in name[1..].chars() { if character.is_uppercase() { result.push('_'); for lowercase in character.to_lowercase() { result.push(lowercase); } } else { result.push(character); } } result } diesel_derives-2.0.2/src/valid_grouping.rs000064400000000000000000000044331046102023000167650ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; use model::Model; use util::{ty_for_foreign_derive, wrap_in_dummy_mod}; pub fn derive(mut item: DeriveInput) -> TokenStream { let model = Model::from_item(&item, true); let struct_ty = ty_for_foreign_derive(&item, &model); let type_params = item .generics .type_params() .map(|param| param.ident.clone()) .collect::>(); for type_param in type_params { let where_clause = item.generics.make_where_clause(); where_clause 
.predicates .push(parse_quote!(#type_param: ValidGrouping<__GroupByClause>)); } if model.aggregate { item.generics.params.push(parse_quote!(__GroupByClause)); let (impl_generics, _, where_clause) = item.generics.split_for_impl(); wrap_in_dummy_mod(quote! { use diesel::expression::{ValidGrouping, MixedAggregates, is_aggregate}; impl #impl_generics ValidGrouping<__GroupByClause> for #struct_ty #where_clause { type IsAggregate = is_aggregate::Yes; } }) } else { let mut aggregates = item .generics .type_params() .map(|t| quote!(#t::IsAggregate)) .collect::>() .into_iter(); let is_aggregate = aggregates .next() .map(|first| { let where_clause = item.generics.make_where_clause(); aggregates.fold(first, |left, right| { where_clause.predicates.push(parse_quote!( #left: MixedAggregates<#right> )); quote!(<#left as MixedAggregates<#right>>::Output) }) }) .unwrap_or_else(|| quote!(is_aggregate::Never)); item.generics.params.push(parse_quote!(__GroupByClause)); let (impl_generics, _, where_clause) = item.generics.split_for_impl(); wrap_in_dummy_mod(quote! { use diesel::expression::{ValidGrouping, MixedAggregates, is_aggregate}; impl #impl_generics ValidGrouping<__GroupByClause> for #struct_ty #where_clause { type IsAggregate = #is_aggregate; } }) } }