diesel_derives-2.2.3/.cargo_vcs_info.json0000644000000001540000000000100140260ustar { "git": { "sha1": "4c0e1f765c1216d6f829940ecc857af74b18d841" }, "path_in_vcs": "diesel_derives" }diesel_derives-2.2.3/Cargo.toml0000644000000033300000000000100120230ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.78.0" name = "diesel_derives" version = "2.2.3" build = false include = [ "src/**/*.rs", "tests/**/*.rs", "LICENSE-*", "README.md", ] autobins = false autoexamples = false autotests = false autobenches = false description = "You should not use this crate directly, it is internal to Diesel." 
homepage = "https://diesel.rs" documentation = "https://diesel.rs/guides/" readme = false license = "MIT OR Apache-2.0" repository = "https://github.com/diesel-rs/diesel/" [lib] name = "diesel_derives" path = "src/lib.rs" proc-macro = true [[test]] name = "tests" path = "tests/tests.rs" [dependencies.diesel_table_macro_syntax] version = "0.2" [dependencies.dsl_auto_type] version = "0.1" [dependencies.proc-macro2] version = "1.0.27" [dependencies.quote] version = "1.0.9" [dependencies.syn] version = "2.0" features = [ "derive", "fold", "full", ] [dev-dependencies.cfg-if] version = "1" [dev-dependencies.dotenvy] version = "0.15" [features] 128-column-tables = ["64-column-tables"] 32-column-tables = [] 64-column-tables = ["32-column-tables"] chrono = [] default = [] mysql = [] nightly = ["proc-macro2/nightly"] postgres = [] r2d2 = [] sqlite = [] time = [] with-deprecated = [] without-deprecated = [] diesel_derives-2.2.3/Cargo.toml.orig000064400000000000000000000020601046102023000155030ustar 00000000000000[package] name = "diesel_derives" version = "2.2.3" license = "MIT OR Apache-2.0" description = "You should not use this crate directly, it is internal to Diesel." 
documentation = "https://diesel.rs/guides/" homepage = "https://diesel.rs" repository = "https://github.com/diesel-rs/diesel/" autotests = false include.workspace = true rust-version.workspace = true edition = "2021" [dependencies] syn = { version = "2.0", features = ["derive", "fold", "full"] } quote = "1.0.9" proc-macro2 = "1.0.27" diesel_table_macro_syntax = {version = "0.2", path = "../diesel_table_macro_syntax"} dsl_auto_type = { version = "0.1", path = "../dsl_auto_type" } [dev-dependencies] cfg-if = "1" dotenvy = "0.15" [dev-dependencies.diesel] path = "../diesel" [lib] proc-macro = true [[test]] name = "tests" [features] default = [] nightly = ["proc-macro2/nightly"] postgres = [] sqlite = [] mysql = [] 32-column-tables = [] 64-column-tables = ["32-column-tables"] 128-column-tables = ["64-column-tables"] without-deprecated = [] with-deprecated = [] r2d2 = [] chrono = [] time = [] diesel_derives-2.2.3/LICENSE-APACHE000064400000000000000000000250461046102023000145510ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS Copyright 2015-2021 Sean Griffin, 2018-2021 Diesel Core Team Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
diesel_derives-2.2.3/LICENSE-MIT000064400000000000000000000021121046102023000142460ustar 00000000000000The MIT License (MIT) 2015-2021 Sean Griffin, 2018-2021 Diesel Core Team Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diesel_derives-2.2.3/src/as_changeset.rs000064400000000000000000000165571046102023000164150ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::spanned::Spanned as _; use syn::{parse_quote, DeriveInput, Expr, Path, Result, Type}; use crate::attrs::AttributeSpanWrapper; use crate::field::Field; use crate::model::Model; use crate::util::{inner_of_option_ty, is_option_ty, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, false)?; let struct_name = &item.ident; let table_name = &model.table_names()[0]; let fields_for_update = model .fields() .iter() .filter(|f| { !model .primary_key_names .iter() .any(|p| f.column_name().map(|f| f == *p).unwrap_or_default()) }) .collect::>(); if fields_for_update.is_empty() { return Err(syn::Error::new( proc_macro2::Span::call_site(), "Deriving `AsChangeset` on a structure that only contains primary keys isn't supported.\n\ help: If you want to change the primary key of a row, you should do so with `.set(table::id.eq(new_id))`.\n\ note: `#[derive(AsChangeset)]` never changes the primary key of a row." )); } let treat_none_as_null = model.treat_none_as_null(); let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let mut generate_borrowed_changeset = true; let mut direct_field_ty = Vec::with_capacity(fields_for_update.len()); let mut direct_field_assign = Vec::with_capacity(fields_for_update.len()); let mut ref_field_ty = Vec::with_capacity(fields_for_update.len()); let mut ref_field_assign = Vec::with_capacity(fields_for_update.len()); for field in fields_for_update { // Use field-level attr. with fallback to the struct-level one. 
let treat_none_as_null = match &field.treat_none_as_null { Some(attr) => { if !is_option_ty(&field.ty) { return Err(syn::Error::new( field.ty.span(), "expected `treat_none_as_null` field to be of type `Option<_>`", )); } attr.item } None => treat_none_as_null, }; match field.serialize_as.as_ref() { Some(AttributeSpanWrapper { item: ty, .. }) => { direct_field_ty.push(field_changeset_ty_serialize_as( field, table_name, ty, treat_none_as_null, )?); direct_field_assign.push(field_changeset_expr_serialize_as( field, table_name, ty, treat_none_as_null, )?); generate_borrowed_changeset = false; // as soon as we hit one field with #[diesel(serialize_as)] there is no point in generating the impl of AsChangeset for borrowed structs } None => { direct_field_ty.push(field_changeset_ty( field, table_name, None, treat_none_as_null, )?); direct_field_assign.push(field_changeset_expr( field, table_name, None, treat_none_as_null, )?); ref_field_ty.push(field_changeset_ty( field, table_name, Some(quote!(&'update)), treat_none_as_null, )?); ref_field_assign.push(field_changeset_expr( field, table_name, Some(quote!(&)), treat_none_as_null, )?); } } } let changeset_owned = quote! { impl #impl_generics AsChangeset for #struct_name #ty_generics #where_clause { type Target = #table_name::table; type Changeset = <(#(#direct_field_ty,)*) as AsChangeset>::Changeset; fn as_changeset(self) -> Self::Changeset { (#(#direct_field_assign,)*).as_changeset() } } }; let changeset_borrowed = if generate_borrowed_changeset { let mut impl_generics = item.generics.clone(); impl_generics.params.push(parse_quote!('update)); let (impl_generics, _, _) = impl_generics.split_for_impl(); quote! { impl #impl_generics AsChangeset for &'update #struct_name #ty_generics #where_clause { type Target = #table_name::table; type Changeset = <(#(#ref_field_ty,)*) as AsChangeset>::Changeset; fn as_changeset(self) -> Self::Changeset { (#(#ref_field_assign,)*).as_changeset() } } } } else { quote! 
{} }; Ok(wrap_in_dummy_mod(quote!( use diesel::query_builder::AsChangeset; use diesel::prelude::*; #changeset_owned #changeset_borrowed ))) } fn field_changeset_ty( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_null: bool, ) -> Result { let column_name = field.column_name()?.to_ident()?; if !treat_none_as_null && is_option_ty(&field.ty) { let field_ty = inner_of_option_ty(&field.ty); Ok( quote!(std::option::Option>), ) } else { let field_ty = &field.ty; Ok(quote!(diesel::dsl::Eq<#table_name::#column_name, #lifetime #field_ty>)) } } fn field_changeset_expr( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_null: bool, ) -> Result { let field_name = &field.name; let column_name = field.column_name()?.to_ident()?; if !treat_none_as_null && is_option_ty(&field.ty) { if lifetime.is_some() { Ok(quote!(self.#field_name.as_ref().map(|x| #table_name::#column_name.eq(x)))) } else { Ok(quote!(self.#field_name.map(|x| #table_name::#column_name.eq(x)))) } } else { Ok(quote!(#table_name::#column_name.eq(#lifetime self.#field_name))) } } fn field_changeset_ty_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_null: bool, ) -> Result { let column_name = field.column_name()?.to_ident()?; if !treat_none_as_null && is_option_ty(&field.ty) { let inner_ty = inner_of_option_ty(ty); Ok(quote!(std::option::Option>)) } else { Ok(quote!(diesel::dsl::Eq<#table_name::#column_name, #ty>)) } } fn field_changeset_expr_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_null: bool, ) -> Result { let field_name = &field.name; let column_name = field.column_name()?.to_ident()?; let column: Expr = parse_quote!(#table_name::#column_name); if !treat_none_as_null && is_option_ty(&field.ty) { Ok(quote!(self.#field_name.map(|x| #column.eq(::std::convert::Into::<#ty>::into(x))))) } else { Ok(quote!(#column.eq(::std::convert::Into::<#ty>::into(self.#field_name)))) } } 
diesel_derives-2.2.3/src/as_expression.rs000064400000000000000000000104031046102023000166330ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse_quote; use syn::DeriveInput; use syn::Result; use crate::model::Model; use crate::util::{ty_for_foreign_derive, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, true, false)?; if model.sql_types.is_empty() { return Err(syn::Error::new( proc_macro2::Span::call_site(), "At least one `sql_type` is needed for deriving `AsExpression` on a structure.", )); } let struct_ty = ty_for_foreign_derive(&item, &model)?; // type generics are already handled by `ty_for_foreign_derive` let (impl_generics_plain, _, where_clause_plain) = item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics.params.push(parse_quote!('__expr)); let (impl_generics, _, where_clause) = generics.split_for_impl(); let mut generics2 = generics.clone(); generics2.params.push(parse_quote!('__expr2)); let (impl_generics2, _, where_clause2) = generics2.split_for_impl(); let tokens = model.sql_types.iter().map(|sql_type| { let mut to_sql_generics = item.generics.clone(); to_sql_generics.params.push(parse_quote!(__DB)); to_sql_generics.make_where_clause().predicates.push(parse_quote!(__DB: diesel::backend::Backend)); to_sql_generics.make_where_clause().predicates.push(parse_quote!(Self: ToSql<#sql_type, __DB>)); let (to_sql_impl_generics, _, to_sql_where_clause) = to_sql_generics.split_for_impl(); let tokens = quote!( impl #impl_generics AsExpression<#sql_type> for &'__expr #struct_ty #where_clause { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl #impl_generics AsExpression> for &'__expr #struct_ty #where_clause { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl #impl_generics2 AsExpression<#sql_type> for &'__expr2 &'__expr #struct_ty 
#where_clause2 { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl #impl_generics2 AsExpression> for &'__expr2 &'__expr #struct_ty #where_clause2 { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl #to_sql_impl_generics diesel::serialize::ToSql, __DB> for #struct_ty #to_sql_where_clause { fn to_sql<'__b>(&'__b self, out: &mut Output<'__b, '_, __DB>) -> serialize::Result { ToSql::<#sql_type, __DB>::to_sql(self, out) } } ); if model.not_sized { tokens } else { quote!( #tokens impl #impl_generics_plain AsExpression<#sql_type> for #struct_ty #where_clause_plain { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } impl #impl_generics_plain AsExpression> for #struct_ty #where_clause_plain { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { Bound::new(self) } } ) } }); Ok(wrap_in_dummy_mod(quote! { use diesel::expression::AsExpression; use diesel::internal::derives::as_expression::Bound; use diesel::sql_types::Nullable; use diesel::serialize::{self, ToSql, Output}; #(#tokens)* })) } diesel_derives-2.2.3/src/associations.rs000064400000000000000000000104661046102023000164610ustar 00000000000000use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::fold::Fold; use syn::parse_quote; use syn::{DeriveInput, Ident, Lifetime, Result}; use crate::model::Model; use crate::parsers::BelongsTo; use crate::util::{camel_to_snake, wrap_in_dummy_mod}; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, false)?; if model.belongs_to.is_empty() { return Err(syn::Error::new( proc_macro2::Span::call_site(), "At least one `belongs_to` is needed for deriving `Associations` on a structure.", )); } let tokens = model .belongs_to .iter() .map(|assoc| derive_belongs_to(&item, &model, assoc)) .collect::>>()?; Ok(wrap_in_dummy_mod(quote!(#(#tokens)*))) } fn 
derive_belongs_to(item: &DeriveInput, model: &Model, assoc: &BelongsTo) -> Result { let (_, ty_generics, _) = item.generics.split_for_impl(); let struct_name = &item.ident; let table_name = &model.table_names()[0]; let foreign_key = &foreign_key(assoc); let foreign_key_field = model.find_column(foreign_key)?; let foreign_key_name = &foreign_key_field.name; let foreign_key_ty = &foreign_key_field.ty; let mut generics = item.generics.clone(); let parent_struct = ReplacePathLifetimes::new(|i, span| { let letter = char::from(b'b' + i as u8); let lifetime = Lifetime::new(&format!("'__{letter}"), span); generics.params.push(parse_quote!(#lifetime)); lifetime }) .fold_type_path(assoc.parent.clone()); generics.params.push(parse_quote!(__FK)); { let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); where_clause .predicates .push(parse_quote!(__FK: std::hash::Hash + std::cmp::Eq)); where_clause.predicates.push( parse_quote!(for<'__a> &'__a #foreign_key_ty: std::convert::Into<::std::option::Option<&'__a __FK>>), ); where_clause.predicates.push( parse_quote!(for<'__a> &'__a #parent_struct: diesel::associations::Identifiable), ); } let foreign_key_expr = quote!(std::convert::Into::into(&self.#foreign_key_name)); let foreign_key_ty = quote!(__FK); let (impl_generics, _, where_clause) = generics.split_for_impl(); Ok(quote! 
{ impl #impl_generics diesel::associations::BelongsTo<#parent_struct> for #struct_name #ty_generics #where_clause { type ForeignKey = #foreign_key_ty; type ForeignKeyColumn = #table_name::#foreign_key; fn foreign_key(&self) -> std::option::Option<&Self::ForeignKey> { #foreign_key_expr } fn foreign_key_column() -> Self::ForeignKeyColumn { #table_name::#foreign_key } } impl #impl_generics diesel::associations::BelongsTo<&'_ #parent_struct> for #struct_name #ty_generics #where_clause { type ForeignKey = #foreign_key_ty; type ForeignKeyColumn = #table_name::#foreign_key; fn foreign_key(&self) -> std::option::Option<&Self::ForeignKey> { #foreign_key_expr } fn foreign_key_column() -> Self::ForeignKeyColumn { #table_name::#foreign_key } } }) } fn foreign_key(assoc: &BelongsTo) -> Ident { let ident = &assoc .parent .path .segments .last() .expect("paths always have at least one segment") .ident; assoc .foreign_key .clone() .unwrap_or_else(|| infer_foreign_key(ident)) } fn infer_foreign_key(name: &Ident) -> Ident { let snake_case = camel_to_snake(&name.to_string()); Ident::new(&format!("{snake_case}_id"), name.span()) } struct ReplacePathLifetimes { count: usize, f: F, } impl ReplacePathLifetimes { fn new(f: F) -> Self { Self { count: 0, f } } } impl Fold for ReplacePathLifetimes where F: FnMut(usize, Span) -> Lifetime, { fn fold_lifetime(&mut self, mut lt: Lifetime) -> Lifetime { if lt.ident == "_" { lt = (self.f)(self.count, lt.span()); self.count += 1; } lt } } diesel_derives-2.2.3/src/attrs.rs000064400000000000000000000272731046102023000151230ustar 00000000000000use std::fmt::{Display, Formatter}; use proc_macro2::{Span, TokenStream}; use quote::ToTokens; use syn::parse::discouraged::Speculative; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::token::Comma; use syn::{Attribute, Expr, Ident, LitBool, LitStr, Path, Type, TypePath}; use crate::deprecated::ParseDeprecated; use crate::parsers::{BelongsTo, 
MysqlType, PostgresType, SqliteType}; use crate::util::{ parse_eq, parse_paren, unknown_attribute, BELONGS_TO_NOTE, COLUMN_NAME_NOTE, DESERIALIZE_AS_NOTE, MYSQL_TYPE_NOTE, POSTGRES_TYPE_NOTE, SELECT_EXPRESSION_NOTE, SELECT_EXPRESSION_TYPE_NOTE, SERIALIZE_AS_NOTE, SQLITE_TYPE_NOTE, SQL_TYPE_NOTE, TABLE_NAME_NOTE, TREAT_NONE_AS_DEFAULT_VALUE_NOTE, TREAT_NONE_AS_NULL_NOTE, }; use crate::util::{parse_paren_list, CHECK_FOR_BACKEND_NOTE}; pub trait MySpanned { fn span(&self) -> Span; } pub struct AttributeSpanWrapper { pub item: T, pub attribute_span: Span, pub ident_span: Span, } pub enum FieldAttr { Embed(Ident), SkipInsertion(Ident), ColumnName(Ident, SqlIdentifier), SqlType(Ident, TypePath), TreatNoneAsDefaultValue(Ident, LitBool), TreatNoneAsNull(Ident, LitBool), SerializeAs(Ident, TypePath), DeserializeAs(Ident, TypePath), SelectExpression(Ident, Expr), SelectExpressionType(Ident, Type), } #[derive(Clone)] pub struct SqlIdentifier { field_name: String, span: Span, } impl SqlIdentifier { pub fn span(&self) -> Span { self.span } pub fn to_ident(&self) -> Result { match syn::parse_str::(&format!("r#{}", self.field_name)) { Ok(mut ident) => { ident.set_span(self.span); Ok(ident) } Err(_e) if self.field_name.contains(' ') => Err(syn::Error::new( self.span(), format!( "Expected valid identifier, found `{0}`. \ Diesel does not support column names with whitespaces yet", self.field_name ), )), Err(_e) => Err(syn::Error::new( self.span(), format!( "Expected valid identifier, found `{0}`. 
\ Diesel automatically renames invalid identifiers, \ perhaps you meant to write `{0}_`?", self.field_name ), )), } } } impl ToTokens for SqlIdentifier { fn to_tokens(&self, tokens: &mut TokenStream) { if self.field_name.starts_with("r#") { Ident::new_raw(&self.field_name[2..], self.span).to_tokens(tokens) } else { Ident::new(&self.field_name, self.span).to_tokens(tokens) } } } impl Display for SqlIdentifier { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let mut start = 0; if self.field_name.starts_with("r#") { start = 2; } f.write_str(&self.field_name[start..]) } } impl PartialEq for SqlIdentifier { fn eq(&self, other: &Ident) -> bool { *other == self.field_name } } impl From<&'_ Ident> for SqlIdentifier { fn from(ident: &'_ Ident) -> Self { use syn::ext::IdentExt; let ident = ident.unraw(); Self { span: ident.span(), field_name: ident.to_string(), } } } impl Parse for SqlIdentifier { fn parse(input: ParseStream) -> Result { let fork = input.fork(); if let Ok(ident) = fork.parse::() { input.advance_to(&fork); Ok((&ident).into()) } else { let name = input.parse::()?; Ok(Self { field_name: name.value(), span: name.span(), }) } } } impl Parse for FieldAttr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "embed" => Ok(FieldAttr::Embed(name)), "skip_insertion" => Ok(FieldAttr::SkipInsertion(name)), "column_name" => Ok(FieldAttr::ColumnName( name, parse_eq(input, COLUMN_NAME_NOTE)?, )), "sql_type" => Ok(FieldAttr::SqlType(name, parse_eq(input, SQL_TYPE_NOTE)?)), "treat_none_as_default_value" => Ok(FieldAttr::TreatNoneAsDefaultValue( name, parse_eq(input, TREAT_NONE_AS_DEFAULT_VALUE_NOTE)?, )), "treat_none_as_null" => Ok(FieldAttr::TreatNoneAsNull( name, parse_eq(input, TREAT_NONE_AS_NULL_NOTE)?, )), "serialize_as" => Ok(FieldAttr::SerializeAs( name, parse_eq(input, SERIALIZE_AS_NOTE)?, )), "deserialize_as" => Ok(FieldAttr::DeserializeAs( name, parse_eq(input, 
DESERIALIZE_AS_NOTE)?, )), "select_expression" => Ok(FieldAttr::SelectExpression( name, parse_eq(input, SELECT_EXPRESSION_NOTE)?, )), "select_expression_type" => Ok(FieldAttr::SelectExpressionType( name, parse_eq(input, SELECT_EXPRESSION_TYPE_NOTE)?, )), _ => Err(unknown_attribute( &name, &[ "embed", "skip_insertion", "column_name", "sql_type", "treat_none_as_default_value", "treat_none_as_null", "serialize_as", "deserialize_as", "select_expression", "select_expression_type", ], )), } } } impl MySpanned for FieldAttr { fn span(&self) -> Span { match self { FieldAttr::Embed(ident) | FieldAttr::SkipInsertion(ident) | FieldAttr::ColumnName(ident, _) | FieldAttr::SqlType(ident, _) | FieldAttr::TreatNoneAsNull(ident, _) | FieldAttr::TreatNoneAsDefaultValue(ident, _) | FieldAttr::SerializeAs(ident, _) | FieldAttr::DeserializeAs(ident, _) | FieldAttr::SelectExpression(ident, _) | FieldAttr::SelectExpressionType(ident, _) => ident.span(), } } } #[allow(clippy::large_enum_variant)] pub enum StructAttr { Aggregate(Ident), NotSized(Ident), ForeignDerive(Ident), TableName(Ident, Path), SqlType(Ident, TypePath), TreatNoneAsDefaultValue(Ident, LitBool), TreatNoneAsNull(Ident, LitBool), BelongsTo(Ident, BelongsTo), MysqlType(Ident, MysqlType), SqliteType(Ident, SqliteType), PostgresType(Ident, PostgresType), PrimaryKey(Ident, Punctuated), CheckForBackend(Ident, syn::punctuated::Punctuated), } impl Parse for StructAttr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "aggregate" => Ok(StructAttr::Aggregate(name)), "not_sized" => Ok(StructAttr::NotSized(name)), "foreign_derive" => Ok(StructAttr::ForeignDerive(name)), "table_name" => Ok(StructAttr::TableName( name, parse_eq(input, TABLE_NAME_NOTE)?, )), "sql_type" => Ok(StructAttr::SqlType(name, parse_eq(input, SQL_TYPE_NOTE)?)), "treat_none_as_default_value" => Ok(StructAttr::TreatNoneAsDefaultValue( name, parse_eq(input, 
TREAT_NONE_AS_DEFAULT_VALUE_NOTE)?, )), "treat_none_as_null" => Ok(StructAttr::TreatNoneAsNull( name, parse_eq(input, TREAT_NONE_AS_NULL_NOTE)?, )), "belongs_to" => Ok(StructAttr::BelongsTo( name, parse_paren(input, BELONGS_TO_NOTE)?, )), "mysql_type" => Ok(StructAttr::MysqlType( name, parse_paren(input, MYSQL_TYPE_NOTE)?, )), "sqlite_type" => Ok(StructAttr::SqliteType( name, parse_paren(input, SQLITE_TYPE_NOTE)?, )), "postgres_type" => Ok(StructAttr::PostgresType( name, parse_paren(input, POSTGRES_TYPE_NOTE)?, )), "primary_key" => Ok(StructAttr::PrimaryKey( name, parse_paren_list(input, "key1, key2", syn::Token![,])?, )), "check_for_backend" => Ok(StructAttr::CheckForBackend( name, parse_paren_list(input, CHECK_FOR_BACKEND_NOTE, syn::Token![,])?, )), _ => Err(unknown_attribute( &name, &[ "aggregate", "not_sized", "foreign_derive", "table_name", "sql_type", "treat_none_as_default_value", "treat_none_as_null", "belongs_to", "mysql_type", "sqlite_type", "postgres_type", "primary_key", "check_for_backend", ], )), } } } impl MySpanned for StructAttr { fn span(&self) -> Span { match self { StructAttr::Aggregate(ident) | StructAttr::NotSized(ident) | StructAttr::ForeignDerive(ident) | StructAttr::TableName(ident, _) | StructAttr::SqlType(ident, _) | StructAttr::TreatNoneAsDefaultValue(ident, _) | StructAttr::TreatNoneAsNull(ident, _) | StructAttr::BelongsTo(ident, _) | StructAttr::MysqlType(ident, _) | StructAttr::SqliteType(ident, _) | StructAttr::PostgresType(ident, _) | StructAttr::CheckForBackend(ident, _) | StructAttr::PrimaryKey(ident, _) => ident.span(), } } } pub fn parse_attributes(attrs: &[Attribute]) -> Result>> where T: Parse + ParseDeprecated + MySpanned, { let mut out = Vec::new(); for attr in attrs { if attr.meta.path().is_ident("diesel") { let map = attr .parse_args_with(Punctuated::::parse_terminated)? 
.into_iter() .map(|a| AttributeSpanWrapper { ident_span: a.span(), item: a, attribute_span: attr.meta.span(), }); out.extend(map); } else if cfg!(all( not(feature = "without-deprecated"), feature = "with-deprecated" )) { let path = attr.meta.path(); let ident = path.get_ident().map(|f| f.to_string()); if let "sql_type" | "column_name" | "table_name" | "changeset_options" | "primary_key" | "belongs_to" | "sqlite_type" | "mysql_type" | "postgres" = ident.as_deref().unwrap_or_default() { let m = &attr.meta; let ts = quote::quote!(#m).into(); let value = syn::parse::Parser::parse(T::parse_deprecated, ts)?; if let Some(value) = value { out.push(AttributeSpanWrapper { ident_span: value.span(), item: value, attribute_span: attr.meta.span(), }); } } } } Ok(out) } diesel_derives-2.2.3/src/deprecated/belongs_to.rs000064400000000000000000000027341046102023000202140ustar 00000000000000use syn::parse::{ParseStream, Result}; use syn::token::Comma; use syn::{parenthesized, Ident, LitStr}; use crate::deprecated::utils::parse_eq_and_lit_str; use crate::parsers::BelongsTo; use crate::util::BELONGS_TO_NOTE; pub fn parse_belongs_to(name: Ident, input: ParseStream) -> Result { if input.is_empty() { return Err(syn::Error::new( name.span(), format!( "unexpected end of input, expected parentheses\n\ help: The correct format looks like `#[diesel({})]`", BELONGS_TO_NOTE ), )); } let content; parenthesized!(content in input); let parent = if content.peek(Ident) { let name: Ident = content.parse()?; if name == "parent" { let lit_str = parse_eq_and_lit_str(name, &content, BELONGS_TO_NOTE)?; lit_str.parse()? } else { LitStr::new(&name.to_string(), name.span()).parse()? } } else { content.parse()? 
}; let mut foreign_key = None; if content.peek(Comma) { content.parse::()?; let name: Ident = content.parse()?; if name != "foreign_key" { return Err(syn::Error::new(name.span(), "expected `foreign_key`")); } let lit_str = parse_eq_and_lit_str(name, &content, BELONGS_TO_NOTE)?; foreign_key = Some(lit_str.parse()?); } Ok(BelongsTo { parent, foreign_key, }) } diesel_derives-2.2.3/src/deprecated/changeset_options.rs000064400000000000000000000015641046102023000215750ustar 00000000000000use syn::parse::{ParseStream, Result}; use syn::{parenthesized, Ident, LitBool}; use crate::deprecated::utils::parse_eq_and_lit_str; use crate::util::TREAT_NONE_AS_NULL_NOTE; pub fn parse_changeset_options(name: Ident, input: ParseStream) -> Result<(Ident, LitBool)> { if input.is_empty() { return Err(syn::Error::new( name.span(), "unexpected end of input, expected parentheses", )); } let content; parenthesized!(content in input); let name: Ident = content.parse()?; let name_str = name.to_string(); if name_str != "treat_none_as_null" { return Err(syn::Error::new( name.span(), "expected `treat_none_as_null`", )); } Ok((name.clone(), { let lit_str = parse_eq_and_lit_str(name, &content, TREAT_NONE_AS_NULL_NOTE)?; lit_str.parse()? 
})) } diesel_derives-2.2.3/src/deprecated/mod.rs000064400000000000000000000217061046102023000166400ustar 00000000000000use syn::parse::{ParseStream, Result}; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod belongs_to; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod changeset_options; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod postgres_type; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod primary_key; #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod utils; pub trait ParseDeprecated: Sized { fn parse_deprecated(input: ParseStream) -> Result>; } #[cfg(any(feature = "without-deprecated", not(feature = "with-deprecated")))] mod not_deprecated { use super::{ParseDeprecated, ParseStream, Result}; use crate::attrs::{FieldAttr, StructAttr}; impl ParseDeprecated for StructAttr { fn parse_deprecated(_input: ParseStream) -> Result> { unimplemented!() } } impl ParseDeprecated for FieldAttr { fn parse_deprecated(_input: ParseStream) -> Result> { unimplemented!() } } } #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] mod impl_deprecated { use super::{ParseDeprecated, ParseStream, Result}; use crate::attrs::{FieldAttr, StructAttr}; use crate::deprecated::belongs_to::parse_belongs_to; use crate::deprecated::changeset_options::parse_changeset_options; use crate::deprecated::postgres_type::parse_postgres_type; use crate::deprecated::primary_key::parse_primary_key; use crate::deprecated::utils::parse_eq_and_lit_str; use crate::parsers::{MysqlType, PostgresType, SqliteType}; use crate::util::{ COLUMN_NAME_NOTE, MYSQL_TYPE_NOTE, SQLITE_TYPE_NOTE, SQL_TYPE_NOTE, TABLE_NAME_NOTE, }; use proc_macro2::Span; use syn::Ident; macro_rules! 
warn { ($ident: expr, $help: expr) => { warn( $ident.span(), &format!("#[{}] attribute form is deprecated", $ident), $help, ); }; } impl ParseDeprecated for StructAttr { fn parse_deprecated(input: ParseStream) -> Result> { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "table_name" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, TABLE_NAME_NOTE)?; warn!( name, &format!("use `#[diesel(table_name = {})]` instead", lit_str.value()) ); Ok(Some(StructAttr::TableName(name, lit_str.parse()?))) } "changeset_options" => { let (ident, value) = parse_changeset_options(name.clone(), input)?; warn!( name, &format!( "use `#[diesel(treat_none_as_null = {})]` instead", value.value ) ); Ok(Some(StructAttr::TreatNoneAsNull(ident, value))) } "sql_type" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, SQL_TYPE_NOTE)?; warn!( name, &format!("use `#[diesel(sql_type = {})]` instead", lit_str.value()) ); Ok(Some(StructAttr::SqlType(name, lit_str.parse()?))) } "primary_key" => { let keys = parse_primary_key(name.clone(), input)?; let hint = keys .iter() .map(|i| i.to_string()) .collect::>() .join(", "); warn!( name, &format!("use `#[diesel(primary_key({hint}))]` instead") ); Ok(Some(StructAttr::PrimaryKey(name, keys))) } "belongs_to" => { let belongs_to = parse_belongs_to(name.clone(), input)?; let parent = belongs_to .parent .path .segments .iter() .map(|s| s.ident.to_string()) .collect::>() .join("::"); if let Some(ref key) = belongs_to.foreign_key { warn!( name, &format!( "use `#[diesel(belongs_to({parent}, foreign_key = {key}))]` instead" ) ); } else { warn!( name, &format!("use `#[diesel(belongs_to({parent}))]` instead") ); } Ok(Some(StructAttr::BelongsTo(name, belongs_to))) } "sqlite_type" => { let name_value = parse_eq_and_lit_str(name.clone(), input, SQLITE_TYPE_NOTE)?; warn!( name, &format!( "use `#[diesel(sqlite_type(name = \"{}\"))]` instead", name_value.value() ) ); Ok(Some(StructAttr::SqliteType( name, SqliteType 
{ name: name_value }, ))) } "mysql_type" => { let name_value = parse_eq_and_lit_str(name.clone(), input, MYSQL_TYPE_NOTE)?; warn!( name, &format!( "use `#[diesel(mysql_type(name = \"{}\"))]` instead", name_value.value() ) ); Ok(Some(StructAttr::MysqlType( name, MysqlType { name: name_value }, ))) } "postgres" => { let pg_type = parse_postgres_type(name.clone(), input)?; let msg = match &pg_type { PostgresType::Fixed(oid, array_oid) => format!( "use `#[diesel(postgres_type(oid = {}, array_oid = {}))]` instead", oid.base10_parse::()?, array_oid.base10_parse::()? ), PostgresType::Lookup(name, Some(schema)) => format!( "use `#[diesel(postgres_type(name = \"{}\", schema = \"{}\"))]` instead", name.value(), schema.value() ), PostgresType::Lookup(name, None) => format!( "use `#[diesel(postgres_type(name = \"{}\"))]` instead", name.value(), ), }; warn!(name, &msg); Ok(Some(StructAttr::PostgresType(name, pg_type))) } _ => Ok(None), } } } #[cfg(all(not(feature = "without-deprecated"), feature = "with-deprecated"))] impl ParseDeprecated for FieldAttr { fn parse_deprecated(input: ParseStream) -> Result> { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "column_name" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, COLUMN_NAME_NOTE)?; warn!( name, &format!("use `#[diesel(column_name = {})]` instead", lit_str.value()) ); Ok(Some(FieldAttr::ColumnName(name, lit_str.parse()?))) } "sql_type" => { let lit_str = parse_eq_and_lit_str(name.clone(), input, SQL_TYPE_NOTE)?; warn!( name, &format!("use `#[diesel(sql_type = {})]` instead", lit_str.value()) ); Ok(Some(FieldAttr::SqlType(name, lit_str.parse()?))) } _ => Ok(None), } } } #[cfg(feature = "nightly")] fn warn(span: Span, message: &str, help: &str) { proc_macro::Diagnostic::spanned(span.unwrap(), proc_macro::Level::Warning, message) .help(help) .emit() } #[cfg(not(feature = "nightly"))] fn warn(_span: Span, message: &str, help: &str) { eprintln!("warning: {message}\n = help: 
{help}\n"); } } diesel_derives-2.2.3/src/deprecated/postgres_type.rs000064400000000000000000000042121046102023000207610ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{parenthesized, Ident, LitInt, LitStr}; use crate::deprecated::utils::parse_eq_and_lit_str; use crate::parsers::PostgresType; use crate::util::{unknown_attribute, POSTGRES_TYPE_NOTE}; enum Attr { Oid(Ident, LitInt), ArrayOid(Ident, LitInt), TypeName(Ident, LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "oid" => Ok(Attr::Oid(name.clone(), { let lit_str = parse_eq_and_lit_str(name, input, POSTGRES_TYPE_NOTE)?; lit_str.parse()? })), "array_oid" => Ok(Attr::ArrayOid(name.clone(), { let lit_str = parse_eq_and_lit_str(name, input, POSTGRES_TYPE_NOTE)?; lit_str.parse()? })), "type_name" => Ok(Attr::TypeName( name.clone(), parse_eq_and_lit_str(name, input, POSTGRES_TYPE_NOTE)?, )), _ => Err(unknown_attribute(&name, &["oid", "array_oid", "type_name"])), } } } pub fn parse_postgres_type(name: Ident, input: ParseStream) -> Result { if input.is_empty() { return Err(syn::Error::new( name.span(), format!( "unexpected end of input, expected parentheses\n\ help: The correct format looks like `#[diesel({})]`", POSTGRES_TYPE_NOTE ), )); } let content; parenthesized!(content in input); let mut oid = None; let mut array_oid = None; let mut type_name = None; for attr in Punctuated::::parse_terminated(&content)? 
{ match attr { Attr::Oid(ident, value) => oid = Some((ident, value)), Attr::ArrayOid(ident, value) => array_oid = Some((ident, value)), Attr::TypeName(ident, value) => type_name = Some((ident, value)), } } PostgresType::validate_and_build(&content, oid, array_oid, type_name, None) } diesel_derives-2.2.3/src/deprecated/primary_key.rs000064400000000000000000000010041046102023000204010ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{parenthesized, Ident}; pub fn parse_primary_key(name: Ident, input: ParseStream) -> Result> { if input.is_empty() { return Err(syn::Error::new( name.span(), "unexpected end of input, expected parentheses", )); } let content; parenthesized!(content in input); content.parse_terminated(Ident::parse, syn::Token![,]) } diesel_derives-2.2.3/src/deprecated/utils.rs000064400000000000000000000010011046102023000172030ustar 00000000000000use syn::parse::{ParseStream, Result}; use syn::token::Eq; use syn::{Ident, LitStr}; pub fn parse_eq_and_lit_str(name: Ident, input: ParseStream, help: &str) -> Result { if input.is_empty() { return Err(syn::Error::new( name.span(), format!( "unexpected end of input, expected `=`\n\ help: The correct format looks like `#[diesel({help})]`" ), )); } input.parse::()?; input.parse::() } diesel_derives-2.2.3/src/diesel_for_each_tuple.rs000064400000000000000000000035151046102023000202630ustar 00000000000000use proc_macro2::{Ident, Span, TokenStream}; use quote::quote; #[cfg(not(feature = "32-column-tables"))] pub const MAX_TUPLE_SIZE: i32 = 16; #[cfg(all(not(feature = "64-column-tables"), feature = "32-column-tables"))] pub const MAX_TUPLE_SIZE: i32 = 32; #[cfg(all(not(feature = "128-column-tables"), feature = "64-column-tables"))] pub const MAX_TUPLE_SIZE: i32 = 64; #[cfg(feature = "128-column-tables")] pub const MAX_TUPLE_SIZE: i32 = 128; pub(crate) fn expand(input: ForEachTupleInput) -> TokenStream { let call_side = Span::mixed_site(); 
let pairs = (0..input.max_size as usize) .map(|i| { let t = Ident::new(&format!("T{i}"), call_side); let st = Ident::new(&format!("ST{i}"), call_side); let tt = Ident::new(&format!("TT{i}"), call_side); let i = syn::Index::from(i); quote!((#i) -> #t, #st, #tt,) }) .collect::>(); let mut out = Vec::with_capacity(input.max_size as usize); for i in 0..input.max_size { let items = &pairs[0..=i as usize]; let tuple = i + 1; out.push(quote! { #tuple { #(#items)* } }); } let input = input.inner; quote! { #input! { #(#out)* } } } pub struct ForEachTupleInput { inner: Ident, max_size: i32, } impl syn::parse::Parse for ForEachTupleInput { fn parse(input: syn::parse::ParseStream) -> syn::Result { let inner = input.parse()?; let max_size = if input.peek(syn::Token![,]) { let _ = input.parse::(); input.parse::()?.base10_parse()? } else if input.is_empty() { MAX_TUPLE_SIZE } else { unreachable!("Invalid syntax") }; Ok(Self { inner, max_size }) } } diesel_derives-2.2.3/src/diesel_numeric_ops.rs000064400000000000000000000062011046102023000176220ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse_quote; use syn::DeriveInput; use crate::util::wrap_in_dummy_mod; pub fn derive(mut item: DeriveInput) -> TokenStream { let struct_name = &item.ident; { let where_clause = item .generics .where_clause .get_or_insert(parse_quote!(where)); where_clause.predicates.push(parse_quote!(Self: Expression)); where_clause.predicates.push_punct(Default::default()); } let (_, ty_generics, where_clause) = item.generics.split_for_impl(); let mut impl_generics = item.generics.clone(); impl_generics.params.push(parse_quote!(__Rhs)); let (impl_generics, _, _) = impl_generics.split_for_impl(); wrap_in_dummy_mod(quote! 
{ use diesel::internal::derives::numeric_ops as ops; use diesel::expression::{Expression, AsExpression}; use diesel::sql_types::ops::{Add, Sub, Mul, Div}; use diesel::sql_types::{SqlType, SingleValue}; impl #impl_generics ::std::ops::Add<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Add, <::SqlType as Add>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Add>::Rhs>, { type Output = ops::Add; fn add(self, rhs: __Rhs) -> Self::Output { ops::Add::new(self, rhs.as_expression()) } } impl #impl_generics ::std::ops::Sub<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Sub, <::SqlType as Sub>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Sub>::Rhs>, { type Output = ops::Sub; fn sub(self, rhs: __Rhs) -> Self::Output { ops::Sub::new(self, rhs.as_expression()) } } impl #impl_generics ::std::ops::Mul<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Mul, <::SqlType as Mul>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Mul>::Rhs>, { type Output = ops::Mul; fn mul(self, rhs: __Rhs) -> Self::Output { ops::Mul::new(self, rhs.as_expression()) } } impl #impl_generics ::std::ops::Div<__Rhs> for #struct_name #ty_generics #where_clause Self: Expression, ::SqlType: Div, <::SqlType as Div>::Rhs: SqlType + SingleValue, __Rhs: AsExpression<<::SqlType as Div>::Rhs>, { type Output = ops::Div; fn div(self, rhs: __Rhs) -> Self::Output { ops::Div::new(self, rhs.as_expression()) } } }) } diesel_derives-2.2.3/src/diesel_public_if.rs000064400000000000000000000132621046102023000172400ustar 00000000000000use quote::quote; use syn::Token; use syn::{punctuated::Punctuated, DeriveInput}; pub(crate) fn expand(cfg: CfgInput, item: EntryWithVisibility) -> proc_macro2::TokenStream { item.hide_for_cfg(cfg.cfg, cfg.field_list) } pub struct CfgInput { cfg: syn::Meta, field_list: Vec, } impl syn::parse::Parse for CfgInput { fn parse(input: syn::parse::ParseStream) -> 
syn::Result { let mut cfg = Punctuated::::parse_terminated(input)?; if cfg.len() == 1 { Ok(Self { cfg: cfg .pop() .expect("There is exactly one element") .into_value(), field_list: Vec::new(), }) } else if cfg.len() == 2 { let value_1 = cfg .pop() .expect("There is exactly one element") .into_value(); let value_2 = cfg .pop() .expect("There is exactly one element") .into_value(); let (cfg, fields) = if matches!(&value_1, syn::Meta::List(v) if v.path.is_ident("public_fields")) { (value_2, value_1) } else if matches!(&value_2, syn::Meta::List(v) if v.path.is_ident("public_fields")) { (value_1, value_2) } else { panic!( "Incompatible argument list detected. `__diesel_public_if` \ expects a cfg argument and a optional public_fields" ) }; let field_list = if let syn::Meta::List(v) = fields { use syn::parse::Parser; let parser = syn::punctuated::Punctuated::::parse_terminated; let idents = parser.parse2(v.tokens)?; idents.into_iter().collect() } else { unreachable!() }; Ok(Self { cfg, field_list }) } else { panic!( "Incompatible argument list detected. 
`__diesel_public_if` \ expects a cfg argument and an optional public_fields" ) } } } #[derive(Clone)] pub enum EntryWithVisibility { TraitFunction { meta: Vec, tail: proc_macro2::TokenStream, }, Item { meta: Vec, vis: syn::Visibility, tail: proc_macro2::TokenStream, }, Struct { meta: Vec, vis: syn::Visibility, def: syn::DataStruct, ident: syn::Ident, generics: syn::Generics, }, } impl syn::parse::Parse for EntryWithVisibility { fn parse(input: syn::parse::ParseStream) -> syn::Result { let meta = syn::Attribute::parse_outer(input)?; if input.peek(Token![fn]) || input.peek(Token![type]) { let tail = input.parse()?; Ok(Self::TraitFunction { meta, tail }) } else { let vis = input.parse()?; if input.peek(Token![struct]) { let s = DeriveInput::parse(input)?; if let syn::Data::Struct(def) = s.data { Ok(Self::Struct { meta, vis, def, generics: s.generics, ident: s.ident, }) } else { unreachable!() } } else { let tail = input.parse()?; Ok(Self::Item { meta, vis, tail }) } } } } impl EntryWithVisibility { fn hide_for_cfg( &self, cfg: syn::Meta, field_list: Vec, ) -> proc_macro2::TokenStream { match self { EntryWithVisibility::TraitFunction { meta, tail } if field_list.is_empty() => quote! { #(#meta)* #[cfg_attr(not(#cfg), doc(hidden))] #[cfg_attr(docsrs, doc(cfg(#cfg)))] #tail }, EntryWithVisibility::Item { meta, vis, tail } if field_list.is_empty() => { quote! { #(#meta)* #[cfg(not(#cfg))] #vis #tail #(#meta)* #[cfg(#cfg)] pub #tail } } EntryWithVisibility::Struct { meta, vis, def, ident, generics, } => { let fields1 = def.fields.iter(); let fields2 = def.fields.iter().map(|f| { let mut ret = f.clone(); if ret .ident .as_ref() .map(|i| field_list.contains(i)) .unwrap_or(false) { ret.vis = syn::Visibility::Public(Default::default()); } ret }); quote! { #(#meta)* #[cfg(not(#cfg))] #vis struct #ident #generics { #(#fields1,)* } #(#meta)* #[cfg(#cfg)] #[non_exhaustive] pub struct #ident #generics { #(#fields2,)* } } } EntryWithVisibility::TraitFunction { .. 
} | EntryWithVisibility::Item { .. } => { panic!("Public field list is only supported for structs") } } } } diesel_derives-2.2.3/src/field.rs000064400000000000000000000145701046102023000150450ustar 00000000000000use proc_macro2::{Span, TokenStream}; use syn::spanned::Spanned; use syn::{Expr, Field as SynField, Ident, Index, Result, Type}; use crate::attrs::{parse_attributes, AttributeSpanWrapper, FieldAttr, SqlIdentifier}; pub struct Field { pub ty: Type, pub span: Span, pub name: FieldName, column_name: Option>, pub sql_type: Option>, pub treat_none_as_default_value: Option>, pub treat_none_as_null: Option>, pub serialize_as: Option>, pub deserialize_as: Option>, pub select_expression: Option>, pub select_expression_type: Option>, pub embed: Option>, pub skip_insertion: Option>, } impl Field { pub fn from_struct_field(field: &SynField, index: usize) -> Result { let SynField { ident, attrs, ty, .. } = field; let mut column_name = None; let mut sql_type = None; let mut serialize_as = None; let mut deserialize_as = None; let mut embed = None; let mut skip_insertion = None; let mut select_expression = None; let mut select_expression_type = None; let mut treat_none_as_default_value = None; let mut treat_none_as_null = None; for attr in parse_attributes(attrs)? 
{ let attribute_span = attr.attribute_span; let ident_span = attr.ident_span; match attr.item { FieldAttr::ColumnName(_, value) => { column_name = Some(AttributeSpanWrapper { item: value, attribute_span, ident_span, }) } FieldAttr::SqlType(_, value) => { sql_type = Some(AttributeSpanWrapper { item: Type::Path(value), attribute_span, ident_span, }) } FieldAttr::TreatNoneAsDefaultValue(_, value) => { treat_none_as_default_value = Some(AttributeSpanWrapper { item: value.value, attribute_span, ident_span, }) } FieldAttr::TreatNoneAsNull(_, value) => { treat_none_as_null = Some(AttributeSpanWrapper { item: value.value, attribute_span, ident_span, }) } FieldAttr::SerializeAs(_, value) => { serialize_as = Some(AttributeSpanWrapper { item: Type::Path(value), attribute_span, ident_span, }) } FieldAttr::DeserializeAs(_, value) => { deserialize_as = Some(AttributeSpanWrapper { item: Type::Path(value), attribute_span, ident_span, }) } FieldAttr::SelectExpression(_, value) => { select_expression = Some(AttributeSpanWrapper { item: value, attribute_span, ident_span, }) } FieldAttr::SelectExpressionType(_, value) => { select_expression_type = Some(AttributeSpanWrapper { item: value, attribute_span, ident_span, }) } FieldAttr::Embed(_) => { embed = Some(AttributeSpanWrapper { item: true, attribute_span, ident_span, }) } FieldAttr::SkipInsertion(_) => { skip_insertion = Some(AttributeSpanWrapper { item: true, attribute_span, ident_span, }) } } } let name = match ident.clone() { Some(x) => FieldName::Named(x), None => FieldName::Unnamed(index.into()), }; let span = match name { FieldName::Named(ref ident) => ident.span(), FieldName::Unnamed(_) => ty.span(), }; Ok(Self { ty: ty.clone(), span, name, column_name, sql_type, treat_none_as_default_value, treat_none_as_null, serialize_as, deserialize_as, select_expression, select_expression_type, embed, skip_insertion, }) } pub fn column_name(&self) -> Result { let identifier = self.column_name.as_ref().map(|a| a.item.clone()); if let 
Some(identifier) = identifier { Ok(identifier) } else { match self.name { FieldName::Named(ref x) => Ok(x.into()), FieldName::Unnamed(ref x) => Err(syn::Error::new( x.span(), "All fields of tuple structs must be annotated with `#[diesel(column_name)]`", )), } } } pub fn ty_for_deserialize(&self) -> &Type { if let Some(AttributeSpanWrapper { item: value, .. }) = &self.deserialize_as { value } else { &self.ty } } pub(crate) fn embed(&self) -> bool { self.embed.as_ref().map(|a| a.item).unwrap_or(false) } pub(crate) fn skip_insertion(&self) -> bool { self.skip_insertion .as_ref() .map(|a| a.item) .unwrap_or(false) } } pub enum FieldName { Named(Ident), Unnamed(Index), } impl quote::ToTokens for FieldName { fn to_tokens(&self, tokens: &mut TokenStream) { match *self { FieldName::Named(ref x) => x.to_tokens(tokens), FieldName::Unnamed(ref x) => x.to_tokens(tokens), } } } diesel_derives-2.2.3/src/from_sql_row.rs000064400000000000000000000025571046102023000164750ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse_quote; use syn::DeriveInput; use syn::Result; use crate::model::Model; use crate::util::{ty_for_foreign_derive, wrap_in_dummy_mod}; pub fn derive(mut item: DeriveInput) -> Result { let model = Model::from_item(&item, true, false)?; let struct_ty = ty_for_foreign_derive(&item, &model)?; { item.generics.params.push(parse_quote!(__DB)); item.generics.params.push(parse_quote!(__ST)); let where_clause = item.generics.make_where_clause(); where_clause .predicates .push(parse_quote!(__DB: diesel::backend::Backend)); where_clause .predicates .push(parse_quote!(__ST: diesel::sql_types::SingleValue)); where_clause .predicates .push(parse_quote!(Self: FromSql<__ST, __DB>)); } let (impl_generics, _, where_clause) = item.generics.split_for_impl(); Ok(wrap_in_dummy_mod(quote! 
{ use diesel::deserialize::{self, FromSql, Queryable}; // Need to put __ST and __DB after lifetimes but before const params impl #impl_generics Queryable<__ST, __DB> for #struct_ty #where_clause { type Row = Self; fn build(row: Self::Row) -> deserialize::Result { Ok(row) } } })) } diesel_derives-2.2.3/src/identifiable.rs000064400000000000000000000033021046102023000163700ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse_quote; use syn::DeriveInput; use syn::Result; use crate::model::Model; use crate::util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, false)?; let struct_name = &item.ident; let table_name = &model.table_names()[0]; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let mut ref_generics = item.generics.clone(); ref_generics.params.push(parse_quote!('ident)); let (ref_generics, ..) = ref_generics.split_for_impl(); let mut field_ty = Vec::new(); let mut field_name = Vec::new(); for pk in model.primary_key_names.iter() { let f = model.find_column(pk)?; field_ty.push(&f.ty); field_name.push(&f.name); } Ok(wrap_in_dummy_mod(quote! 
{ use diesel::associations::{HasTable, Identifiable}; impl #impl_generics HasTable for #struct_name #ty_generics #where_clause { type Table = #table_name::table; fn table() -> Self::Table { #table_name::table } } impl #ref_generics Identifiable for &'ident #struct_name #ty_generics #where_clause { type Id = (#(&'ident #field_ty),*); fn id(self) -> Self::Id { (#(&self.#field_name),*) } } impl #ref_generics Identifiable for &'_ &'ident #struct_name #ty_generics #where_clause { type Id = (#(&'ident #field_ty),*); fn id(self) -> Self::Id { (#(&self.#field_name),*) } } })) } diesel_derives-2.2.3/src/insertable.rs000064400000000000000000000226161046102023000161120ustar 00000000000000use crate::attrs::AttributeSpanWrapper; use crate::field::Field; use crate::model::Model; use crate::util::{inner_of_option_ty, is_option_ty, wrap_in_dummy_mod}; use proc_macro2::TokenStream; use quote::quote; use quote::quote_spanned; use syn::parse_quote; use syn::spanned::Spanned as _; use syn::{DeriveInput, Expr, Path, Result, Type}; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, true)?; let tokens = model .table_names() .iter() .map(|table_name| derive_into_single_table(&item, &model, table_name)) .collect::>>()?; Ok(wrap_in_dummy_mod(quote! 
{ use diesel::insertable::Insertable; use diesel::internal::derives::insertable::UndecoratedInsertRecord; use diesel::prelude::*; #(#tokens)* })) } fn derive_into_single_table( item: &DeriveInput, model: &Model, table_name: &Path, ) -> Result { let treat_none_as_default_value = model.treat_none_as_default_value(); let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let mut generate_borrowed_insert = true; let mut direct_field_ty = Vec::with_capacity(model.fields().len()); let mut direct_field_assign = Vec::with_capacity(model.fields().len()); let mut ref_field_ty = Vec::with_capacity(model.fields().len()); let mut ref_field_assign = Vec::with_capacity(model.fields().len()); for field in model.fields() { // skip this field while generating the insertion if field.skip_insertion() { continue; } // Use field-level attr. with fallback to the struct-level one. let treat_none_as_default_value = match &field.treat_none_as_default_value { Some(attr) => { if let Some(embed) = &field.embed { return Err(syn::Error::new( embed.attribute_span, "`embed` and `treat_none_as_default_value` are mutually exclusive", )); } if !is_option_ty(&field.ty) { return Err(syn::Error::new( field.ty.span(), "expected `treat_none_as_default_value` field to be of type `Option<_>`", )); } attr.item } None => treat_none_as_default_value, }; match (field.serialize_as.as_ref(), field.embed()) { (None, true) => { direct_field_ty.push(field_ty_embed(field, None)); direct_field_assign.push(field_expr_embed(field, None)); ref_field_ty.push(field_ty_embed(field, Some(quote!(&'insert)))); ref_field_assign.push(field_expr_embed(field, Some(quote!(&)))); } (None, false) => { direct_field_ty.push(field_ty( field, table_name, None, treat_none_as_default_value, )?); direct_field_assign.push(field_expr( field, table_name, None, treat_none_as_default_value, )?); ref_field_ty.push(field_ty( field, table_name, Some(quote!(&'insert)), 
treat_none_as_default_value, )?); ref_field_assign.push(field_expr( field, table_name, Some(quote!(&)), treat_none_as_default_value, )?); } (Some(AttributeSpanWrapper { item: ty, .. }), false) => { direct_field_ty.push(field_ty_serialize_as( field, table_name, ty, treat_none_as_default_value, )?); direct_field_assign.push(field_expr_serialize_as( field, table_name, ty, treat_none_as_default_value, )?); generate_borrowed_insert = false; // as soon as we hit one field with #[diesel(serialize_as)] there is no point in generating the impl of Insertable for borrowed structs } (Some(AttributeSpanWrapper { attribute_span, .. }), true) => { return Err(syn::Error::new( *attribute_span, "`#[diesel(embed)]` cannot be combined with `#[diesel(serialize_as)]`", )); } } } let insert_owned = quote! { impl #impl_generics Insertable<#table_name::table> for #struct_name #ty_generics #where_clause { type Values = <(#(#direct_field_ty,)*) as Insertable<#table_name::table>>::Values; fn values(self) -> <(#(#direct_field_ty,)*) as Insertable<#table_name::table>>::Values { (#(#direct_field_assign,)*).values() } } }; let insert_borrowed = if generate_borrowed_insert { let mut impl_generics = item.generics.clone(); impl_generics.params.push(parse_quote!('insert)); let (impl_generics, ..) = impl_generics.split_for_impl(); quote! { impl #impl_generics Insertable<#table_name::table> for &'insert #struct_name #ty_generics #where_clause { type Values = <(#(#ref_field_ty,)*) as Insertable<#table_name::table>>::Values; fn values(self) -> <(#(#ref_field_ty,)*) as Insertable<#table_name::table>>::Values { (#(#ref_field_assign,)*).values() } } } } else { quote! {} }; Ok(quote! 
{ #[allow(unused_qualifications)] #insert_owned #[allow(unused_qualifications)] #insert_borrowed impl #impl_generics UndecoratedInsertRecord<#table_name::table> for #struct_name #ty_generics #where_clause { } }) } fn field_ty_embed(field: &Field, lifetime: Option) -> TokenStream { let field_ty = &field.ty; let span = field.span; quote_spanned!(span=> #lifetime #field_ty) } fn field_expr_embed(field: &Field, lifetime: Option) -> TokenStream { let field_name = &field.name; quote!(#lifetime self.#field_name) } fn field_ty_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_default_value: bool, ) -> Result { let column_name = field.column_name()?.to_ident()?; let span = field.span; if treat_none_as_default_value { let inner_ty = inner_of_option_ty(ty); Ok(quote_spanned! {span=> std::option::Option> }) } else { Ok(quote_spanned! {span=> diesel::dsl::Eq< #table_name::#column_name, #ty, > }) } } fn field_expr_serialize_as( field: &Field, table_name: &Path, ty: &Type, treat_none_as_default_value: bool, ) -> Result { let field_name = &field.name; let column_name = field.column_name()?.to_ident()?; let column = quote!(#table_name::#column_name); if treat_none_as_default_value { if is_option_ty(ty) { Ok(quote!(::std::convert::Into::<#ty>::into(self.#field_name).map(|v| #column.eq(v)))) } else { Ok( quote!(std::option::Option::Some(#column.eq(::std::convert::Into::<#ty>::into(self.#field_name)))), ) } } else { Ok(quote!(#column.eq(::std::convert::Into::<#ty>::into(self.#field_name)))) } } fn field_ty( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_default_value: bool, ) -> Result { let column_name = field.column_name()?.to_ident()?; let span = field.span; if treat_none_as_default_value { let inner_ty = inner_of_option_ty(&field.ty); Ok(quote_spanned! {span=> std::option::Option> }) } else { let inner_ty = &field.ty; Ok(quote_spanned! 
{span=> diesel::dsl::Eq< #table_name::#column_name, #lifetime #inner_ty, > }) } } fn field_expr( field: &Field, table_name: &Path, lifetime: Option, treat_none_as_default_value: bool, ) -> Result { let field_name = &field.name; let column_name = field.column_name()?.to_ident()?; let column: Expr = parse_quote!(#table_name::#column_name); if treat_none_as_default_value { if is_option_ty(&field.ty) { if lifetime.is_some() { Ok(quote!(self.#field_name.as_ref().map(|x| #column.eq(x)))) } else { Ok(quote!(self.#field_name.map(|x| #column.eq(x)))) } } else { Ok(quote!(std::option::Option::Some(#column.eq(#lifetime self.#field_name)))) } } else { Ok(quote!(#column.eq(#lifetime self.#field_name))) } } diesel_derives-2.2.3/src/lib.rs000064400000000000000000002104321046102023000145230ustar 00000000000000// Clippy lints #![allow( clippy::needless_doctest_main, clippy::needless_pass_by_value, clippy::map_unwrap_or )] #![warn( clippy::mut_mut, clippy::non_ascii_literal, clippy::similar_names, clippy::unicode_not_nfc, clippy::if_not_else, clippy::items_after_statements, clippy::used_underscore_binding, missing_copy_implementations )] #![cfg_attr(feature = "nightly", feature(proc_macro_diagnostic))] extern crate diesel_table_macro_syntax; extern crate proc_macro; extern crate proc_macro2; extern crate quote; extern crate syn; use proc_macro::TokenStream; use syn::{parse_macro_input, parse_quote}; mod attrs; mod deprecated; mod field; mod model; mod parsers; mod util; mod as_changeset; mod as_expression; mod associations; mod diesel_for_each_tuple; mod diesel_numeric_ops; mod diesel_public_if; mod from_sql_row; mod identifiable; mod insertable; mod multiconnection; mod query_id; mod queryable; mod queryable_by_name; mod selectable; mod sql_function; mod sql_type; mod table; mod valid_grouping; /// Implements `AsChangeset` /// /// To implement `AsChangeset` this derive needs to know the corresponding table /// type. 
By default, it uses the `snake_case` type name with an added `s` from /// the current scope. /// It is possible to change this default by using `#[diesel(table_name = something)]`. /// /// If a field name of your struct differs /// from the name of the corresponding column, you can annotate the field with /// `#[diesel(column_name = some_column_name)]`. /// /// To provide custom serialization behavior for a field, you can use /// `#[diesel(serialize_as = SomeType)]`. If this attribute is present, Diesel /// will call `.into` on the corresponding field and serialize the instance of `SomeType`, /// rather than the actual field on your struct. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// Normally, Diesel produces two implementations of the `AsChangeset` trait for your /// struct using this derive: one for an owned version and one for a borrowed version. /// Using `#[diesel(serialize_as)]` implies a conversion using `.into` which consumes the underlying value. /// Hence, once you use `#[diesel(serialize_as)]`, Diesel can no longer insert borrowed /// versions of your struct. /// /// By default, any `Option` fields on the struct are skipped if their value is /// `None`. If you would like to assign `NULL` to the field instead, you can /// annotate your struct with `#[diesel(treat_none_as_null = true)]`. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(treat_none_as_null = true)]`, specifies that /// the derive should treat `None` values as `NULL`. By default /// `Option::::None` is just skipped. To insert a `NULL` using default /// behavior use `Option::>::Some(None)` /// * `#[diesel(table_name = path::to::table)]`, specifies a path to the table for which the /// current type is a changeset. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name. 
/// * `#[diesel(primary_key(id1, id2))]` to specify the struct field that /// that corresponds to the primary key. If not used, `id` will be /// assumed as primary key field /// /// ## Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column name /// of the current field to `some_column_name`. By default, the field /// name is used as column name. /// * `#[diesel(serialize_as = SomeType)]`, instead of serializing the actual /// field type, Diesel will convert the field into `SomeType` using `.into` and /// serialize that instead. By default, this derive will serialize directly using /// the actual field type. /// * `#[diesel(treat_none_as_null = true/false)]`, overrides the container-level /// `treat_none_as_null` attribute for the current field. #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive( AsChangeset, attributes(diesel, table_name, column_name, primary_key, changeset_options) ) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(AsChangeset, attributes(diesel)) )] pub fn derive_as_changeset(input: TokenStream) -> TokenStream { as_changeset::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implements all required variants of `AsExpression` /// /// This derive will generate the following impls: /// /// - `impl AsExpression for YourType` /// - `impl AsExpression> for YourType` /// - `impl AsExpression for &'a YourType` /// - `impl AsExpression> for &'a YourType` /// - `impl AsExpression for &'a &'b YourType` /// - `impl AsExpression> for &'a &'b YourType` /// /// If your type is unsized, /// you can specify this by adding the annotation `#[diesel(not_sized)]` /// as attribute on the type. This will skip the impls for non-reference types. /// /// Using this derive requires implementing the `ToSql` trait for your type. 
/// /// # Attributes: /// /// ## Required container attributes /// /// * `#[diesel(sql_type = SqlType)]`, to specify the sql type of the /// generated implementations. If the attribute exists multiple times /// impls for each sql type is generated. /// /// ## Optional container attributes /// /// * `#[diesel(not_sized)]`, to skip generating impls that require /// that the type is `Sized` #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(AsExpression, attributes(diesel, sql_type)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(AsExpression, attributes(diesel)) )] pub fn derive_as_expression(input: TokenStream) -> TokenStream { as_expression::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implement required traits for the associations API /// /// This derive implements support for Diesel's associations api. Check the /// module level documentation of the `diesel::associations` module for details. /// /// This derive generates the following impls: /// * `impl BelongsTo for YourType` /// * `impl BelongsTo<&'a Parent> for YourType` /// /// # Attributes /// /// # Required container attributes /// /// * `#[diesel(belongs_to(User))]`, to specify a child-to-parent relationship /// between the current type and the specified parent type (`User`). /// If this attribute is given multiple times, multiple relationships /// are generated. `#[diesel(belongs_to(User, foreign_key = mykey))]` variant /// allows us to specify the name of the foreign key. If the foreign key /// is not specified explicitly, the remote lower case type name with /// appended `_id` is used as a foreign key name. (`user_id` in this example /// case) /// /// # Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]` specifies a path to the table this /// type belongs to. The path is relative to the current module. 
/// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name. /// /// # Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column the current /// field maps to `some_column_name`. By default, the field name is used /// as a column name. #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Associations, attributes(diesel, belongs_to, column_name, table_name)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Associations, attributes(diesel, belongs_to, column_name, table_name)) )] pub fn derive_associations(input: TokenStream) -> TokenStream { associations::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implement numeric operators for the current query node #[proc_macro_derive(DieselNumericOps)] pub fn derive_diesel_numeric_ops(input: TokenStream) -> TokenStream { diesel_numeric_ops::derive(parse_macro_input!(input)).into() } /// Implements `Queryable` for types that correspond to a single SQL type. The type must implement `FromSql`. /// /// This derive is mostly useful to implement support deserializing /// into rust types not supported by Diesel itself. /// /// There are no options or special considerations needed for this derive. #[proc_macro_derive(FromSqlRow, attributes(diesel))] pub fn derive_from_sql_row(input: TokenStream) -> TokenStream { from_sql_row::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implements `Identifiable` for references of the current type /// /// By default, the primary key field is assumed to be a single field called `id`. /// If it isn't, you can put `#[diesel(primary_key(your_id))]` on your struct. /// If you have a composite primary key, the syntax is `#[diesel(primary_key(id1, id2))]`. 
/// /// By default, `#[derive(Identifiable)]` will assume that your table is /// in scope and its name is the plural form of your struct name. /// Diesel uses basic pluralization rules. /// It only adds an `s` to the end, and converts `CamelCase` to `snake_case`. /// If your table name doesn't follow this convention or is not in scope, /// you can specify a path to the table with `#[diesel(table_name = path::to::table)]`. /// Our rules for inferring table names are considered public API. /// It will never change without a major version bump. /// /// This derive generates the following impls: /// * `impl Identifiable for &'a YourType` /// * `impl Identifiable for &'_ &'a YourType` /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]` specifies a path to the table this /// type belongs to. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name /// * `#[diesel(primary_key(id1, id2))]` to specify the struct field that /// that corresponds to the primary key. If not used, `id` will be /// assumed as primary key field /// /// # Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column the current /// field maps to `some_column_name`. By default, the field name is used /// as a column name. 
#[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Identifiable, attributes(diesel, table_name, column_name, primary_key)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Identifiable, attributes(diesel)) )] pub fn derive_identifiable(input: TokenStream) -> TokenStream { identifiable::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implements `Insertable` /// /// To implement `Insertable` this derive needs to know the corresponding table /// type. By default, it uses the `snake_case` type name with an added `s` /// from the current scope. /// It is possible to change this default by using `#[diesel(table_name = something)]`. /// If `table_name` attribute is given multiple times, impls for each table are generated. /// /// If a field name of your /// struct differs from the name of the corresponding column, /// you can annotate the field with `#[diesel(column_name = some_column_name)]`. /// /// Your struct can also contain fields which implement `Insertable`. This is /// useful when you want to have one field map to more than one column (for /// example, an enum that maps to a label and a value column). Add /// `#[diesel(embed)]` to any such fields. /// /// To provide custom serialization behavior for a field, you can use /// `#[diesel(serialize_as = SomeType)]`. If this attribute is present, Diesel /// will call `.into` on the corresponding field and serialize the instance of `SomeType`, /// rather than the actual field on your struct. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// Using `#[diesel(serialize_as)]` is **incompatible** with `#[diesel(embed)]`. /// Normally, Diesel produces two implementations of the `Insertable` trait for your /// struct using this derive: one for an owned version and one for a borrowed version. 
/// Using `#[diesel(serialize_as)]` implies a conversion using `.into` which consumes the underlying value. /// Hence, once you use `#[diesel(serialize_as)]`, Diesel can no longer insert borrowed /// versions of your struct. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]`, specifies a path to the table this type /// is insertable into. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name /// * `#[diesel(treat_none_as_default_value = false)]`, specifies that `None` values /// should be converted to `NULL` values on the SQL side instead of being treated as `DEFAULT` /// value primitive. *Note*: This option may control if your query is stored in the /// prepared statement cache or not* /// /// ## Optional field attributes /// /// * `#[diesel(column_name = some_column_name)]`, overrides the column the current /// field maps to `some_column_name`. By default, the field name is used /// as column name /// * `#[diesel(embed)]`, specifies that the current field maps not only /// to a single database field, but is a struct that implements `Insertable` /// * `#[diesel(serialize_as = SomeType)]`, instead of serializing the actual /// field type, Diesel will convert the field into `SomeType` using `.into` and /// serialize that instead. By default, this derive will serialize directly using /// the actual field type. /// * `#[diesel(treat_none_as_default_value = true/false)]`, overrides the container-level /// `treat_none_as_default_value` attribute for the current field. /// * `#[diesel(skip_insertion)]`, skips insertion of this field. Useful for working with /// generated columns. /// /// # Examples /// /// If we want to customize the serialization during insert, we can use `#[diesel(serialize_as)]`. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use diesel::{prelude::*, serialize::{ToSql, Output, self}, deserialize::{FromSqlRow}, expression::AsExpression, sql_types, backend::Backend}; /// # use schema::users; /// # use std::io::Write; /// # /// #[derive(Debug, FromSqlRow, AsExpression)] /// #[diesel(sql_type = sql_types::Text)] /// struct UppercaseString(pub String); /// /// impl Into for String { /// fn into(self) -> UppercaseString { /// UppercaseString(self.to_uppercase()) /// } /// } /// /// impl ToSql for UppercaseString /// where /// DB: Backend, /// String: ToSql, /// { /// fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result { /// self.0.to_sql(out) /// } /// } /// /// #[derive(Insertable, PartialEq, Debug)] /// #[diesel(table_name = users)] /// struct InsertableUser { /// id: i32, /// #[diesel(serialize_as = UppercaseString)] /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut connection_no_data(); /// # diesel::sql_query("CREATE TABLE users (id INTEGER PRIMARY KEY, name VARCHAR(255) NOT NULL)") /// # .execute(connection) /// # .unwrap(); /// let user = InsertableUser { /// id: 1, /// name: "thomas".to_string(), /// }; /// /// diesel::insert_into(users) /// .values(user) /// .execute(connection) /// .unwrap(); /// /// assert_eq!( /// Ok("THOMAS".to_string()), /// users.select(name).first(connection) /// ); /// # Ok(()) /// # } /// ``` #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Insertable, attributes(diesel, table_name, column_name)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Insertable, attributes(diesel)) )] pub fn derive_insertable(input: TokenStream) -> TokenStream { 
insertable::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implements `QueryId` /// /// For example, given this struct: /// /// ```rust /// # extern crate diesel; /// #[derive(diesel::query_builder::QueryId)] /// pub struct And { /// left: Left, /// right: Right, /// } /// ``` /// /// the following implementation will be generated /// /// ```rust /// # extern crate diesel; /// # struct And(Left, Right); /// # use diesel::query_builder::QueryId; /// impl QueryId for And /// where /// Left: QueryId, /// Right: QueryId, /// { /// type QueryId = And; /// /// const HAS_STATIC_QUERY_ID: bool = Left::HAS_STATIC_QUERY_ID && Right::HAS_STATIC_QUERY_ID; /// } /// ``` /// /// If the SQL generated by a struct is not uniquely identifiable by its type, /// meaning that `HAS_STATIC_QUERY_ID` should always be false, /// you shouldn't derive this trait. /// In that case, you should implement it manually instead. #[proc_macro_derive(QueryId)] pub fn derive_query_id(input: TokenStream) -> TokenStream { query_id::derive(parse_macro_input!(input)).into() } /// Implements `Queryable` to load the result of statically typed queries /// /// This trait can only be derived for structs, not enums. /// /// **Note**: When this trait is derived, it will assume that __all fields on /// your struct__ matches __all fields in the query__, including the order and /// count. This means that field order is significant if you're using /// `#[derive(Queryable)]`. __Field name has no effect__. If you see errors while /// loading data into a struct that derives `Queryable`: Consider using /// [`#[derive(Selectable)]`] + `#[diesel(check_for_backend(YourBackendType))]` /// to check for mismatching fields at compile-time. /// /// To provide custom deserialization behavior for a field, you can use /// `#[diesel(deserialize_as = SomeType)]`. 
If this attribute is present, Diesel /// will deserialize the corresponding field into `SomeType`, rather than the /// actual field type on your struct and then call /// [`.try_into`](https://doc.rust-lang.org/stable/std/convert/trait.TryInto.html#tymethod.try_into) /// to convert it to the actual field type. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// (Note: all types that have `Into` automatically implement `TryInto`, /// for cases where your conversion is not fallible.) /// /// # Attributes /// /// ## Optional field attributes /// /// * `#[diesel(deserialize_as = Type)]`, instead of deserializing directly /// into the field type, the implementation will deserialize into `Type`. /// Then `Type` is converted via /// [`.try_into`](https://doc.rust-lang.org/stable/std/convert/trait.TryInto.html#tymethod.try_into) /// into the field type. By default, this derive will deserialize directly into the field type /// /// # Examples /// /// If we just want to map a query to our struct, we can use `derive`. /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # /// #[derive(Queryable, PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut establish_connection(); /// let first_user = users.first(connection)?; /// let expected = User { id: 1, name: "Sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// If we want to do additional work during deserialization, we can use /// `deserialize_as` to use a different implementation. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # /// # use schema::users; /// # use diesel::backend::{self, Backend}; /// # use diesel::deserialize::{self, Queryable, FromSql}; /// # use diesel::sql_types::Text; /// # /// struct LowercaseString(String); /// /// impl Into for LowercaseString { /// fn into(self) -> String { /// self.0 /// } /// } /// /// impl Queryable for LowercaseString /// where /// DB: Backend, /// String: FromSql /// { /// /// type Row = String; /// /// fn build(s: String) -> deserialize::Result { /// Ok(LowercaseString(s.to_lowercase())) /// } /// } /// /// #[derive(Queryable, PartialEq, Debug)] /// struct User { /// id: i32, /// #[diesel(deserialize_as = LowercaseString)] /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut establish_connection(); /// let first_user = users.first(connection)?; /// let expected = User { id: 1, name: "sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// Alternatively, we can implement the trait for our struct manually. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # /// use schema::users; /// use diesel::deserialize::{self, Queryable, FromSqlRow}; /// use diesel::row::Row; /// /// # /* /// type DB = diesel::sqlite::Sqlite; /// # */ /// /// #[derive(PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// impl Queryable for User /// where /// (i32, String): FromSqlRow, /// { /// type Row = (i32, String); /// /// fn build((id, name): Self::Row) -> deserialize::Result { /// Ok(User { id, name: name.to_lowercase() }) /// } /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # use schema::users::dsl::*; /// # let connection = &mut establish_connection(); /// let first_user = users.first(connection)?; /// let expected = User { id: 1, name: "sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(Queryable, attributes(diesel, column_name)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(Queryable, attributes(diesel)) )] pub fn derive_queryable(input: TokenStream) -> TokenStream { queryable::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implements `QueryableByName` for untyped sql queries, such as that one generated /// by `sql_query` /// /// To derive this trait, Diesel needs to know the SQL type of each field. /// It can get the data from the corresponding table type. /// It uses the `snake_case` type name with an added `s`. /// It is possible to change this default by using `#[diesel(table_name = something)]`. /// If you define use the table type, the SQL type will be /// `diesel::dsl::SqlTypeOf`. 
In cases which there are no table type, /// you can do the same by annotating each field with `#[diesel(sql_type = SomeType)]`. /// /// If the name of a field on your struct is different from the column in your /// `table!` declaration, or if you're deriving this trait on a tuple struct, /// you can annotate the field with `#[diesel(column_name = some_column)]`. For tuple /// structs, all fields must have this annotation. /// /// If a field is another struct which implements `QueryableByName`, /// instead of a column, you can annotate that with `#[diesel(embed)]`. /// Then all fields contained by that inner struct are loaded into the embedded struct. /// /// To provide custom deserialization behavior for a field, you can use /// `#[diesel(deserialize_as = SomeType)]`. If this attribute is present, Diesel /// will deserialize the corresponding field into `SomeType`, rather than the /// actual field type on your struct and then call `.into` to convert it to the /// actual field type. This can be used to add custom behavior for a /// single field, or use types that are otherwise unsupported by Diesel. /// /// # Attributes /// /// ## Optional container attributes /// /// * `#[diesel(table_name = path::to::table)]`, to specify that this type contains /// columns for the specified table. The path is relative to the current module. /// If no field attributes are specified the derive will use the sql type of /// the corresponding column. /// * `#[diesel(check_for_backend(diesel::pg::Pg, diesel::mysql::Mysql))]`, instructs /// the derive to generate additional code to identify potential type mismatches. /// It accepts a list of backend types to check the types against. Using this option /// will result in much better error messages in cases where some types in your `QueryableByName` /// struct don't match. 
You need to specify the concrete database backend /// this specific struct is indented to be used with, as otherwise rustc can't correctly /// identify the required deserialization implementation. /// /// ## Optional field attributes /// /// * `#[diesel(column_name = some_column)]`, overrides the column name for /// a given field. If not set, the name of the field is used as a column /// name. This attribute is required on tuple structs, if /// `#[diesel(table_name = some_table)]` is used, otherwise it's optional. /// * `#[diesel(sql_type = SomeType)]`, assumes `SomeType` as sql type of the /// corresponding field. These attributes have precedence over all other /// variants to specify the sql type. /// * `#[diesel(deserialize_as = Type)]`, instead of deserializing directly /// into the field type, the implementation will deserialize into `Type`. /// Then `Type` is converted via `.into()` into the field type. By default, /// this derive will deserialize directly into the field type /// * `#[diesel(embed)]`, specifies that the current field maps not only /// a single database column, but it is a type that implements /// `QueryableByName` on its own /// /// # Examples /// /// If we just want to map a query to our struct, we can use `derive`. 
/// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use schema::users; /// # use diesel::sql_query; /// # /// #[derive(QueryableByName, PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let connection = &mut establish_connection(); /// let first_user = sql_query("SELECT * FROM users ORDER BY id LIMIT 1") /// .get_result(connection)?; /// let expected = User { id: 1, name: "Sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// If we want to do additional work during deserialization, we can use /// `deserialize_as` to use a different implementation. /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use diesel::sql_query; /// # use schema::users; /// # use diesel::backend::{self, Backend}; /// # use diesel::deserialize::{self, FromSql}; /// # /// struct LowercaseString(String); /// /// impl Into for LowercaseString { /// fn into(self) -> String { /// self.0 /// } /// } /// /// impl FromSql for LowercaseString /// where /// DB: Backend, /// String: FromSql, /// { /// fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result { /// String::from_sql(bytes) /// .map(|s| LowercaseString(s.to_lowercase())) /// } /// } /// /// #[derive(QueryableByName, PartialEq, Debug)] /// struct User { /// id: i32, /// #[diesel(deserialize_as = LowercaseString)] /// name: String, /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let connection = &mut establish_connection(); /// let first_user = sql_query("SELECT * FROM users ORDER BY id LIMIT 1") /// .get_result(connection)?; /// let expected = User { id: 1, name: "sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` /// /// The custom 
derive generates impls similar to the following one /// /// ```rust /// # extern crate diesel; /// # extern crate dotenvy; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use schema::users; /// # use diesel::sql_query; /// # use diesel::deserialize::{self, QueryableByName, FromSql}; /// # use diesel::row::NamedRow; /// # use diesel::backend::Backend; /// # /// #[derive(PartialEq, Debug)] /// struct User { /// id: i32, /// name: String, /// } /// /// impl QueryableByName for User /// where /// DB: Backend, /// i32: FromSql, DB>, /// String: FromSql, DB>, /// { /// fn build<'a>(row: &impl NamedRow<'a, DB>) -> deserialize::Result { /// let id = NamedRow::get::, _>(row, "id")?; /// let name = NamedRow::get::, _>(row, "name")?; /// /// Ok(Self { id, name }) /// } /// } /// /// # fn main() { /// # run_test(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let connection = &mut establish_connection(); /// let first_user = sql_query("SELECT * FROM users ORDER BY id LIMIT 1") /// .get_result(connection)?; /// let expected = User { id: 1, name: "Sean".into() }; /// assert_eq!(expected, first_user); /// # Ok(()) /// # } /// ``` #[cfg_attr( all(not(feature = "without-deprecated"), feature = "with-deprecated"), proc_macro_derive(QueryableByName, attributes(diesel, table_name, column_name, sql_type)) )] #[cfg_attr( any(feature = "without-deprecated", not(feature = "with-deprecated")), proc_macro_derive(QueryableByName, attributes(diesel)) )] pub fn derive_queryable_by_name(input: TokenStream) -> TokenStream { queryable_by_name::derive(parse_macro_input!(input)) .unwrap_or_else(syn::Error::into_compile_error) .into() } /// Implements `Selectable` /// /// To implement `Selectable` this derive needs to know the corresponding table /// type. By default, it uses the `snake_case` type name with an added `s`. /// It is possible to change this default by using `#[diesel(table_name = something)]`. 
/// /// If the name of a field on your struct is different from the column in your /// `table!` declaration, or if you're deriving this trait on a tuple struct, /// you can annotate the field with `#[diesel(column_name = some_column)]`. For tuple /// structs, all fields must have this annotation. /// /// If a field is another struct which implements `Selectable`, /// instead of a column, you can annotate that with `#[diesel(embed)]`. /// Then all fields contained by that inner struct are selected as separate tuple. /// Fields from an inner struct can come from a different table, as long as the /// select clause is valid in the current query. /// /// The derive enables using the `SelectableHelper::as_select` method to construct /// select clauses, in order to use LoadDsl, you might also check the /// `Queryable` trait and derive. /// /// # Attributes /// /// ## Type attributes /// /// * `#[diesel(table_name = path::to::table)]`, specifies a path to the table for which the /// current type is selectable. The path is relative to the current module. /// If this attribute is not used, the type name converted to /// `snake_case` with an added `s` is used as table name. /// /// ## Optional Type attributes /// /// * `#[diesel(check_for_backend(diesel::pg::Pg, diesel::mysql::Mysql))]`, instructs /// the derive to generate additional code to identify potential type mismatches. /// It accepts a list of backend types to check the types against. Using this option /// will result in much better error messages in cases where some types in your `Queryable` /// struct don't match. You need to specify the concrete database backend /// this specific struct is indented to be used with, as otherwise rustc can't correctly /// identify the required deserialization implementation. /// /// ## Field attributes /// /// * `#[diesel(column_name = some_column)]`, overrides the column name for /// a given field. If not set, the name of the field is used as column /// name. 
/// * `#[diesel(embed)]`, specifies that the current field maps not only
///   a single database column, but is a type that implements
///   `Selectable` on its own
/// * `#[diesel(select_expression = some_custom_select_expression)]`, overrides
///   the entire select expression for the given field. It may be used to select with
///   custom tuples, or specify `select_expression = my_table::some_field.is_not_null()`,
///   or separate tables...
///   It may be used in conjunction with `select_expression_type` (described below)
/// * `#[diesel(select_expression_type = the_custom_select_expression_type)]`, should be used
///   in conjunction with `select_expression` (described above) if the type is too complex
///   for diesel to infer it automatically. This will be required if select_expression is a custom
///   function call that doesn't have the corresponding associated type defined at the same path.
///   Example use (this would actually be inferred):
///   `#[diesel(select_expression_type = dsl::IsNotNull<my_table::some_field>)]`
#[proc_macro_derive(Selectable, attributes(diesel))]
pub fn derive_selectable(input: TokenStream) -> TokenStream {
    selectable::derive(parse_macro_input!(input))
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}

/// Implement necessary traits for adding a new sql type
///
/// This trait implements all necessary traits to define a
/// new sql type. This is useful for adding support for unsupported
/// or custom types on the sql side. The sql type will be usable for
/// all backends you specified via the attributes listed below.
///
/// This derive will implement `NotNull`, `HasSqlType` and `SingleValue`.
/// When using this derive macro,
/// you need to specify how the type is represented on various backends.
/// You don't need to specify every backend,
/// only the ones supported by your type.
///
/// For PostgreSQL, add `#[diesel(postgres_type(name = "pg_type_name", schema = "pg_schema_name"))]`
/// or `#[diesel(postgres_type(oid = "some_oid", array_oid = "some_oid"))]` for
/// builtin types.
/// For MySQL, specify which variant of `MysqlType` should be used
/// by adding `#[diesel(mysql_type(name = "Variant"))]`.
/// For SQLite, specify which variant of `SqliteType` should be used
/// by adding `#[diesel(sqlite_type(name = "Variant"))]`.
///
/// # Attributes
///
/// ## Type attributes
///
/// * `#[diesel(postgres_type(name = "TypeName", schema = "public"))]` specifies support for
///   a postgresql type with the name `TypeName` in the schema `public`. Prefer this variant
///   for types with no stable OID (== everything but the builtin types). It is possible to
///   leave off the `schema` part. In that case, Diesel defaults to the default postgres
///   search path.
/// * `#[diesel(postgres_type(oid = 42, array_oid = 142))]`, specifies support for a
///   postgresql type with the given `oid` and `array_oid`. This variant
///   should only be used with types that have a stable OID.
/// * `#[diesel(sqlite_type(name = "TypeName"))]`, specifies support for a sqlite type
///   with the given name. `TypeName` needs to be one of the possible values
///   in `SqliteType`
/// * `#[diesel(mysql_type(name = "TypeName"))]`, specifies support for a mysql type
///   with the given name. `TypeName` needs to be one of the possible values
///   in `MysqlType`
#[cfg_attr(
    all(not(feature = "without-deprecated"), feature = "with-deprecated"),
    proc_macro_derive(SqlType, attributes(diesel, postgres, sqlite_type, mysql_type))
)]
#[cfg_attr(
    any(feature = "without-deprecated", not(feature = "with-deprecated")),
    proc_macro_derive(SqlType, attributes(diesel))
)]
pub fn derive_sql_type(input: TokenStream) -> TokenStream {
    sql_type::derive(parse_macro_input!(input))
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}

/// Implements `ValidGrouping`
///
/// This trait can be automatically derived for structs with no type parameters
/// which are never aggregate, as well as for structs which are `NonAggregate`
/// when all type parameters are `NonAggregate`. For example:
///
/// ```ignore
/// #[derive(ValidGrouping)]
/// struct LiteralOne;
///
/// #[derive(ValidGrouping)]
/// struct Plus<Lhs, Rhs>(Lhs, Rhs);
///
/// // The following impl will be generated:
///
/// impl<GroupByClause> ValidGrouping<GroupByClause> for LiteralOne {
///     type IsAggregate = is_aggregate::Never;
/// }
///
/// impl<Lhs, Rhs, GroupByClause> ValidGrouping<GroupByClause> for Plus<Lhs, Rhs>
/// where
///     Lhs: ValidGrouping<GroupByClause>,
///     Rhs: ValidGrouping<GroupByClause>,
///     Lhs::IsAggregate: MixedAggregates<Rhs::IsAggregate>,
/// {
///     type IsAggregate = <Lhs::IsAggregate as MixedAggregates<Rhs::IsAggregate>>::Output;
/// }
/// ```
// NOTE(review): the generic parameters in the example above were reconstructed;
// the extraction this chunk came from stripped `<...>` sequences from the docs.
///
/// For types which are always considered aggregate (such as an aggregate
/// function), annotate your struct with `#[diesel(aggregate)]` to set `IsAggregate`
/// explicitly to `is_aggregate::Yes`.
///
/// # Attributes
///
/// ## Optional container attributes
///
/// * `#[diesel(aggregate)]` for cases where the type represents an aggregating
///   SQL expression
#[proc_macro_derive(ValidGrouping, attributes(diesel))]
pub fn derive_valid_grouping(input: TokenStream) -> TokenStream {
    valid_grouping::derive(parse_macro_input!(input))
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}

/// Declare a sql function for use in your code.
///
/// Diesel only provides support for a very small number of SQL functions.
/// This macro enables you to add additional functions from the SQL standard, /// as well as any custom functions your application might have. /// /// The syntax for this macro is very similar to that of a normal Rust function, /// except the argument and return types will be the SQL types being used. /// Typically, these types will come from [`diesel::sql_types`](../diesel/sql_types/index.html) /// /// This macro will generate two items. A function with the name that you've /// given, and a module with a helper type representing the return type of your /// function. For example, this invocation: /// /// ```ignore /// define_sql_function!(fn lower(x: Text) -> Text); /// ``` /// /// will generate this code: /// /// ```ignore /// pub fn lower(x: X) -> lower { /// ... /// } /// /// pub type lower = ...; /// ``` /// /// Most attributes given to this macro will be put on the generated function /// (including doc comments). /// /// # Adding Doc Comments /// /// ```no_run /// # extern crate diesel; /// # use diesel::*; /// # /// # table! { crates { id -> Integer, name -> VarChar, } } /// # /// use diesel::sql_types::Text; /// /// define_sql_function! { /// /// Represents the `canon_crate_name` SQL function, created in /// /// migration .... /// fn canon_crate_name(a: Text) -> Text; /// } /// /// # fn main() { /// # use self::crates::dsl::*; /// let target_name = "diesel"; /// crates.filter(canon_crate_name(name).eq(canon_crate_name(target_name))); /// // This will generate the following SQL /// // SELECT * FROM crates WHERE canon_crate_name(crates.name) = canon_crate_name($1) /// # } /// ``` /// /// # Special Attributes /// /// There are a handful of special attributes that Diesel will recognize. They /// are: /// /// - `#[aggregate]` /// - Indicates that this is an aggregate function, and that `NonAggregate` /// shouldn't be implemented. /// - `#[sql_name = "name"]` /// - The SQL to be generated is different from the Rust name of the function. 
/// This can be used to represent functions which can take many argument /// types, or to capitalize function names. /// /// Functions can also be generic. Take the definition of `sum`, for example: /// /// ```no_run /// # extern crate diesel; /// # use diesel::*; /// # /// # table! { crates { id -> Integer, name -> VarChar, } } /// # /// use diesel::sql_types::Foldable; /// /// define_sql_function! { /// #[aggregate] /// #[sql_name = "SUM"] /// fn sum(expr: ST) -> ST::Sum; /// } /// /// # fn main() { /// # use self::crates::dsl::*; /// crates.select(sum(id)); /// # } /// ``` /// /// # SQL Functions without Arguments /// /// A common example is ordering a query using the `RANDOM()` sql function, /// which can be implemented using `define_sql_function!` like this: /// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # table! { crates { id -> Integer, name -> VarChar, } } /// # /// define_sql_function!(fn random() -> Text); /// /// # fn main() { /// # use self::crates::dsl::*; /// crates.order(random()); /// # } /// ``` /// /// # Use with SQLite /// /// On most backends, the implementation of the function is defined in a /// migration using `CREATE FUNCTION`. On SQLite, the function is implemented in /// Rust instead. You must call `register_impl` or /// `register_nondeterministic_impl` (in the generated function's `_internals` /// module) with every connection before you can use the function. /// /// These functions will only be generated if the `sqlite` feature is enabled, /// and the function is not generic. /// SQLite doesn't support generic functions and variadic functions. 
/// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # #[cfg(feature = "sqlite")] /// # fn main() { /// # run_test().unwrap(); /// # } /// # /// # #[cfg(not(feature = "sqlite"))] /// # fn main() { /// # } /// # /// use diesel::sql_types::{Integer, Double}; /// define_sql_function!(fn add_mul(x: Integer, y: Integer, z: Double) -> Double); /// /// # #[cfg(feature = "sqlite")] /// # fn run_test() -> Result<(), Box> { /// let connection = &mut SqliteConnection::establish(":memory:")?; /// /// add_mul_utils::register_impl(connection, |x: i32, y: i32, z: f64| { /// (x + y) as f64 * z /// })?; /// /// let result = select(add_mul(1, 2, 1.5)) /// .get_result::(connection)?; /// assert_eq!(4.5, result); /// # Ok(()) /// # } /// ``` /// /// ## Panics /// /// If an implementation of the custom function panics and unwinding is enabled, the panic is /// caught and the function returns to libsqlite with an error. It can't propagate the panics due /// to the FFI boundary. /// /// This is the same for [custom aggregate functions](#custom-aggregate-functions). /// /// ## Custom Aggregate Functions /// /// Custom aggregate functions can be created in SQLite by adding an `#[aggregate]` /// attribute inside `define_sql_function`. `register_impl` (in the generated function's `_utils` /// module) needs to be called with a type implementing the /// [SqliteAggregateFunction](../diesel/sqlite/trait.SqliteAggregateFunction.html) /// trait as a type parameter as shown in the examples below. /// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # #[cfg(feature = "sqlite")] /// # fn main() { /// # run().unwrap(); /// # } /// # /// # #[cfg(not(feature = "sqlite"))] /// # fn main() { /// # } /// use diesel::sql_types::Integer; /// # #[cfg(feature = "sqlite")] /// use diesel::sqlite::SqliteAggregateFunction; /// /// define_sql_function! 
{ /// #[aggregate] /// fn my_sum(x: Integer) -> Integer; /// } /// /// #[derive(Default)] /// struct MySum { sum: i32 } /// /// # #[cfg(feature = "sqlite")] /// impl SqliteAggregateFunction for MySum { /// type Output = i32; /// /// fn step(&mut self, expr: i32) { /// self.sum += expr; /// } /// /// fn finalize(aggregator: Option) -> Self::Output { /// aggregator.map(|a| a.sum).unwrap_or_default() /// } /// } /// # table! { /// # players { /// # id -> Integer, /// # score -> Integer, /// # } /// # } /// /// # #[cfg(feature = "sqlite")] /// fn run() -> Result<(), Box> { /// # use self::players::dsl::*; /// let connection = &mut SqliteConnection::establish(":memory:")?; /// # diesel::sql_query("create table players (id integer primary key autoincrement, score integer)") /// # .execute(connection) /// # .unwrap(); /// # diesel::sql_query("insert into players (score) values (10), (20), (30)") /// # .execute(connection) /// # .unwrap(); /// /// my_sum_utils::register_impl::(connection)?; /// /// let total_score = players.select(my_sum(score)) /// .get_result::(connection)?; /// /// println!("The total score of all the players is: {}", total_score); /// /// # assert_eq!(60, total_score); /// Ok(()) /// } /// ``` /// /// With multiple function arguments, the arguments are passed as a tuple to `SqliteAggregateFunction` /// /// ```rust /// # extern crate diesel; /// # use diesel::*; /// # /// # #[cfg(feature = "sqlite")] /// # fn main() { /// # run().unwrap(); /// # } /// # /// # #[cfg(not(feature = "sqlite"))] /// # fn main() { /// # } /// use diesel::sql_types::{Float, Nullable}; /// # #[cfg(feature = "sqlite")] /// use diesel::sqlite::SqliteAggregateFunction; /// /// define_sql_function! 
/// {
///     #[aggregate]
///     fn range_max(x0: Float, x1: Float) -> Nullable<Float>;
/// }
///
/// #[derive(Default)]
/// struct RangeMax<T> { max_value: Option<T> }
///
/// # #[cfg(feature = "sqlite")]
/// impl<T: Default + PartialOrd + Copy> SqliteAggregateFunction<(T, T)> for RangeMax<T> {
///     type Output = Option<T>;
///
///     fn step(&mut self, (x0, x1): (T, T)) {
/// #       let max = if x0 >= x1 {
/// #           x0
/// #       } else {
/// #           x1
/// #       };
/// #
/// #       self.max_value = match self.max_value {
/// #           Some(current_max_value) if max > current_max_value => Some(max),
/// #           None => Some(max),
/// #           _ => self.max_value,
/// #       };
///         // Compare self.max_value to x0 and x1
///     }
///
///     fn finalize(aggregator: Option<Self>) -> Self::Output {
///         aggregator?.max_value
///     }
/// }
/// # table! {
/// #     student_avgs {
/// #         id -> Integer,
/// #         s1_avg -> Float,
/// #         s2_avg -> Float,
/// #     }
/// # }
///
/// # #[cfg(feature = "sqlite")]
/// fn run() -> Result<(), Box<dyn std::error::Error>> {
/// #     use self::student_avgs::dsl::*;
///     let connection = &mut SqliteConnection::establish(":memory:")?;
/// #     diesel::sql_query("create table student_avgs (id integer primary key autoincrement, s1_avg float, s2_avg float)")
/// #         .execute(connection)
/// #         .unwrap();
/// #     diesel::sql_query("insert into student_avgs (s1_avg, s2_avg) values (85.5, 90), (79.8, 80.1)")
/// #         .execute(connection)
/// #         .unwrap();
///
///     range_max_utils::register_impl::<RangeMax<f32>, _, _>(connection)?;
///
///     let result = student_avgs.select(range_max(s1_avg, s2_avg))
///         .get_result::<Option<f32>>(connection)?;
///
///     if let Some(max_semester_avg) = result {
///         println!("The largest semester average is: {}", max_semester_avg);
///     }
///
/// #   assert_eq!(Some(90f32), result);
///     Ok(())
/// }
/// ```
// NOTE(review): the generic parameters in the doctest above were reconstructed;
// the extraction this chunk came from stripped `<...>` sequences from the docs.
#[proc_macro]
pub fn define_sql_function(input: TokenStream) -> TokenStream {
    sql_function::expand(parse_macro_input!(input), false).into()
}

/// A legacy version of [`define_sql_function!`].
///
/// The difference is that it makes the helper type available in a module named the exact same as
/// the function:
///
/// ```ignore
/// sql_function!(fn lower(x: Text) -> Text);
/// ```
///
/// will generate this code:
///
/// ```ignore
/// pub fn lower<X>(x: X) -> lower::HelperType<X> {
///     ...
/// }
///
/// pub(crate) mod lower {
///     pub type HelperType<X> = ...;
/// }
/// ```
///
/// This turned out to be an issue for the support of the `auto_type` feature, which is why
/// [`define_sql_function!`] was introduced (and why this is deprecated).
///
/// SQL functions declared with this version of the macro will not be usable with `#[auto_type]`
/// or `Selectable` `select_expression` type inference.
#[deprecated(since = "2.2.0", note = "Use [`define_sql_function`] instead")]
#[proc_macro]
#[cfg(all(feature = "with-deprecated", not(feature = "without-deprecated")))]
pub fn sql_function_proc(input: TokenStream) -> TokenStream {
    sql_function::expand(parse_macro_input!(input), true).into()
}

/// This is an internal diesel macro that
/// helps to implement all traits for tuples of
/// various sizes
#[doc(hidden)]
#[proc_macro]
pub fn __diesel_for_each_tuple(input: TokenStream) -> TokenStream {
    diesel_for_each_tuple::expand(parse_macro_input!(input)).into()
}

/// This is an internal diesel macro that
/// helps to restrict the visibility of an item based
/// on a feature flag
#[doc(hidden)]
#[proc_macro_attribute]
pub fn __diesel_public_if(attrs: TokenStream, input: TokenStream) -> TokenStream {
    diesel_public_if::expand(parse_macro_input!(attrs), parse_macro_input!(input)).into()
}

/// Specifies that a table exists, and what columns it has. This will create a
/// new public module, with the same name, as the name of the table. In this
/// module, you will find a unit struct named `table`, and a unit struct with the
/// name of each column.
///
/// By default, this allows a maximum of 32 columns per table.
/// You can increase this limit to 64 by enabling the `64-column-tables` feature. /// You can increase it to 128 by enabling the `128-column-tables` feature. /// You can decrease it to 16 columns, /// which improves compilation time, /// by disabling the default features of Diesel. /// Note that enabling 64 column tables or larger will substantially increase /// the compile time of Diesel. /// /// Example usage /// ------------- /// /// ```rust /// # extern crate diesel; /// /// diesel::table! { /// users { /// id -> Integer, /// name -> VarChar, /// favorite_color -> Nullable, /// } /// } /// ``` /// /// You may also specify a primary key if it is called something other than `id`. /// Tables with no primary key aren't supported. /// /// ```rust /// # extern crate diesel; /// /// diesel::table! { /// users (non_standard_primary_key) { /// non_standard_primary_key -> Integer, /// name -> VarChar, /// favorite_color -> Nullable, /// } /// } /// ``` /// /// For tables with composite primary keys, list all the columns in the primary key. /// /// ```rust /// # extern crate diesel; /// /// diesel::table! { /// followings (user_id, post_id) { /// user_id -> Integer, /// post_id -> Integer, /// favorited -> Bool, /// } /// } /// # fn main() { /// # use diesel::prelude::Table; /// # use self::followings::dsl::*; /// # // Poor man's assert_eq! -- since this is type level this would fail /// # // to compile if the wrong primary key were generated /// # let (user_id {}, post_id {}) = followings.primary_key(); /// # } /// ``` /// /// If you are using types that aren't from Diesel's core types, you can specify /// which types to import. /// /// ``` /// # extern crate diesel; /// # mod diesel_full_text_search { /// # #[derive(diesel::sql_types::SqlType)] /// # pub struct TsVector; /// # } /// /// diesel::table! 
{ /// use diesel::sql_types::*; /// # use crate::diesel_full_text_search::*; /// # /* /// use diesel_full_text_search::*; /// # */ /// /// posts { /// id -> Integer, /// title -> Text, /// keywords -> TsVector, /// } /// } /// # fn main() {} /// ``` /// /// If you want to add documentation to the generated code, you can use the /// following syntax: /// /// ``` /// # extern crate diesel; /// /// diesel::table! { /// /// The table containing all blog posts /// posts { /// /// The post's unique id /// id -> Integer, /// /// The post's title /// title -> Text, /// } /// } /// ``` /// /// If you have a column with the same name as a Rust reserved keyword, you can use /// the `sql_name` attribute like this: /// /// ``` /// # extern crate diesel; /// /// diesel::table! { /// posts { /// id -> Integer, /// /// This column is named `mytype` but references the table `type` column. /// #[sql_name = "type"] /// mytype -> Text, /// } /// } /// ``` /// /// This module will also contain several helper types: /// /// dsl /// --- /// /// This simply re-exports the table, renamed to the same name as the module, /// and each of the columns. This is useful to glob import when you're dealing /// primarily with one table, to allow writing `users.filter(name.eq("Sean"))` /// instead of `users::table.filter(users::name.eq("Sean"))`. /// /// `all_columns` /// ----------- /// /// A constant will be assigned called `all_columns`. This is what will be /// selected if you don't otherwise specify a select clause. It's type will be /// `table::AllColumns`. You can also get this value from the /// `Table::all_columns` function. /// /// star /// ---- /// /// This will be the qualified "star" expression for this table (e.g. /// `users.*`). Internally, we read columns by index, not by name, so this /// column is not safe to read data out of, and it has had its SQL type set to /// `()` to prevent accidentally using it as such. It is sometimes useful for /// counting statements, however. 
It can also be accessed through the `Table.star()` /// method. /// /// `SqlType` /// ------- /// /// A type alias called `SqlType` will be created. It will be the SQL type of /// `all_columns`. The SQL type is needed for things like returning boxed /// queries. /// /// `BoxedQuery` /// ---------- /// /// ```ignore /// pub type BoxedQuery<'a, DB, ST = SqlType> = BoxedSelectStatement<'a, ST, table, DB>; /// ``` #[proc_macro] pub fn table_proc(input: TokenStream) -> TokenStream { match syn::parse(input) { Ok(input) => table::expand(input).into(), Err(_) => quote::quote! { compile_error!( "Invalid `table!` syntax. Please see the `table!` macro docs for more info.\n\ Docs available at: `https://docs.diesel.rs/master/diesel/macro.table.html`\n" ); } .into(), } } /// This derives implements `diesel::Connection` and related traits for an enum of /// connections to different databases. /// /// By applying this derive to such an enum, you can use the enum as a connection type in /// any location all the inner connections are valid. This derive supports enum /// variants containing a single tuple field. Each tuple field type must implement /// `diesel::Connection` and a number of related traits. Connection types form Diesel itself /// as well as third party connection types are supported by this derive. /// /// The implementation of `diesel::Connection::establish` tries to establish /// a new connection with the given connection string in the order the connections /// are specified in the enum. If one connection fails, it tries the next one and so on. /// That means that as soon as more than one connection type accepts a certain connection /// string the first matching type in your enum will always establish the connection. This /// is especially important if one of the connection types is `diesel::SqliteConnection` /// as this connection type accepts arbitrary paths. It should normally place as last entry /// in your enum. 
If you want control of which connection type is created, just construct the /// corresponding enum manually by first establishing the connection via the inner type and then /// wrap the result into the enum. /// /// # Example /// ``` /// # extern crate diesel; /// # use diesel::result::QueryResult; /// use diesel::prelude::*; /// /// #[derive(diesel::MultiConnection)] /// pub enum AnyConnection { /// # #[cfg(feature = "postgres")] /// Postgresql(diesel::PgConnection), /// # #[cfg(feature = "mysql")] /// Mysql(diesel::MysqlConnection), /// # #[cfg(feature = "sqlite")] /// Sqlite(diesel::SqliteConnection), /// } /// /// diesel::table! { /// users { /// id -> Integer, /// name -> Text, /// } /// } /// /// fn use_multi(conn: &mut AnyConnection) -> QueryResult<()> { /// // Use the connection enum as any other connection type /// // for inserting/updating/loading/… /// diesel::insert_into(users::table) /// .values(users::name.eq("Sean")) /// .execute(conn)?; /// /// let users = users::table.load::<(i32, String)>(conn)?; /// /// // Match on the connection type to access /// // the inner connection. This allows us then to use /// // backend specific methods. /// # #[cfg(feature = "postgres")] /// if let AnyConnection::Postgresql(ref mut conn) = conn { /// // perform a postgresql specific query here /// let users = users::table.load::<(i32, String)>(conn)?; /// } /// /// Ok(()) /// } /// /// # fn main() {} /// ``` /// /// # Limitations /// /// The derived connection implementation can only cover the common subset of /// all inner connection types. So, if one backend doesn't support certain SQL features, /// like for example, returning clauses, the whole connection implementation doesn't /// support this feature. 
In addition, only a limited set of SQL types is supported: /// /// * `diesel::sql_types::SmallInt` /// * `diesel::sql_types::Integer` /// * `diesel::sql_types::BigInt` /// * `diesel::sql_types::Double` /// * `diesel::sql_types::Float` /// * `diesel::sql_types::Text` /// * `diesel::sql_types::Date` /// * `diesel::sql_types::Time` /// * `diesel::sql_types::Timestamp` /// /// Support for additional types can be added by providing manual implementations of /// `HasSqlType`, `FromSql` and `ToSql` for the corresponding type, all databases included /// in your enum, and the backend generated by this derive called `MultiBackend`. /// For example to support a custom enum `MyEnum` with the custom SQL type `MyInteger`: /// ``` /// extern crate diesel; /// use diesel::backend::Backend; /// use diesel::deserialize::{self, FromSql, FromSqlRow}; /// use diesel::serialize::{self, IsNull, ToSql}; /// use diesel::AsExpression; /// use diesel::sql_types::{HasSqlType, SqlType}; /// use diesel::prelude::*; /// /// #[derive(diesel::MultiConnection)] /// pub enum AnyConnection { /// # #[cfg(feature = "postgres")] /// Postgresql(diesel::PgConnection), /// # #[cfg(feature = "mysql")] /// Mysql(diesel::MysqlConnection), /// # #[cfg(feature = "sqlite")] /// Sqlite(diesel::SqliteConnection), /// } /// /// // defining an custom SQL type is optional /// // you can also use types from `diesel::sql_types` /// #[derive(Copy, Clone, Debug, SqlType)] /// #[diesel(postgres_type(name = "Int4"))] /// #[diesel(mysql_type(name = "Long"))] /// #[diesel(sqlite_type(name = "Integer"))] /// struct MyInteger; /// /// /// // our custom enum /// #[repr(i32)] /// #[derive(Debug, Clone, Copy, AsExpression, FromSqlRow)] /// #[diesel(sql_type = MyInteger)] /// pub enum MyEnum { /// A = 1, /// B = 2, /// } /// /// // The `MultiBackend` type is generated by `#[derive(diesel::MultiConnection)]` /// // This part is only required if you define a custom sql type /// impl HasSqlType for MultiBackend { /// fn 
/// metadata(lookup: &mut Self::MetadataLookup) -> Self::TypeMetadata {
///         // The `lookup_sql_type` function is exposed by the `MultiBackend` type
///         MultiBackend::lookup_sql_type::<MyInteger>(lookup)
///     }
/// }
///
/// impl FromSql<MyInteger, MultiBackend> for MyEnum {
///     fn from_sql(bytes: <MultiBackend as Backend>::RawValue<'_>) -> deserialize::Result<Self> {
///         // The `from_sql` function is exposed by the `RawValue` type of the
///         // `MultiBackend` type
///         // This requires a `FromSql` impl for each backend
///         bytes.from_sql::<MyEnum, MyInteger>()
///     }
/// }
///
/// impl ToSql<MyInteger, MultiBackend> for MyEnum {
///     fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, MultiBackend>) -> serialize::Result {
///         // `set_value` expects a tuple consisting of the target SQL type
///         // and self for `MultiBackend`
///         // This requires a `ToSql` impl for each backend
///         out.set_value((MyInteger, self));
///         Ok(IsNull::No)
///     }
/// }
/// # #[cfg(feature = "postgres")]
/// # impl ToSql<MyInteger, diesel::pg::Pg> for MyEnum {
/// #    fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, diesel::pg::Pg>) -> serialize::Result { todo!() }
/// # }
/// # #[cfg(feature = "mysql")]
/// # impl ToSql<MyInteger, diesel::mysql::Mysql> for MyEnum {
/// #    fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, diesel::mysql::Mysql>) -> serialize::Result { todo!() }
/// # }
/// # #[cfg(feature = "sqlite")]
/// # impl ToSql<MyInteger, diesel::sqlite::Sqlite> for MyEnum {
/// #    fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, diesel::sqlite::Sqlite>) -> serialize::Result { todo!() }
/// # }
/// # #[cfg(feature = "postgres")]
/// # impl FromSql<MyInteger, diesel::pg::Pg> for MyEnum {
/// #    fn from_sql(bytes: <diesel::pg::Pg as Backend>::RawValue<'_>) -> deserialize::Result<Self> { todo!() }
/// # }
/// # #[cfg(feature = "mysql")]
/// # impl FromSql<MyInteger, diesel::mysql::Mysql> for MyEnum {
/// #    fn from_sql(bytes: <diesel::mysql::Mysql as Backend>::RawValue<'_>) -> deserialize::Result<Self> { todo!() }
/// # }
/// # #[cfg(feature = "sqlite")]
/// # impl FromSql<MyInteger, diesel::sqlite::Sqlite> for MyEnum {
/// #    fn from_sql(bytes: <diesel::sqlite::Sqlite as Backend>::RawValue<'_>) -> deserialize::Result<Self> { todo!() }
/// # }
/// # fn main() {}
/// ```
// NOTE(review): the generic parameters in the doctest above were reconstructed;
// the extraction this chunk came from stripped `<...>` sequences from the docs.
#[proc_macro_derive(MultiConnection)]
pub fn derive_multiconnection(input: TokenStream) -> TokenStream {
    multiconnection::derive(syn::parse_macro_input!(input)).into()
}

/// Automatically annotates return type of a query fragment function
///
/// This may be useful when factoring out common query fragments into functions.
/// If not using this, it would typically involve explicitly writing the full
/// type of the query fragment function, which depending on the length of said
/// query fragment can be quite difficult (especially to maintain) and verbose.
///
/// # Example
///
/// ```rust
/// # extern crate diesel;
/// # include!("../../diesel/src/doctest_setup.rs");
/// # use schema::{users, posts};
/// use diesel::dsl;
///
/// # fn main() {
/// #    run_test().unwrap();
/// # }
/// #
/// # fn run_test() -> QueryResult<()> {
/// #    let conn = &mut establish_connection();
/// #
/// #[dsl::auto_type]
/// fn user_has_post() -> _ {
///     dsl::exists(posts::table.filter(posts::user_id.eq(users::id)))
/// }
///
/// let users_with_posts: Vec<String> = users::table
///     .filter(user_has_post())
///     .select(users::name)
///     .load(conn)?;
///
/// assert_eq!(
///     &["Sean", "Tess"] as &[_],
///     users_with_posts
///         .iter()
///         .map(|s| s.as_str())
///         .collect::<Vec<_>>()
/// );
/// # Ok(())
/// # }
/// ```
/// # Limitations
///
/// While this attribute tries to support as much of diesels built-in DSL as possible it's
/// unfortunately not possible to support everything. Notable unsupported types are:
///
/// * Update statements
/// * Insert from select statements
/// * Query constructed by `diesel::sql_query`
/// * Expressions using `diesel::dsl::sql`
///
/// For these cases a manual type annotation is required. See the "Annotating Types" section below
/// for details.
///
///
/// # Advanced usage
///
/// By default, the macro will:
/// - Generate a type alias for the return type of the function, named the
///   exact same way as the function itself.
/// - Assume that functions, unless otherwise annotated, have a type alias for /// their return type available at the same path as the function itself /// (including case). (e.g. for the `dsl::not(x)` call, it expects that there /// is a `dsl::not` type alias available) /// - Assume that methods, unless otherwise annotated, have a type alias /// available as `diesel::dsl::PascalCaseOfMethodName` (e.g. for the /// `x.and(y)` call, it expects that there is a `diesel::dsl::And` type /// alias available) /// /// The defaults can be changed by passing the following attributes to the /// macro: /// - `#[auto_type(no_type_alias)]` to disable the generation of the type alias. /// - `#[auto_type(dsl_path = "path::to::dsl")]` to change the path where the /// macro will look for type aliases for methods. This is required if you mix your own /// custom query dsl extensions with diesel types. In that case, you may use this argument to /// reference a module defined like so: /// ```ignore /// mod dsl { /// /// export all of diesel dsl /// pub use diesel::dsl::*; /// /// /// Export your extension types here /// pub use crate::your_extension::dsl::YourType; /// } /// ``` /// - `#[auto_type(method_type_case = "snake_case")]` to change the case of the /// method type alias. /// - `#[auto_type(function_type_case = "snake_case")]` to change the case of /// the function type alias (if you don't want the exact same path but want to /// change the case of the last element of the path). /// /// The `dsl_path` attribute in particular may be used to declare an /// intermediate module where you would define the few additional needed type /// aliases that can't be inferred automatically. /// /// ## Annotating types /// /// Sometimes the macro can't infer the type of a particular sub-expression. 
In /// that case, you can annotate the type of the sub-expression: /// /// ```rust /// # extern crate diesel; /// # include!("../../diesel/src/doctest_setup.rs"); /// # use schema::{users, posts}; /// use diesel::dsl; /// /// # fn main() { /// # run_test().unwrap(); /// # } /// # /// # fn run_test() -> QueryResult<()> { /// # let conn = &mut establish_connection(); /// # /// // This will generate a `user_has_post_with_id_greater_than` type alias /// #[dsl::auto_type] /// fn user_has_post_with_id_greater_than(id_greater_than: i32) -> _ { /// dsl::exists( /// posts::table /// .filter(posts::user_id.eq(users::id)) /// .filter(posts::id.gt(id_greater_than)), /// ) /// } /// /// #[dsl::auto_type] /// fn users_with_posts_with_id_greater_than(id_greater_than: i32) -> _ { /// // If we didn't specify the type for this query fragment, the macro would infer it as /// // `user_has_post_with_id_greater_than`, which would be incorrect because there is /// // no generic parameter. /// let filter: user_has_post_with_id_greater_than = /// user_has_post_with_id_greater_than(id_greater_than); /// // The macro inferring that it has to pass generic parameters is still the convention /// // because it's the most general case, as well as the common case within Diesel itself, /// // and because annotating this way is reasonably simple, while the other way around /// // would be hard. 
/// /// users::table.filter(filter).select(users::name) /// } /// /// let users_with_posts: Vec = users_with_posts_with_id_greater_than(2).load(conn)?; /// /// assert_eq!( /// &["Tess"] as &[_], /// users_with_posts /// .iter() /// .map(|s| s.as_str()) /// .collect::>() /// ); /// # Ok(()) /// # } /// ``` #[proc_macro_attribute] pub fn auto_type( attr: proc_macro::TokenStream, input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { dsl_auto_type::auto_type_proc_macro_attribute( proc_macro2::TokenStream::from(attr), proc_macro2::TokenStream::from(input), dsl_auto_type::DeriveSettings::builder() .default_dsl_path(parse_quote!(diesel::dsl)) .default_generate_type_alias(true) .default_method_type_case(AUTO_TYPE_DEFAULT_METHOD_TYPE_CASE) .default_function_type_case(AUTO_TYPE_DEFAULT_FUNCTION_TYPE_CASE) .build(), ) .into() } const AUTO_TYPE_DEFAULT_METHOD_TYPE_CASE: dsl_auto_type::Case = dsl_auto_type::Case::UpperCamel; const AUTO_TYPE_DEFAULT_FUNCTION_TYPE_CASE: dsl_auto_type::Case = dsl_auto_type::Case::DoNotChange; diesel_derives-2.2.3/src/model.rs000064400000000000000000000146411046102023000150610ustar 00000000000000use proc_macro2::Span; use std::slice::from_ref; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::Result; use syn::{ Data, DataStruct, DeriveInput, Field as SynField, Fields, FieldsNamed, FieldsUnnamed, Ident, LitBool, Path, Type, }; use crate::attrs::{parse_attributes, StructAttr}; use crate::field::Field; use crate::parsers::{BelongsTo, MysqlType, PostgresType, SqliteType}; use crate::util::camel_to_snake; pub struct Model { name: Path, table_names: Vec, pub primary_key_names: Vec, treat_none_as_default_value: Option, treat_none_as_null: Option, pub belongs_to: Vec, pub sql_types: Vec, pub aggregate: bool, pub not_sized: bool, pub foreign_derive: bool, pub mysql_type: Option, pub sqlite_type: Option, pub postgres_type: Option, pub check_for_backend: Option>, fields: Vec, } impl Model { pub fn from_item( item: &DeriveInput, 
allow_unit_structs: bool, allow_multiple_table: bool, ) -> Result { let DeriveInput { data, ident, attrs, .. } = item; let fields = match *data { Data::Struct(DataStruct { fields: Fields::Named(FieldsNamed { ref named, .. }), .. }) => Some(named), Data::Struct(DataStruct { fields: Fields::Unnamed(FieldsUnnamed { ref unnamed, .. }), .. }) => Some(unnamed), _ if !allow_unit_structs => { return Err(syn::Error::new( proc_macro2::Span::call_site(), "This derive can only be used on non-unit structs", )); } _ => None, }; let mut table_names = vec![]; let mut primary_key_names = vec![Ident::new("id", Span::call_site())]; let mut treat_none_as_default_value = None; let mut treat_none_as_null = None; let mut belongs_to = vec![]; let mut sql_types = vec![]; let mut aggregate = false; let mut not_sized = false; let mut foreign_derive = false; let mut mysql_type = None; let mut sqlite_type = None; let mut postgres_type = None; let mut check_for_backend = None; for attr in parse_attributes(attrs)? { match attr.item { StructAttr::SqlType(_, value) => sql_types.push(Type::Path(value)), StructAttr::TableName(ident, value) => { if !allow_multiple_table && !table_names.is_empty() { return Err(syn::Error::new( ident.span(), "expected a single table name attribute\n\ note: remove this attribute", )); } table_names.push(value) } StructAttr::PrimaryKey(_, keys) => { primary_key_names = keys.into_iter().collect(); } StructAttr::TreatNoneAsDefaultValue(_, val) => { treat_none_as_default_value = Some(val) } StructAttr::TreatNoneAsNull(_, val) => treat_none_as_null = Some(val), StructAttr::BelongsTo(_, val) => belongs_to.push(val), StructAttr::Aggregate(_) => aggregate = true, StructAttr::NotSized(_) => not_sized = true, StructAttr::ForeignDerive(_) => foreign_derive = true, StructAttr::MysqlType(_, val) => mysql_type = Some(val), StructAttr::SqliteType(_, val) => sqlite_type = Some(val), StructAttr::PostgresType(_, val) => postgres_type = Some(val), StructAttr::CheckForBackend(_, b) => { 
check_for_backend = Some(b); } } } let name = Ident::new(&infer_table_name(&ident.to_string()), ident.span()).into(); Ok(Self { name, table_names, primary_key_names, treat_none_as_default_value, treat_none_as_null, belongs_to, sql_types, aggregate, not_sized, foreign_derive, mysql_type, sqlite_type, postgres_type, fields: fields_from_item_data(fields)?, check_for_backend, }) } pub fn table_names(&self) -> &[Path] { match self.table_names.len() { 0 => from_ref(&self.name), _ => &self.table_names, } } pub fn fields(&self) -> &[Field] { &self.fields } pub fn find_column(&self, column_name: &Ident) -> Result<&Field> { self.fields() .iter() .find(|f| { f.column_name() .map(|c| c == *column_name) .unwrap_or_default() }) .ok_or_else(|| { syn::Error::new( column_name.span(), format!("No field with column name {column_name}"), ) }) } pub fn treat_none_as_default_value(&self) -> bool { self.treat_none_as_default_value .as_ref() .map(|v| v.value()) .unwrap_or(true) } pub fn treat_none_as_null(&self) -> bool { self.treat_none_as_null .as_ref() .map(|v| v.value()) .unwrap_or(false) } } fn fields_from_item_data(fields: Option<&Punctuated>) -> Result> { fields .map(|fields| { fields .iter() .enumerate() .map(|(i, f)| Field::from_struct_field(f, i)) .collect::>>() }) .unwrap_or_else(|| Ok(Vec::new())) } pub fn infer_table_name(name: &str) -> String { let mut result = camel_to_snake(name); result.push('s'); result } #[test] fn infer_table_name_pluralizes_and_downcases() { assert_eq!("foos", &infer_table_name("Foo")); assert_eq!("bars", &infer_table_name("Bar")); } #[test] fn infer_table_name_properly_handles_underscores() { assert_eq!("foo_bars", &infer_table_name("FooBar")); assert_eq!("foo_bar_bazs", &infer_table_name("FooBarBaz")); } diesel_derives-2.2.3/src/multiconnection.rs000064400000000000000000001736211046102023000171770ustar 00000000000000use proc_macro2::TokenStream; use syn::DeriveInput; struct ConnectionVariant<'a> { ty: &'a syn::Type, name: &'a syn::Ident, } pub fn 
derive(item: DeriveInput) -> TokenStream { if let syn::Data::Enum(e) = item.data { let connection_types = e .variants .iter() .map(|v| match &v.fields { syn::Fields::Unnamed(f) if f.unnamed.len() == 1 => ConnectionVariant { ty: &f.unnamed.first().unwrap().ty, name: &v.ident, }, _ => panic!("Only enums with on field per variant are supported"), }) .collect::>(); let backend = generate_backend(&connection_types); let query_builder = generate_querybuilder(&connection_types); let bind_collector = generate_bind_collector(&connection_types); let row = generate_row(&connection_types); let connection = generate_connection_impl(&connection_types, &item.ident); quote::quote! { mod multi_connection_impl { use super::*; mod backend { use super::*; #backend } mod query_builder { use super::*; #query_builder } mod bind_collector { use super::*; #bind_collector } mod row { use super::*; #row } mod connection { use super::*; #connection } pub use self::backend::{MultiBackend, MultiRawValue}; pub use self::row::{MultiRow, MultiField}; } pub use self::multi_connection_impl::{MultiBackend, MultiRow, MultiRawValue, MultiField}; } } else { panic!("Only enums are supported as multiconnection type"); } } fn generate_connection_impl( connection_types: &[ConnectionVariant], ident: &syn::Ident, ) -> TokenStream { let batch_execute_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(conn) => conn.batch_execute(query) } }); let execute_returning_count_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { Self::#ident(conn) => { let query = SerializedQuery { inner: source, backend: MultiBackend::#ident(Default::default()), query_builder: super::query_builder::MultiQueryBuilder::#ident(Default::default()), p: std::marker::PhantomData::<#ty>, }; conn.execute_returning_count(&query) } } }); let load_impl = connection_types.iter().map(|c| { let variant_ident = c.name; let ty = &c.ty; quote::quote! 
{ #ident::#variant_ident(conn) => { let query = SerializedQuery { inner: source, backend: MultiBackend::#variant_ident(Default::default()), query_builder: super::query_builder::MultiQueryBuilder::#variant_ident(Default::default()), p: std::marker::PhantomData::<#ty>, }; let r = <#ty as diesel::connection::LoadConnection>::load(conn, query)?; Ok(super::row::MultiCursor::#variant_ident(r)) } } }); let instrumentation_impl = connection_types.iter().map(|c| { let variant_ident = c.name; quote::quote! { #ident::#variant_ident(conn) => { diesel::connection::Connection::set_instrumentation(conn, instrumentation); } } }); let get_instrumentation_impl = connection_types.iter().map(|c| { let variant_ident = c.name; quote::quote! { #ident::#variant_ident(conn) => { diesel::connection::Connection::instrumentation(conn) } } }); let establish_impls = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { if let Ok(conn) = #ty::establish(database_url) { return Ok(Self::#ident(conn)); } } }); let begin_transaction_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { Self::#ident(conn) => <#ty as Connection>::TransactionManager::begin_transaction(conn) } }); let commit_transaction_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { Self::#ident(conn) => <#ty as Connection>::TransactionManager::commit_transaction(conn) } }); let rollback_transaction_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { Self::#ident(conn) => <#ty as Connection>::TransactionManager::rollback_transaction(conn) } }); let is_broken_transaction_manager_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! 
{ Self::#ident(conn) => <#ty as Connection>::TransactionManager::is_broken_transaction_manager(conn) } }); let transaction_manager_status_mut_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { Self::#ident(conn) => <#ty as Connection>::TransactionManager::transaction_manager_status_mut(conn) } }); let bind_param_helper_impl = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { impl BindParamHelper for #ty { fn handle_inner_pass<'a, 'b: 'a>( outer_collector: &mut ::BindCollector<'a>, lookup: &mut ::MetadataLookup, backend: &'b MultiBackend, q: &'b impl diesel::query_builder::QueryFragment, ) -> diesel::QueryResult<()> { use diesel::internal::derives::multiconnection::MultiConnectionHelper; let mut collector = super::bind_collector::MultiBindCollector::#ident(Default::default()); let lookup = Self::to_any(lookup); q.collect_binds(&mut collector, lookup, backend)?; if let super::bind_collector::MultiBindCollector::#ident(collector) = collector { *outer_collector = collector; } Ok(()) } } } }); let impl_migration_connection = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(conn) => { use diesel::migration::MigrationConnection; conn.setup() } } }); let impl_begin_test_transaction = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(conn) => conn.begin_test_transaction() } }); let r2d2_impl = if cfg!(feature = "r2d2") { let impl_ping_r2d2 = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(conn) => conn.ping() } }); let impl_is_broken_r2d2 = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(conn) => conn.is_broken() } }); Some(quote::quote! 
{ impl diesel::r2d2::R2D2Connection for MultiConnection { fn ping(&mut self) -> diesel::QueryResult<()> { use diesel::r2d2::R2D2Connection; match self { #(#impl_ping_r2d2,)* } } fn is_broken(&mut self) -> bool { use diesel::r2d2::R2D2Connection; match self { #(#impl_is_broken_r2d2,)* } } } }) } else { None }; quote::quote! { use diesel::connection::*; pub(super) use super::#ident as MultiConnection; impl SimpleConnection for MultiConnection { fn batch_execute(&mut self, query: &str) -> diesel::result::QueryResult<()> { match self { #(#batch_execute_impl,)* } } } impl diesel::internal::derives::multiconnection::ConnectionSealed for MultiConnection {} struct SerializedQuery { inner: T, backend: MultiBackend, query_builder: super::query_builder::MultiQueryBuilder, p: std::marker::PhantomData, } trait BindParamHelper: Connection { fn handle_inner_pass<'a, 'b: 'a>( collector: &mut ::BindCollector<'a>, lookup: &mut ::MetadataLookup, backend: &'b MultiBackend, q: &'b impl diesel::query_builder::QueryFragment, ) -> diesel::QueryResult<()>; } #(#bind_param_helper_impl)* impl diesel::query_builder::QueryFragment for SerializedQuery where DB: diesel::backend::Backend + 'static, T: diesel::query_builder::QueryFragment, C: diesel::connection::Connection + BindParamHelper + diesel::internal::derives::multiconnection::MultiConnectionHelper, { fn walk_ast<'b>( &'b self, mut pass: diesel::query_builder::AstPass<'_, 'b, DB>, ) -> diesel::QueryResult<()> { use diesel::query_builder::QueryBuilder; use diesel::internal::derives::multiconnection::AstPassHelper; let mut query_builder = self.query_builder.duplicate(); self.inner.to_sql(&mut query_builder, &self.backend)?; pass.push_sql(&query_builder.finish()); if !self.inner.is_safe_to_cache_prepared(&self.backend)? 
{ pass.unsafe_to_cache_prepared(); } if let Some((outer_collector, lookup)) = pass.bind_collector() { C::handle_inner_pass(outer_collector, lookup, &self.backend, &self.inner)?; } Ok(()) } } impl diesel::query_builder::QueryId for SerializedQuery where T: diesel::query_builder::QueryId, { type QueryId = ::QueryId; const HAS_STATIC_QUERY_ID: bool = ::HAS_STATIC_QUERY_ID; } impl diesel::query_builder::Query for SerializedQuery where T: diesel::query_builder::Query { // we use untyped here as this does not really matter // + that type is supported for all backends type SqlType = diesel::sql_types::Untyped; } impl Connection for MultiConnection { type Backend = super::MultiBackend; type TransactionManager = Self; fn establish(database_url: &str) -> diesel::ConnectionResult { #(#establish_impls)* Err(diesel::ConnectionError::BadConnection("Invalid connection url for multiconnection".into())) } fn execute_returning_count(&mut self, source: &T) -> diesel::result::QueryResult where T: diesel::query_builder::QueryFragment + diesel::query_builder::QueryId, { match self { #(#execute_returning_count_impl,)* } } fn transaction_state( &mut self, ) -> &mut >::TransactionStateData { self } fn instrumentation(&mut self) -> &mut dyn diesel::connection::Instrumentation { match self { #(#get_instrumentation_impl,)* } } fn set_instrumentation(&mut self, instrumentation: impl diesel::connection::Instrumentation) { match self { #(#instrumentation_impl,)* } } fn begin_test_transaction(&mut self) -> diesel::QueryResult<()> { match self { #(#impl_begin_test_transaction,)* } } } impl LoadConnection for MultiConnection { type Cursor<'conn, 'query> = super::row::MultiCursor<'conn, 'query>; type Row<'conn, 'query> = super::MultiRow<'conn, 'query>; fn load<'conn, 'query, T>( &'conn mut self, source: T, ) -> diesel::result::QueryResult> where T: diesel::query_builder::Query + diesel::query_builder::QueryFragment + diesel::query_builder::QueryId + 'query, Self::Backend: 
diesel::expression::QueryMetadata, { match self { #(#load_impl,)* } } } impl TransactionManager for MultiConnection { type TransactionStateData = Self; fn begin_transaction(conn: &mut MultiConnection) -> diesel::QueryResult<()> { match conn { #(#begin_transaction_impl,)* } } fn rollback_transaction(conn: &mut MultiConnection) -> diesel::QueryResult<()> { match conn { #(#rollback_transaction_impl,)* } } fn commit_transaction(conn: &mut MultiConnection) -> diesel::QueryResult<()> { match conn { #(#commit_transaction_impl,)* } } fn transaction_manager_status_mut(conn: &mut MultiConnection) -> &mut diesel::connection::TransactionManagerStatus { match conn { #(#transaction_manager_status_mut_impl,)* } } fn is_broken_transaction_manager(conn: &mut MultiConnection) -> bool { match conn { #(#is_broken_transaction_manager_impl,)* } } } impl diesel::migration::MigrationConnection for MultiConnection { fn setup(&mut self) -> diesel::QueryResult { match self { #(#impl_migration_connection,)* } } } #r2d2_impl } } fn generate_row(connection_types: &[ConnectionVariant]) -> TokenStream { let row_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<#ty as diesel::connection::LoadConnection>::Row<'conn, 'query>) } }); let field_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<<#ty as diesel::connection::LoadConnection>::Row<'conn, 'query> as diesel::row::Row<'conn, <#ty as diesel::connection::Connection>::Backend>>::Field<'query>) } }); let field_name_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(f) => f.field_name() } }); let field_value_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(f) => f.value().map(super::MultiRawValue::#ident) } }); let row_index_impl = connection_types .iter() .map(|c| { let ident = c.name; quote::quote! 
{ Self::#ident(r) => r.idx(idx) } }) .collect::>(); let row_index_impl = &row_index_impl; let cursor_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<#ty as diesel::connection::LoadConnection>::Cursor<'conn, 'query>) } }); let iterator_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(r) => Some(r.next()?.map(MultiRow::#ident)) } }); let field_count_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(r) => r.field_count() } }); let get_field_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(r) => r.get(idx).map(MultiField::#ident) } }); quote::quote! { pub enum MultiRow<'conn, 'query> { #(#row_variants,)* } impl<'conn, 'query> diesel::internal::derives::multiconnection::RowSealed for MultiRow<'conn, 'query> {} pub enum MultiField<'conn: 'query, 'query> { #(#field_variants,)* } impl<'conn, 'query> diesel::row::Field<'conn, super::MultiBackend> for MultiField<'conn, 'query> { fn field_name(&self) -> Option<&str> { use diesel::row::Field; match self { #(#field_name_impl,)* } } fn value(&self) -> Option<::RawValue<'_>> { use diesel::row::Field; match self { #(#field_value_impl,)* } } } impl<'conn, 'query, 'c> diesel::row::RowIndex<&'c str> for MultiRow<'conn, 'query> { fn idx(&self, idx: &'c str) -> Option { use diesel::row::RowIndex; match self { #(#row_index_impl,)* } } } impl<'conn, 'query> diesel::row::RowIndex for MultiRow<'conn, 'query> { fn idx(&self, idx: usize) -> Option { use diesel::row::RowIndex; match self { #(#row_index_impl,)* } } } impl<'conn, 'query> diesel::row::Row<'conn, super::MultiBackend> for MultiRow<'conn, 'query> { type Field<'a> = MultiField<'a, 'a> where 'conn: 'a, Self: 'a; type InnerPartialRow = Self; fn field_count(&self) -> usize { use diesel::row::Row; match self { #(#field_count_impl,)* } } fn get<'b, I>(&'b self, idx: I) -> Option> where 'conn: 'b, Self: 
diesel::row::RowIndex, { use diesel::row::{RowIndex, Row}; let idx = self.idx(idx)?; match self { #(#get_field_impl,)* } } fn partial_row( &self, range: std::ops::Range, ) -> diesel::internal::derives::multiconnection::PartialRow<'_, Self::InnerPartialRow> { diesel::internal::derives::multiconnection::PartialRow::new(self, range) } } pub enum MultiCursor<'conn, 'query> { #(#cursor_variants,)* } impl<'conn, 'query> Iterator for MultiCursor<'conn, 'query> { type Item = diesel::QueryResult>; fn next(&mut self) -> Option { match self { #(#iterator_impl,)* } } } } } fn generate_bind_collector(connection_types: &[ConnectionVariant]) -> TokenStream { let mut to_sql_impls = vec![ ( quote::quote!(diesel::sql_types::SmallInt), quote::quote!(i16), ), ( quote::quote!(diesel::sql_types::Integer), quote::quote!(i32), ), (quote::quote!(diesel::sql_types::BigInt), quote::quote!(i64)), (quote::quote!(diesel::sql_types::Double), quote::quote!(f64)), (quote::quote!(diesel::sql_types::Float), quote::quote!(f32)), (quote::quote!(diesel::sql_types::Text), quote::quote!(str)), ( quote::quote!(diesel::sql_types::Binary), quote::quote!([u8]), ), (quote::quote!(diesel::sql_types::Bool), quote::quote!(bool)), ]; if cfg!(feature = "chrono") { to_sql_impls.push(( quote::quote!(diesel::sql_types::Timestamp), quote::quote!(diesel::internal::derives::multiconnection::chrono::NaiveDateTime), )); to_sql_impls.push(( quote::quote!(diesel::sql_types::Date), quote::quote!(diesel::internal::derives::multiconnection::chrono::NaiveDate), )); to_sql_impls.push(( quote::quote!(diesel::sql_types::Time), quote::quote!(diesel::internal::derives::multiconnection::chrono::NaiveTime), )); } if cfg!(feature = "time") { to_sql_impls.push(( quote::quote!(diesel::sql_types::Timestamp), quote::quote!(diesel::internal::derives::multiconnection::time::PrimitiveDateTime), )); to_sql_impls.push(( quote::quote!(diesel::sql_types::Time), quote::quote!(diesel::internal::derives::multiconnection::time::Time), )); 
to_sql_impls.push(( quote::quote!(diesel::sql_types::Date), quote::quote!(diesel::internal::derives::multiconnection::time::Date), )); } let to_sql_impls = to_sql_impls .into_iter() .map(|t| generate_to_sql_impls(t, connection_types)); let mut from_sql_impls = vec![ ( quote::quote!(diesel::sql_types::SmallInt), quote::quote!(i16), ), ( quote::quote!(diesel::sql_types::Integer), quote::quote!(i32), ), (quote::quote!(diesel::sql_types::BigInt), quote::quote!(i64)), (quote::quote!(diesel::sql_types::Double), quote::quote!(f64)), (quote::quote!(diesel::sql_types::Float), quote::quote!(f32)), ( quote::quote!(diesel::sql_types::Text), quote::quote!(String), ), ( quote::quote!(diesel::sql_types::Binary), quote::quote!(Vec), ), (quote::quote!(diesel::sql_types::Bool), quote::quote!(bool)), ]; if cfg!(feature = "chrono") { from_sql_impls.push(( quote::quote!(diesel::sql_types::Timestamp), quote::quote!(diesel::internal::derives::multiconnection::chrono::NaiveDateTime), )); from_sql_impls.push(( quote::quote!(diesel::sql_types::Date), quote::quote!(diesel::internal::derives::multiconnection::chrono::NaiveDate), )); from_sql_impls.push(( quote::quote!(diesel::sql_types::Time), quote::quote!(diesel::internal::derives::multiconnection::chrono::NaiveTime), )); } if cfg!(feature = "time") { from_sql_impls.push(( quote::quote!(diesel::sql_types::Timestamp), quote::quote!(diesel::internal::derives::multiconnection::time::PrimitiveDateTime), )); from_sql_impls.push(( quote::quote!(diesel::sql_types::Time), quote::quote!(diesel::internal::derives::multiconnection::time::Time), )); from_sql_impls.push(( quote::quote!(diesel::sql_types::Date), quote::quote!(diesel::internal::derives::multiconnection::time::Date), )); } let from_sql_impls = from_sql_impls.into_iter().map(generate_from_sql_impls); let into_bind_value_bounds = connection_types.iter().map(|c| { let ty = c.ty; quote::quote! 
{ diesel::serialize::ToSql::Backend> } }); let has_sql_type_bounds = connection_types.iter().map(|c| { let ty = c.ty; quote::quote! { <#ty as diesel::connection::Connection>::Backend: diesel::sql_types::HasSqlType } }); let multi_bind_collector_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<<#ty as diesel::connection::Connection>::Backend as diesel::backend::Backend>::BindCollector<'a>) } }); let multi_bind_collector_accessor = connection_types.iter().map(|c| { let ident = c.name; let lower_ident = syn::Ident::new(&c.name.to_string().to_lowercase(), c.name.span()); let ty = c.ty; quote::quote! { pub(super) fn #lower_ident( &mut self, ) -> &mut <<#ty as diesel::connection::Connection>::Backend as diesel::backend::Backend>::BindCollector<'a> { match self { Self::#ident(bc) => bc, _ => unreachable!(), } } } }); let push_to_inner_collector = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { Self::#ident(ref mut bc) => { let out = out.inner.expect("This inner value is set via our custom `ToSql` impls"); let callback = out.push_bound_value_to_collector; let value = out.value; <_ as PushBoundValueToCollectorDB<<#ty as diesel::Connection>::Backend>>::push_bound_value( callback, value, bc, <#ty as diesel::internal::derives::multiconnection::MultiConnectionHelper>::from_any(metadata_lookup) .expect("We can downcast the metadata lookup to the right type") )? } } }); let push_null_to_inner_collector = connection_types .iter() .map(|c| { let ident = c.name; quote::quote! { (Self::#ident(ref mut bc), super::backend::MultiTypeMetadata{ #ident: Some(metadata), .. }) => { bc.push_null_value(metadata)?; } } }) .collect::>(); let push_bound_value_super_traits = connection_types .iter() .map(|c| { let ty = c.ty; quote::quote! { PushBoundValueToCollectorDB<<#ty as diesel::Connection>::Backend> } }) .collect::>(); quote::quote! 
{ pub enum MultiBindCollector<'a> { #(#multi_bind_collector_variants,)* } impl<'a> MultiBindCollector<'a> { #(#multi_bind_collector_accessor)* } trait PushBoundValueToCollectorDB { fn push_bound_value<'a: 'b, 'b>( &self, v: InnerBindValueKind<'a>, collector: &mut ::BindCollector<'b>, lookup: &mut ::MetadataLookup, ) -> diesel::result::QueryResult<()>; } struct PushBoundValueToCollectorImpl { p: std::marker::PhantomData<(ST, T)> } // we need to have separate impls for Sized values and str/[u8] as otherwise // we need separate impls for `Sized` and `str`/`[u8]` here as // we cannot use `Any::downcast_ref` otherwise (which implies `Sized`) impl PushBoundValueToCollectorDB for PushBoundValueToCollectorImpl where DB: diesel::backend::Backend + diesel::sql_types::HasSqlType, T: diesel::serialize::ToSql + 'static, Option: diesel::serialize::ToSql, DB> + 'static, ST: diesel::sql_types::SqlType, { fn push_bound_value<'a: 'b, 'b>( &self, v: InnerBindValueKind<'a>, collector: &mut ::BindCollector<'b>, lookup: &mut ::MetadataLookup, ) -> diesel::result::QueryResult<()> { use diesel::query_builder::BindCollector; match v { InnerBindValueKind::Sized(v) => { let v = v.downcast_ref::().expect("We know the type statically here"); collector.push_bound_value::(v, lookup) } InnerBindValueKind::Null => { collector.push_bound_value::, Option>(&None, lookup) }, _ => unreachable!("We set the value to `InnerBindValueKind::Sized` or `InnerBindValueKind::Null`") } } } impl PushBoundValueToCollectorDB for PushBoundValueToCollectorImpl where DB: diesel::backend::Backend + diesel::sql_types::HasSqlType, str: diesel::serialize::ToSql + 'static, { fn push_bound_value<'a: 'b, 'b>( &self, v: InnerBindValueKind<'a>, collector: &mut ::BindCollector<'b>, lookup: &mut ::MetadataLookup, ) -> diesel::result::QueryResult<()> { use diesel::query_builder::BindCollector; if let InnerBindValueKind::Str(v) = v { collector.push_bound_value::(v, lookup) } else { unreachable!("We set the value to 
`InnerBindValueKind::Str`") } } } impl PushBoundValueToCollectorDB for PushBoundValueToCollectorImpl where DB: diesel::backend::Backend + diesel::sql_types::HasSqlType, [u8]: diesel::serialize::ToSql + 'static, { fn push_bound_value<'a: 'b, 'b>( &self, v: InnerBindValueKind<'a>, collector: &mut ::BindCollector<'b>, lookup: &mut ::MetadataLookup, ) -> diesel::result::QueryResult<()> { use diesel::query_builder::BindCollector; if let InnerBindValueKind::Bytes(v) = v { collector.push_bound_value::(v, lookup) } else { unreachable!("We set the value to `InnerBindValueKind::Binary`") } } } trait PushBoundValueToCollector: #(#push_bound_value_super_traits +)* {} impl PushBoundValueToCollector for T where T: #(#push_bound_value_super_traits + )* {} #[derive(Default)] pub struct BindValue<'a> { // we use an option here to initialize an "empty" // as part of the `BindCollector` impl below inner: Option> } struct InnerBindValue<'a> { value: InnerBindValueKind<'a>, push_bound_value_to_collector: &'static dyn PushBoundValueToCollector } enum InnerBindValueKind<'a> { Sized(&'a (dyn std::any::Any + std::marker::Send + std::marker::Sync)), Str(&'a str), Bytes(&'a [u8]), Null, } impl<'a> From<(diesel::sql_types::Text, &'a str)> for BindValue<'a> { fn from((_, v): (diesel::sql_types::Text, &'a str)) -> Self { Self { inner: Some(InnerBindValue{ value: InnerBindValueKind::Str(v), push_bound_value_to_collector: &PushBoundValueToCollectorImpl { p: std::marker::PhantomData::<(diesel::sql_types::Text, str)> } }) } } } impl<'a> From<(diesel::sql_types::Binary, &'a [u8])> for BindValue<'a> { fn from((_, v): (diesel::sql_types::Binary, &'a [u8])) -> Self { Self { inner: Some(InnerBindValue { value: InnerBindValueKind::Bytes(v), push_bound_value_to_collector: &PushBoundValueToCollectorImpl { p: std::marker::PhantomData::<(diesel::sql_types::Binary, [u8])> } }) } } } impl<'a, T, ST> From<(ST, &'a T)> for BindValue<'a> where T: std::any::Any #(+ #into_bind_value_bounds)* + Send + Sync + 
'static, ST: Send + diesel::sql_types::SqlType + 'static, #(#has_sql_type_bounds,)* { fn from((_, v): (ST, &'a T)) -> Self { Self { inner: Some(InnerBindValue{ value: InnerBindValueKind::Sized(v), push_bound_value_to_collector: &PushBoundValueToCollectorImpl { p: std::marker::PhantomData::<(ST, T)> } }) } } } impl<'a> diesel::query_builder::BindCollector<'a, MultiBackend> for MultiBindCollector<'a> { type Buffer = multi_connection_impl::bind_collector::BindValue<'a>; fn push_bound_value( &mut self, bind: &'a U, metadata_lookup: &mut (dyn std::any::Any + 'static), ) -> diesel::QueryResult<()> where MultiBackend: diesel::sql_types::HasSqlType, U: diesel::serialize::ToSql + ?Sized + 'a, { let out = { let out = multi_connection_impl::bind_collector::BindValue::default(); let mut out = diesel::serialize::Output::::new(out, metadata_lookup); let bind_is_null = bind.to_sql(&mut out).map_err(diesel::result::Error::SerializationError)?; if matches!(bind_is_null, diesel::serialize::IsNull::Yes) { // nulls are special and need a special handling because // there is a wildcard `ToSql` impl in diesel. That means we won't // set the `inner` field of `BindValue` to something for the `None` // case. Therefore we need to handle that explicitly here. // let metadata = >::metadata(metadata_lookup); match (self, metadata) { #(#push_null_to_inner_collector)* _ => { unreachable!("We have matching metadata") }, } return Ok(()); } else { out.into_inner() } }; match self { #(#push_to_inner_collector)* } Ok(()) } fn push_null_value(&mut self, metadata: super::backend::MultiTypeMetadata) -> diesel::QueryResult<()> { match (self, metadata) { #(#push_null_to_inner_collector)* _ => unreachable!("We have matching metadata"), } Ok(()) } } #(#to_sql_impls)* #(#from_sql_impls)* } } fn generate_has_sql_type_impls(sql_type: TokenStream) -> TokenStream { quote::quote! 
{ impl diesel::sql_types::HasSqlType<#sql_type> for super::MultiBackend { fn metadata(lookup: &mut Self::MetadataLookup) -> Self::TypeMetadata { Self::lookup_sql_type::<#sql_type>(lookup) } } } } fn generate_from_sql_impls((sql_type, tpe): (TokenStream, TokenStream)) -> TokenStream { quote::quote! { impl diesel::deserialize::FromSql<#sql_type, super::MultiBackend> for #tpe { fn from_sql( bytes: ::RawValue<'_>, ) -> diesel::deserialize::Result { bytes.from_sql::() } } } } fn generate_to_sql_impls( (sql_type, tpe): (TokenStream, TokenStream), _connection_types: &[ConnectionVariant], ) -> TokenStream { quote::quote! { impl diesel::serialize::ToSql<#sql_type, super::MultiBackend> for #tpe { fn to_sql<'b>( &'b self, out: &mut diesel::serialize::Output<'b, '_, super::MultiBackend>, ) -> diesel::serialize::Result { out.set_value((#sql_type, self)); Ok(diesel::serialize::IsNull::No) } } } } fn generate_queryfragment_impls( trait_def: TokenStream, query_fragment_bounds: &[TokenStream], ) -> TokenStream { quote::quote! { impl #trait_def where Self: #(#query_fragment_bounds+)* { fn walk_ast<'b>( &'b self, pass: diesel::query_builder::AstPass<'_, 'b, MultiBackend>, ) -> diesel::QueryResult<()> { super::backend::MultiBackend::walk_variant_ast(self, pass) } } } } fn generate_querybuilder(connection_types: &[ConnectionVariant]) -> TokenStream { let variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<<#ty as diesel::Connection>::Backend as diesel::backend::Backend>::QueryBuilder) } }); let push_sql_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(q) => q.push_sql(sql) } }); let push_identifier_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(q) => q.push_identifier(identifier) } }); let push_bind_param_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! 
{ Self::#ident(q) => q.push_bind_param() } }); let finish_impl = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(q) => q.finish() } }); let into_variant_functions = connection_types.iter().map(|c|{ let ty = c.ty; let ident = c.name; let lower_ident = syn::Ident::new(&ident.to_string().to_lowercase(), ident.span()); quote::quote! { pub(super) fn #lower_ident(&mut self) -> &mut <<#ty as diesel::Connection>::Backend as diesel::backend::Backend>::QueryBuilder { match self { Self::#ident(qb) => qb, _ => unreachable!(), } } } }); let query_fragment_bounds = connection_types.iter().map(|c| { let ty = c.ty; quote::quote! { diesel::query_builder::QueryFragment<<#ty as diesel::connection::Connection>::Backend> } }).collect::>(); let duplicate_query_builder = connection_types.iter().map(|c| { let ident = c.name; quote::quote! { Self::#ident(_) => Self::#ident(Default::default()) } }); let query_fragment = quote::quote! { diesel::query_builder::QueryFragment }; let query_fragment_impls = IntoIterator::into_iter([ quote::quote!{ #query_fragment for diesel::internal::derives::multiconnection::LimitOffsetClause }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::Concat }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::array_comparison::In }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::array_comparison::NotIn }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::array_comparison::Many }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::Exists }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::NoFromClause }, quote::quote! 
{ diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::DefaultValues }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::ReturningClause }, quote::quote! { diesel::query_builder::QueryFragment for diesel::insertable::DefaultableColumnInsertValue }, quote::quote! { diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::BatchInsert }, quote::quote! { diesel::query_builder::QueryFragment for diesel::query_source::Alias } ]) .map(|t| generate_queryfragment_impls(t, &query_fragment_bounds)); let insert_values_impl_variants = connection_types.iter().map(|c| { let ident = c.name; let lower_ident = syn::Ident::new(&ident.to_string().to_lowercase(), c.name.span()); let ty = c.ty; quote::quote! { super::backend::MultiBackend::#ident(_) => { ::Backend, Col::Table>>::column_names( &self, out.cast_database( super::bind_collector::MultiBindCollector::#lower_ident, super::query_builder::MultiQueryBuilder::#lower_ident, super::backend::MultiBackend::#lower_ident, |l| { <#ty as diesel::internal::derives::multiconnection::MultiConnectionHelper>::from_any(l) .expect("It's possible to downcast the metadata lookup type to the correct type") }, ), ) } } }); let insert_values_backend_bounds = connection_types.iter().map(|c| { let ty = c.ty; quote::quote! { diesel::insertable::DefaultableColumnInsertValue>: diesel::insertable::InsertValues<<#ty as diesel::connection::Connection>::Backend, Col::Table> } }); quote::quote! 
{ pub enum MultiQueryBuilder { #(#variants,)* } impl MultiQueryBuilder { pub(super) fn duplicate(&self) -> Self { match self { #(#duplicate_query_builder,)* } } } impl MultiQueryBuilder { #(#into_variant_functions)* } impl diesel::query_builder::QueryBuilder for MultiQueryBuilder { fn push_sql(&mut self, sql: &str) { match self { #(#push_sql_impl,)* } } fn push_identifier(&mut self, identifier: &str) -> diesel::QueryResult<()> { match self { #(#push_identifier_impl,)* } } fn push_bind_param(&mut self) { match self { #(#push_bind_param_impl,)* } } fn finish(self) -> String { match self { #(#finish_impl,)* } } } #(#query_fragment_impls)* impl diesel::query_builder::QueryFragment< super::backend::MultiBackend, super::backend::MultiSelectStatementSyntax, > for diesel::internal::derives::multiconnection::SelectStatement< F, S, D, W, O, LOf, G, H, LC, > where S: diesel::query_builder::QueryFragment, F: diesel::query_builder::QueryFragment, D: diesel::query_builder::QueryFragment, W: diesel::query_builder::QueryFragment, O: diesel::query_builder::QueryFragment, LOf: diesel::query_builder::QueryFragment, G: diesel::query_builder::QueryFragment, H: diesel::query_builder::QueryFragment, LC: diesel::query_builder::QueryFragment, { fn walk_ast<'b>( &'b self, mut out: diesel::query_builder::AstPass<'_, 'b, MultiBackend>, ) -> diesel::QueryResult<()> { use diesel::internal::derives::multiconnection::SelectStatementAccessor; out.push_sql("SELECT "); self.distinct_clause().walk_ast(out.reborrow())?; self.select_clause().walk_ast(out.reborrow())?; self.from_clause().walk_ast(out.reborrow())?; self.where_clause().walk_ast(out.reborrow())?; self.group_by_clause().walk_ast(out.reborrow())?; self.having_clause().walk_ast(out.reborrow())?; self.order_clause().walk_ast(out.reborrow())?; self.limit_offset_clause().walk_ast(out.reborrow())?; self.locking_clause().walk_ast(out.reborrow())?; Ok(()) } } impl<'a, ST, QS, GB> diesel::query_builder::QueryFragment< super::backend::MultiBackend, 
super::backend::MultiSelectStatementSyntax, > for diesel::internal::derives::multiconnection::BoxedSelectStatement< 'a, ST, QS, super::backend::MultiBackend, GB, > where QS: diesel::query_builder::QueryFragment { fn walk_ast<'b>( &'b self, pass: diesel::query_builder::AstPass<'_, 'b, MultiBackend>, ) -> diesel::QueryResult<()> { use diesel::internal::derives::multiconnection::BoxedQueryHelper; self.build_query(pass, |where_clause, pass| where_clause.walk_ast(pass)) } } impl diesel::query_builder::QueryFragment for diesel::internal::derives::multiconnection::BoxedLimitOffsetClause< '_, super::backend::MultiBackend, > { fn walk_ast<'b>( &'b self, mut pass: diesel::query_builder::AstPass<'_, 'b, MultiBackend>, ) -> diesel::QueryResult<()> { if let Some(ref limit) = self.limit { limit.walk_ast(pass.reborrow())?; } if let Some(ref offset) = self.offset { offset.walk_ast(pass.reborrow())?; } Ok(()) } } impl<'a> diesel::query_builder::IntoBoxedClause<'a, super::multi_connection_impl::backend::MultiBackend> for diesel::internal::derives::multiconnection::LimitOffsetClause { type BoxedClause = diesel::internal::derives::multiconnection::BoxedLimitOffsetClause<'a, super::multi_connection_impl::backend::MultiBackend>; fn into_boxed(self) -> Self::BoxedClause { diesel::internal::derives::multiconnection::BoxedLimitOffsetClause { limit: None, offset: None, } } } impl<'a, L> diesel::query_builder::IntoBoxedClause<'a, super::multi_connection_impl::backend::MultiBackend> for diesel::internal::derives::multiconnection::LimitOffsetClause, diesel::internal::derives::multiconnection::NoOffsetClause> where diesel::internal::derives::multiconnection::LimitClause: diesel::query_builder::QueryFragment + Send + 'static, { type BoxedClause = diesel::internal::derives::multiconnection::BoxedLimitOffsetClause<'a, super::multi_connection_impl::backend::MultiBackend>; fn into_boxed(self) -> Self::BoxedClause { diesel::internal::derives::multiconnection::BoxedLimitOffsetClause { limit: 
Some(Box::new(self.limit_clause)), offset: None, } } } impl<'a, O> diesel::query_builder::IntoBoxedClause<'a, super::multi_connection_impl::backend::MultiBackend> for diesel::internal::derives::multiconnection::LimitOffsetClause> where diesel::internal::derives::multiconnection::OffsetClause: diesel::query_builder::QueryFragment + Send + 'static, { type BoxedClause = diesel::internal::derives::multiconnection::BoxedLimitOffsetClause<'a, super::multi_connection_impl::backend::MultiBackend>; fn into_boxed(self) -> Self::BoxedClause { diesel::internal::derives::multiconnection::BoxedLimitOffsetClause { limit: None, offset: Some(Box::new(self.offset_clause)), } } } impl<'a, L, O> diesel::query_builder::IntoBoxedClause<'a, super::multi_connection_impl::backend::MultiBackend> for diesel::internal::derives::multiconnection::LimitOffsetClause, diesel::internal::derives::multiconnection::OffsetClause> where diesel::internal::derives::multiconnection::LimitClause: diesel::query_builder::QueryFragment + Send + 'static, diesel::internal::derives::multiconnection::OffsetClause: diesel::query_builder::QueryFragment + Send + 'static, { type BoxedClause = diesel::internal::derives::multiconnection::BoxedLimitOffsetClause<'a, super::multi_connection_impl::backend::MultiBackend>; fn into_boxed(self) -> Self::BoxedClause { diesel::internal::derives::multiconnection::BoxedLimitOffsetClause { limit: Some(Box::new(self.limit_clause)), offset: Some(Box::new(self.offset_clause)), } } } impl diesel::insertable::InsertValues for diesel::insertable::DefaultableColumnInsertValue> where Col: diesel::prelude::Column, Expr: diesel::prelude::Expression, Expr: diesel::prelude::AppearsOnTable, Self: diesel::query_builder::QueryFragment, #(#insert_values_backend_bounds,)* { fn column_names( &self, mut out: diesel::query_builder::AstPass<'_, '_, super::multi_connection_impl::backend::MultiBackend> ) -> QueryResult<()> { use diesel::internal::derives::multiconnection::AstPassHelper; match 
out.backend() { #(#insert_values_impl_variants,)* } } } } } fn generate_backend(connection_types: &[ConnectionVariant]) -> TokenStream { let backend_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<#ty as diesel::Connection>::Backend) } }); let value_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { #ident(<<#ty as diesel::Connection>::Backend as diesel::backend::Backend>::RawValue<'a>) } }); let type_metadata_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; quote::quote! { pub(super) #ident: Option<<<#ty as diesel::Connection>::Backend as diesel::sql_types::TypeMetadata>::TypeMetadata> } }); let has_sql_type_impls = vec![ quote::quote!(diesel::sql_types::SmallInt), quote::quote!(diesel::sql_types::Integer), quote::quote!(diesel::sql_types::BigInt), quote::quote!(diesel::sql_types::Double), quote::quote!(diesel::sql_types::Float), quote::quote!(diesel::sql_types::Text), quote::quote!(diesel::sql_types::Binary), quote::quote!(diesel::sql_types::Date), quote::quote!(diesel::sql_types::Time), quote::quote!(diesel::sql_types::Timestamp), quote::quote!(diesel::sql_types::Bool), ] .into_iter() .map(generate_has_sql_type_impls); let into_variant_functions = connection_types.iter().map(|c| { let ty = c.ty; let ident = c.name; let lower_ident = syn::Ident::new(&ident.to_string().to_lowercase(), ident.span()); quote::quote! { pub(super) fn #lower_ident(&self) -> &<#ty as diesel::Connection>::Backend { match self { Self::#ident(b) => b, _ => unreachable!(), } } } }); let from_sql_match_arms = connection_types.iter().map(|v| { let ident = v.name; let ty = v.ty; quote::quote!{ Self::#ident(b) => { ::Backend>>::from_sql(b) } } }); let backend_from_sql_bounds = connection_types.iter().map(|v| { let ty = v.ty; quote::quote! 
{ T: diesel::deserialize::FromSql::Backend> } }); let query_fragment_impl_variants = connection_types.iter().map(|c| { let ident = c.name; let lower_ident = syn::Ident::new(&ident.to_string().to_lowercase(), c.name.span()); let ty = c.ty; quote::quote! { super::backend::MultiBackend::#ident(_) => { ::Backend>>::walk_ast( ast_node, pass.cast_database( super::bind_collector::MultiBindCollector::#lower_ident, super::query_builder::MultiQueryBuilder::#lower_ident, super::backend::MultiBackend::#lower_ident, |l| { <#ty as diesel::internal::derives::multiconnection::MultiConnectionHelper>::from_any(l) .expect("It's possible to downcast the metadata lookup type to the correct type") }, ), ) } } }); let query_fragment_impl_bounds = connection_types.iter().map(|c| { let ty = c.ty; quote::quote! { T: diesel::query_builder::QueryFragment<<#ty as diesel::Connection>::Backend> } }); let lookup_impl = connection_types.iter().map(|v| { let name = v.name; let ty = v.ty; quote::quote!{ if let Some(lookup) = <#ty as diesel::internal::derives::multiconnection::MultiConnectionHelper>::from_any(lookup) { ret.#name = Some(<<#ty as diesel::Connection>::Backend as diesel::sql_types::HasSqlType>::metadata(lookup)); } } }); let lookup_sql_type_bounds = connection_types.iter().map(|c| { let ty = c.ty; quote::quote! { <#ty as diesel::Connection>::Backend: diesel::sql_types::HasSqlType } }); quote::quote! 
{ pub enum MultiBackend { #(#backend_variants,)* } impl MultiBackend { #(#into_variant_functions)* pub fn lookup_sql_type(lookup: &mut dyn std::any::Any) -> MultiTypeMetadata where #(#lookup_sql_type_bounds,)* { let mut ret = MultiTypeMetadata::default(); #(#lookup_impl)* ret } } impl MultiBackend { pub fn walk_variant_ast<'b, T>( ast_node: &'b T, pass: diesel::query_builder::AstPass<'_, 'b, Self>, ) -> diesel::QueryResult<()> where #(#query_fragment_impl_bounds,)* { use diesel::internal::derives::multiconnection::AstPassHelper; match pass.backend() { #(#query_fragment_impl_variants,)* } } } pub enum MultiRawValue<'a> { #(#value_variants,)* } impl MultiRawValue<'_> { pub fn from_sql(self) -> diesel::deserialize::Result where #(#backend_from_sql_bounds,)* { match self { #(#from_sql_match_arms,)* } } } impl diesel::backend::Backend for MultiBackend { type QueryBuilder = super::query_builder::MultiQueryBuilder; type RawValue<'a> = MultiRawValue<'a>; type BindCollector<'a> = super::bind_collector::MultiBindCollector<'a>; } #[derive(Default)] #[allow(non_snake_case)] pub struct MultiTypeMetadata { #(#type_metadata_variants,)* } impl diesel::sql_types::TypeMetadata for MultiBackend { type TypeMetadata = MultiTypeMetadata; type MetadataLookup = dyn std::any::Any; } pub struct MultiReturningClause; pub struct MultiInsertWithDefaultKeyword; pub struct MultiBatchInsertSupport; pub struct MultiDefaultValueClauseForInsert; pub struct MultiEmptyFromClauseSyntax; pub struct MultiExistsSyntax; pub struct MultiArrayComparisonSyntax; pub struct MultiConcatClauseSyntax; pub struct MultiSelectStatementSyntax; pub struct MultiAliasSyntax; impl diesel::backend::SqlDialect for MultiBackend { type ReturningClause = MultiReturningClause; // no on conflict support is also the default type OnConflictClause = diesel::internal::derives::multiconnection::sql_dialect::on_conflict_clause::DoesNotSupportOnConflictClause; type InsertWithDefaultKeyword = MultiInsertWithDefaultKeyword; type 
BatchInsertSupport = MultiBatchInsertSupport; type DefaultValueClauseForInsert = MultiDefaultValueClauseForInsert; type EmptyFromClauseSyntax = MultiEmptyFromClauseSyntax; type ExistsSyntax = MultiExistsSyntax; type ArrayComparison = MultiArrayComparisonSyntax; type ConcatClause = MultiConcatClauseSyntax; type SelectStatementSyntax = MultiSelectStatementSyntax; type AliasSyntax = MultiAliasSyntax; } impl diesel::internal::derives::multiconnection::TrustedBackend for MultiBackend {} impl diesel::internal::derives::multiconnection::DieselReserveSpecialization for MultiBackend {} #(#has_sql_type_impls)* } } diesel_derives-2.2.3/src/parsers/belongs_to.rs000064400000000000000000000022501046102023000175640ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, TypePath}; use crate::util::{parse_eq, unknown_attribute, BELONGS_TO_NOTE}; enum Attr { ForeignKey(Ident), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "foreign_key" => Ok(Attr::ForeignKey(parse_eq(input, BELONGS_TO_NOTE)?)), _ => Err(unknown_attribute(&name, &["foreign_key"])), } } } pub struct BelongsTo { pub parent: TypePath, pub foreign_key: Option, } impl Parse for BelongsTo { fn parse(input: ParseStream) -> Result { let parent = input.parse()?; if !input.is_empty() { input.parse::()?; } let mut foreign_key = None; for attr in Punctuated::::parse_terminated(input)? 
{ match attr { Attr::ForeignKey(value) => foreign_key = Some(value), } } Ok(BelongsTo { parent, foreign_key, }) } } diesel_derives-2.2.3/src/parsers/mod.rs000064400000000000000000000003411046102023000162070ustar 00000000000000mod belongs_to; mod mysql_type; mod postgres_type; mod sqlite_type; pub use self::belongs_to::BelongsTo; pub use self::mysql_type::MysqlType; pub use self::postgres_type::PostgresType; pub use self::sqlite_type::SqliteType; diesel_derives-2.2.3/src/parsers/mysql_type.rs000064400000000000000000000024071046102023000176430ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, LitStr}; use crate::util::{parse_eq, unknown_attribute, MYSQL_TYPE_NOTE}; enum Attr { Name(LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "name" => Ok(Attr::Name(parse_eq(input, MYSQL_TYPE_NOTE)?)), _ => Err(unknown_attribute(&name, &["name"])), } } } pub struct MysqlType { pub name: LitStr, } impl Parse for MysqlType { fn parse(input: ParseStream) -> Result { let mut name = None; for attr in Punctuated::::parse_terminated(input)? 
{ match attr { Attr::Name(value) => name = Some(value), } } if let Some(name) = name { Ok(MysqlType { name }) } else { Err(syn::Error::new( input.span(), format!( "expected attribute `name`\n\ help: The correct format looks like #[diesel({})]", MYSQL_TYPE_NOTE ), )) } } } diesel_derives-2.2.3/src/parsers/postgres_type.rs000064400000000000000000000071261046102023000203470ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, LitInt, LitStr}; use crate::util::{parse_eq, unknown_attribute, POSTGRES_TYPE_NOTE, POSTGRES_TYPE_NOTE_ID}; enum Attr { Oid(Ident, LitInt), ArrayOid(Ident, LitInt), Name(Ident, LitStr), Schema(Ident, LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; let name_str = name.to_string(); match &*name_str { "oid" => Ok(Attr::Oid(name, parse_eq(input, POSTGRES_TYPE_NOTE_ID)?)), "array_oid" => Ok(Attr::ArrayOid( name, parse_eq(input, POSTGRES_TYPE_NOTE_ID)?, )), "name" => Ok(Attr::Name(name, parse_eq(input, POSTGRES_TYPE_NOTE)?)), "schema" => Ok(Attr::Schema(name, parse_eq(input, POSTGRES_TYPE_NOTE)?)), _ => Err(unknown_attribute( &name, &["oid", "array_oid", "name", "schema"], )), } } } pub enum PostgresType { Fixed(LitInt, LitInt), Lookup(LitStr, Option), } impl Parse for PostgresType { fn parse(input: ParseStream) -> Result { let mut oid = None; let mut array_oid = None; let mut name = None; let mut schema = None; for attr in Punctuated::::parse_terminated(input)? 
{ match attr { Attr::Oid(ident, value) => oid = Some((ident, value)), Attr::ArrayOid(ident, value) => array_oid = Some((ident, value)), Attr::Name(ident, value) => name = Some((ident, value)), Attr::Schema(ident, value) => schema = Some((ident, value)), } } Self::validate_and_build(input, oid, array_oid, name, schema) } } impl PostgresType { pub fn validate_and_build( input: ParseStream, oid: Option<(Ident, LitInt)>, array_oid: Option<(Ident, LitInt)>, name: Option<(Ident, LitStr)>, schema: Option<(Ident, LitStr)>, ) -> Result { let help = format!( "The correct format looks like either `#[diesel({POSTGRES_TYPE_NOTE})]` or `#[diesel({POSTGRES_TYPE_NOTE_ID})]`" ); if let Some((_, name)) = name { if let Some((oid, _)) = oid { Err(syn::Error::new( oid.span(), format!("unexpected `oid` when `name` is present\nhelp: {help}"), )) } else if let Some((array_oid, _)) = array_oid { Err(syn::Error::new( array_oid.span(), format!("unexpected `array_oid` when `name` is present\nhelp: {help}"), )) } else { Ok(PostgresType::Lookup(name, schema.map(|s| s.1))) } } else if let Some((schema, lit)) = schema { Err(syn::Error::new( schema.span(), format!( "expected `name` to be also present\n\ help: make sure `name` is present, `#[diesel(postgres_type(name = \"...\", schema = \"{}\"))]`", lit.value() ), )) } else if let (Some((_, oid)), Some((_, array_oid))) = (oid, array_oid) { Ok(PostgresType::Fixed(oid, array_oid)) } else { Err(syn::Error::new( input.span(), format!( "expected `oid` and `array_oid` attribute or `name` attribute\nhelp: {help}" ), )) } } } diesel_derives-2.2.3/src/parsers/sqlite_type.rs000064400000000000000000000024151046102023000177760ustar 00000000000000use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Ident, LitStr}; use crate::util::{parse_eq, unknown_attribute, SQLITE_TYPE_NOTE}; enum Attr { Name(LitStr), } impl Parse for Attr { fn parse(input: ParseStream) -> Result { let name: Ident = input.parse()?; 
let name_str = name.to_string(); match &*name_str { "name" => Ok(Attr::Name(parse_eq(input, SQLITE_TYPE_NOTE)?)), _ => Err(unknown_attribute(&name, &["name"])), } } } pub struct SqliteType { pub name: LitStr, } impl Parse for SqliteType { fn parse(input: ParseStream) -> Result { let mut name = None; for attr in Punctuated::::parse_terminated(input)? { match attr { Attr::Name(value) => name = Some(value), } } if let Some(name) = name { Ok(SqliteType { name }) } else { Err(syn::Error::new( input.span(), format!( "expected attribute `name`\n\ help: The correct format looks like #[diesel({})]", SQLITE_TYPE_NOTE ), )) } } } diesel_derives-2.2.3/src/query_id.rs000064400000000000000000000023501046102023000155740ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse_quote; use syn::DeriveInput; use crate::util::wrap_in_dummy_mod; pub fn derive(mut item: DeriveInput) -> TokenStream { for ty_param in item.generics.type_params_mut() { ty_param.bounds.push(parse_quote!(QueryId)); } let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let struct_name = &item.ident; let lifetimes = item.generics.lifetimes(); let ty_params = item .generics .type_params() .map(|ty_param| &ty_param.ident) .collect::>(); let query_id_ty_params = ty_params .iter() .map(|ty_param| quote!(<#ty_param as QueryId>::QueryId)); let has_static_query_id = ty_params .iter() .map(|ty_param| quote!(<#ty_param as QueryId>::HAS_STATIC_QUERY_ID)); wrap_in_dummy_mod(quote! 
{ use diesel::query_builder::QueryId; #[allow(non_camel_case_types)] impl #impl_generics QueryId for #struct_name #ty_generics #where_clause { type QueryId = #struct_name<#(#lifetimes,)* #(#query_id_ty_params,)*>; const HAS_STATIC_QUERY_ID: bool = #(#has_static_query_id &&)* true; } }) } diesel_derives-2.2.3/src/queryable.rs000064400000000000000000000040561046102023000157510ustar 00000000000000use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{parse_quote, DeriveInput, Ident, Index, Result}; use crate::field::Field; use crate::model::Model; use crate::util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, true)?; let struct_name = &item.ident; let field_ty = &model .fields() .iter() .map(Field::ty_for_deserialize) .collect::>(); let build_expr = model.fields().iter().enumerate().map(|(i, f)| { let field_name = &f.name; let i = Index::from(i); quote!(#field_name: row.#i.try_into()?) }); let sql_type = &(0..model.fields().len()) .map(|i| { let i = Ident::new(&format!("__ST{i}"), Span::call_site()); quote!(#i) }) .collect::>(); let (_, ty_generics, _) = item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics .params .push(parse_quote!(__DB: diesel::backend::Backend)); for id in 0..model.fields().len() { let ident = Ident::new(&format!("__ST{id}"), Span::call_site()); generics.params.push(parse_quote!(#ident)); } { let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); where_clause .predicates .push(parse_quote!((#(#field_ty,)*): FromStaticSqlRow<(#(#sql_type,)*), __DB>)); } let (impl_generics, _, where_clause) = generics.split_for_impl(); Ok(wrap_in_dummy_mod(quote! 
{ use diesel::deserialize::{self, FromStaticSqlRow, Queryable}; use diesel::row::{Row as _, Field as _}; use std::convert::TryInto; impl #impl_generics Queryable<(#(#sql_type,)*), __DB> for #struct_name #ty_generics #where_clause { type Row = (#(#field_ty,)*); fn build(row: Self::Row) -> deserialize::Result { Ok(Self { #(#build_expr,)* }) } } })) } diesel_derives-2.2.3/src/queryable_by_name.rs000064400000000000000000000102621046102023000174370ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::{parse_quote, parse_quote_spanned, DeriveInput, Ident, LitStr, Result, Type}; use crate::attrs::AttributeSpanWrapper; use crate::field::{Field, FieldName}; use crate::model::Model; use crate::util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, false)?; let struct_name = &item.ident; let fields = &model.fields().iter().map(get_ident).collect::>(); let field_names = model.fields().iter().map(|f| &f.name); let initial_field_expr = model .fields() .iter() .map(|f| { let field_ty = &f.ty; if f.embed() { Ok(quote!(<#field_ty as QueryableByName<__DB>>::build(row)?)) } else { let st = sql_type(f, &model)?; let deserialize_ty = f.ty_for_deserialize(); let name = f.column_name()?; let name = LitStr::new(&name.to_string(), name.span()); Ok(quote!( { let field = diesel::row::NamedRow::get::<#st, #deserialize_ty>(row, #name)?; <#deserialize_ty as Into<#field_ty>>::into(field) } )) } }) .collect::>>()?; let (_, ty_generics, ..) 
= item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics .params .push(parse_quote!(__DB: diesel::backend::Backend)); for field in model.fields() { let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); let span = field.span; let field_ty = field.ty_for_deserialize(); if field.embed() { where_clause .predicates .push(parse_quote_spanned!(span=> #field_ty: QueryableByName<__DB>)); } else { let st = sql_type(field, &model)?; where_clause.predicates.push( parse_quote_spanned!(span=> #field_ty: diesel::deserialize::FromSql<#st, __DB>), ); } } let model = &model; let check_function = if let Some(ref backends) = model.check_for_backend { let field_check_bound = model.fields().iter().filter(|f| !f.embed()).flat_map(|f| { backends.iter().map(move |b| { let field_ty = f.ty_for_deserialize(); let span = f.span; let ty = sql_type(f, model).unwrap(); quote::quote_spanned! {span => #field_ty: diesel::deserialize::FromSqlRow<#ty, #b> } }) }); Some(quote::quote! { fn _check_field_compatibility() where #(#field_check_bound,)* {} }) } else { None }; let (impl_generics, _, where_clause) = generics.split_for_impl(); Ok(wrap_in_dummy_mod(quote! { use diesel::deserialize::{self, QueryableByName}; use diesel::row::{NamedRow}; use diesel::sql_types::Untyped; impl #impl_generics QueryableByName<__DB> for #struct_name #ty_generics #where_clause { fn build<'__a>(row: &impl NamedRow<'__a, __DB>) -> deserialize::Result { #( let mut #fields = #initial_field_expr; )* deserialize::Result::Ok(Self { #( #field_names: #fields, )* }) } } #check_function })) } fn get_ident(field: &Field) -> Ident { match &field.name { FieldName::Named(n) => n.clone(), FieldName::Unnamed(i) => Ident::new(&format!("field_{}", i.index), i.span), } } fn sql_type(field: &Field, model: &Model) -> Result { let table_name = &model.table_names()[0]; match field.sql_type { Some(AttributeSpanWrapper { item: ref st, .. 
}) => Ok(st.clone()), None => { let column_name = field.column_name()?.to_ident()?; Ok(parse_quote!(diesel::dsl::SqlTypeOf<#table_name::#column_name>)) } } } diesel_derives-2.2.3/src/selectable.rs000064400000000000000000000133371046102023000160650ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::spanned::Spanned; use syn::DeriveInput; use syn::{parse_quote, Result}; use crate::field::Field; use crate::model::Model; use crate::util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, false, false)?; let (_, ty_generics, original_where_clause) = item.generics.split_for_impl(); let mut generics = item.generics.clone(); generics .params .push(parse_quote!(__DB: diesel::backend::Backend)); for embed_field in model.fields().iter().filter(|f| f.embed()) { let embed_ty = &embed_field.ty; generics .where_clause .get_or_insert_with(|| parse_quote!(where)) .predicates .push(parse_quote!(#embed_ty: Selectable<__DB>)); } let (impl_generics, _, where_clause) = generics.split_for_impl(); let struct_name = &item.ident; let mut compile_errors: Vec = Vec::new(); let field_columns_ty = model .fields() .iter() .map(|f| field_column_ty(f, &model, &mut compile_errors)) .collect::>>()?; let field_columns_inst = model .fields() .iter() .map(|f| field_column_inst(f, &model)) .collect::>>()?; let check_function = if let Some(ref backends) = model.check_for_backend { let field_check_bound = model .fields() .iter() .zip(&field_columns_ty) .filter(|(f, _)| !f.embed()) .flat_map(|(f, ty)| { backends.iter().map(move |b| { let span = f.ty.span(); let field_ty = to_field_ty_bound(f.ty_for_deserialize())?; Ok(syn::parse_quote_spanned! 
{span => #field_ty: diesel::deserialize::FromSqlRow, #b> }) }) }) .collect::>>()?; let where_clause = &mut original_where_clause.cloned(); let where_clause = where_clause.get_or_insert_with(|| parse_quote!(where)); for field_check in field_check_bound { where_clause.predicates.push(field_check); } Some(quote::quote! { fn _check_field_compatibility #impl_generics() #where_clause {} }) } else { None }; let errors: TokenStream = compile_errors .into_iter() .map(|e| e.into_compile_error()) .collect(); Ok(wrap_in_dummy_mod(quote! { use diesel::expression::Selectable; impl #impl_generics Selectable<__DB> for #struct_name #ty_generics #where_clause { type SelectExpression = (#(#field_columns_ty,)*); fn construct_selection() -> Self::SelectExpression { (#(#field_columns_inst,)*) } } #check_function #errors })) } fn to_field_ty_bound(field_ty: &syn::Type) -> Result { match field_ty { syn::Type::Reference(r) => { use crate::quote::ToTokens; // references are not supported for checking for now // // (How ever you can even have references in a `Queryable` struct anyway) Err(syn::Error::new( field_ty.span(), format!( "References are not supported in `Queryable` types\n\ Consider using `std::borrow::Cow<'{}, {}>` instead", r.lifetime .as_ref() .expect("It's a struct field so it must have a named lifetime") .ident, r.elem.to_token_stream() ), )) } field_ty => Ok(quote::quote! 
{ #field_ty }), } } fn field_column_ty( field: &Field, model: &Model, compile_errors: &mut Vec, ) -> Result { if let Some(ref select_expression) = field.select_expression { use dsl_auto_type::auto_type::expression_type_inference as type_inference; let expr = &select_expression.item; let (inferred_type, errors) = type_inference::infer_expression_type( expr, field.select_expression_type.as_ref().map(|t| &t.item), &type_inference::InferrerSettings::builder() .dsl_path(parse_quote!(diesel::dsl)) .function_types_case(crate::AUTO_TYPE_DEFAULT_FUNCTION_TYPE_CASE) .method_types_case(crate::AUTO_TYPE_DEFAULT_METHOD_TYPE_CASE) .build(), ); compile_errors.extend(errors); Ok(quote::quote!(#inferred_type)) } else if let Some(ref select_expression_type) = field.select_expression_type { let ty = &select_expression_type.item; Ok(quote!(#ty)) } else if field.embed() { let embed_ty = &field.ty; Ok(quote!(<#embed_ty as Selectable<__DB>>::SelectExpression)) } else { let table_name = &model.table_names()[0]; let column_name = field.column_name()?.to_ident()?; Ok(quote!(#table_name::#column_name)) } } fn field_column_inst(field: &Field, model: &Model) -> Result { if let Some(ref select_expression) = field.select_expression { let expr = &select_expression.item; Ok(quote!(#expr)) } else if field.embed() { let embed_ty = &field.ty; Ok(quote!(<#embed_ty as Selectable<__DB>>::construct_selection())) } else { let table_name = &model.table_names()[0]; let column_name = field.column_name()?.to_ident()?; Ok(quote!(#table_name::#column_name)) } } diesel_derives-2.2.3/src/sql_function.rs000064400000000000000000000467351046102023000164760ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use quote::ToTokens; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::{ parenthesized, parse_quote, Attribute, GenericArgument, Generics, Ident, Meta, MetaNameValue, PathArguments, Token, Type, }; pub(crate) fn expand(input: SqlFunctionDecl, 
legacy_helper_type_and_module: bool) -> TokenStream { let SqlFunctionDecl { mut attributes, fn_token, fn_name, mut generics, args, return_type, } = input; let sql_name = attributes .iter() .find(|attr| attr.meta.path().is_ident("sql_name")) .and_then(|attr| { if let Meta::NameValue(MetaNameValue { value: syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(ref lit), .. }), .. }) = attr.meta { Some(lit.value()) } else { None } }) .unwrap_or_else(|| fn_name.to_string()); let is_aggregate = attributes .iter() .any(|attr| attr.meta.path().is_ident("aggregate")); attributes.retain(|attr| { !attr.meta.path().is_ident("sql_name") && !attr.meta.path().is_ident("aggregate") }); let args = &args; let (ref arg_name, ref arg_type): (Vec<_>, Vec<_>) = args .iter() .map(|StrictFnArg { name, ty, .. }| (name, ty)) .unzip(); let arg_struct_assign = args.iter().map( |StrictFnArg { name, colon_token, .. }| { let name2 = name.clone(); quote!(#name #colon_token #name2.as_expression()) }, ); let type_args = &generics .type_params() .map(|type_param| type_param.ident.clone()) .collect::>(); for StrictFnArg { name, .. } in args { generics.params.push(parse_quote!(#name)); } let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); // Even if we force an empty where clause, it still won't print the where // token with no bounds. 
let where_clause = where_clause .map(|w| quote!(#w)) .unwrap_or_else(|| quote!(where)); let mut generics_with_internal = generics.clone(); generics_with_internal .params .push(parse_quote!(__DieselInternal)); let (impl_generics_internal, _, _) = generics_with_internal.split_for_impl(); let sql_type; let numeric_derive; if arg_name.is_empty() { sql_type = None; // FIXME: We can always derive once trivial bounds are stable numeric_derive = None; } else { sql_type = Some(quote!((#(#arg_name),*): Expression,)); numeric_derive = Some(quote!(#[derive(diesel::sql_types::DieselNumericOps)])); } let helper_type_doc = format!("The return type of [`{fn_name}()`](super::fn_name)"); let args_iter = args.iter(); let mut tokens = quote! { use diesel::{self, QueryResult}; use diesel::expression::{AsExpression, Expression, SelectableExpression, AppearsOnTable, ValidGrouping}; use diesel::query_builder::{QueryFragment, AstPass}; use diesel::sql_types::*; use super::*; #[derive(Debug, Clone, Copy, diesel::query_builder::QueryId)] #numeric_derive pub struct #fn_name #ty_generics { #(pub(in super) #args_iter,)* #(pub(in super) #type_args: ::std::marker::PhantomData<#type_args>,)* } #[doc = #helper_type_doc] pub type HelperType #ty_generics = #fn_name < #(#type_args,)* #(<#arg_name as AsExpression<#arg_type>>::Expression,)* >; impl #impl_generics Expression for #fn_name #ty_generics #where_clause #sql_type { type SqlType = #return_type; } // __DieselInternal is what we call QS normally impl #impl_generics_internal SelectableExpression<__DieselInternal> for #fn_name #ty_generics #where_clause #(#arg_name: SelectableExpression<__DieselInternal>,)* Self: AppearsOnTable<__DieselInternal>, { } // __DieselInternal is what we call QS normally impl #impl_generics_internal AppearsOnTable<__DieselInternal> for #fn_name #ty_generics #where_clause #(#arg_name: AppearsOnTable<__DieselInternal>,)* Self: Expression, { } // __DieselInternal is what we call DB normally impl #impl_generics_internal 
QueryFragment<__DieselInternal> for #fn_name #ty_generics where __DieselInternal: diesel::backend::Backend, #(#arg_name: QueryFragment<__DieselInternal>,)* { #[allow(unused_assignments)] fn walk_ast<'__b>(&'__b self, mut out: AstPass<'_, '__b, __DieselInternal>) -> QueryResult<()>{ out.push_sql(concat!(#sql_name, "(")); // we unroll the arguments manually here, to prevent borrow check issues let mut needs_comma = false; #( if !self.#arg_name.is_noop(out.backend())? { if needs_comma { out.push_sql(", "); } self.#arg_name.walk_ast(out.reborrow())?; needs_comma = true; } )* out.push_sql(")"); Ok(()) } } }; let is_supported_on_sqlite = cfg!(feature = "sqlite") && type_args.is_empty() && is_sqlite_type(&return_type) && arg_type.iter().all(|a| is_sqlite_type(a)); if is_aggregate { tokens = quote! { #tokens impl #impl_generics_internal ValidGrouping<__DieselInternal> for #fn_name #ty_generics { type IsAggregate = diesel::expression::is_aggregate::Yes; } }; if is_supported_on_sqlite { tokens = quote! { #tokens use diesel::sqlite::{Sqlite, SqliteConnection}; use diesel::serialize::ToSql; use diesel::deserialize::{FromSqlRow, StaticallySizedRow}; use diesel::sqlite::SqliteAggregateFunction; use diesel::sql_types::IntoNullable; }; match arg_name.len() { x if x > 1 => { tokens = quote! { #tokens #[allow(dead_code)] /// Registers an implementation for this aggregate function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). 
pub fn register_impl( conn: &mut SqliteConnection ) -> QueryResult<()> where A: SqliteAggregateFunction<(#(#arg_name,)*)> + Send + 'static + ::std::panic::UnwindSafe + ::std::panic::RefUnwindSafe, A::Output: ToSql<#return_type, Sqlite>, (#(#arg_name,)*): FromSqlRow<(#(#arg_type,)*), Sqlite> + StaticallySizedRow<(#(#arg_type,)*), Sqlite> + ::std::panic::UnwindSafe, { conn.register_aggregate_function::<(#(#arg_type,)*), #return_type, _, _, A>(#sql_name) } }; } 1 => { let arg_name = arg_name[0]; let arg_type = arg_type[0]; tokens = quote! { #tokens #[allow(dead_code)] /// Registers an implementation for this aggregate function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). pub fn register_impl( conn: &mut SqliteConnection ) -> QueryResult<()> where A: SqliteAggregateFunction<#arg_name> + Send + 'static + std::panic::UnwindSafe + std::panic::RefUnwindSafe, A::Output: ToSql<#return_type, Sqlite>, #arg_name: FromSqlRow<#arg_type, Sqlite> + StaticallySizedRow<#arg_type, Sqlite> + ::std::panic::UnwindSafe, { conn.register_aggregate_function::<#arg_type, #return_type, _, _, A>(#sql_name) } }; } _ => (), } } } else { tokens = quote! { #tokens #[derive(ValidGrouping)] pub struct __Derived<#(#arg_name,)*>(#(#arg_name,)*); impl #impl_generics_internal ValidGrouping<__DieselInternal> for #fn_name #ty_generics where __Derived<#(#arg_name,)*>: ValidGrouping<__DieselInternal>, { type IsAggregate = <__Derived<#(#arg_name,)*> as ValidGrouping<__DieselInternal>>::IsAggregate; } }; if is_supported_on_sqlite && !arg_name.is_empty() { tokens = quote! 
{ #tokens use diesel::sqlite::{Sqlite, SqliteConnection}; use diesel::serialize::ToSql; use diesel::deserialize::{FromSqlRow, StaticallySizedRow}; #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). If /// the function is nondeterministic, call /// `register_nondeterministic_impl` instead. pub fn register_impl( conn: &mut SqliteConnection, f: F, ) -> QueryResult<()> where F: Fn(#(#arg_name,)*) -> Ret + std::panic::UnwindSafe + Send + 'static, (#(#arg_name,)*): FromSqlRow<(#(#arg_type,)*), Sqlite> + StaticallySizedRow<(#(#arg_type,)*), Sqlite>, Ret: ToSql<#return_type, Sqlite>, { conn.register_sql_function::<(#(#arg_type,)*), #return_type, _, _, _>( #sql_name, true, move |(#(#arg_name,)*)| f(#(#arg_name,)*), ) } #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. /// `register_nondeterministic_impl` should only be used if your /// function can return different results with the same arguments (e.g. /// `random`). If your function is deterministic, you should call /// `register_impl` instead. pub fn register_nondeterministic_impl( conn: &mut SqliteConnection, mut f: F, ) -> QueryResult<()> where F: FnMut(#(#arg_name,)*) -> Ret + std::panic::UnwindSafe + Send + 'static, (#(#arg_name,)*): FromSqlRow<(#(#arg_type,)*), Sqlite> + StaticallySizedRow<(#(#arg_type,)*), Sqlite>, Ret: ToSql<#return_type, Sqlite>, { conn.register_sql_function::<(#(#arg_type,)*), #return_type, _, _, _>( #sql_name, false, move |(#(#arg_name,)*)| f(#(#arg_name,)*), ) } }; } if is_supported_on_sqlite && arg_name.is_empty() { tokens = quote! 
{ #tokens use diesel::sqlite::{Sqlite, SqliteConnection}; use diesel::serialize::ToSql; #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. The implementation must be /// deterministic (returns the same result given the same arguments). If /// the function is nondeterministic, call /// `register_nondeterministic_impl` instead. pub fn register_impl( conn: &SqliteConnection, f: F, ) -> QueryResult<()> where F: Fn() -> Ret + std::panic::UnwindSafe + Send + 'static, Ret: ToSql<#return_type, Sqlite>, { conn.register_noarg_sql_function::<#return_type, _, _>( #sql_name, true, f, ) } #[allow(dead_code)] /// Registers an implementation for this function on the given connection /// /// This function must be called for every `SqliteConnection` before /// this SQL function can be used on SQLite. /// `register_nondeterministic_impl` should only be used if your /// function can return different results with the same arguments (e.g. /// `random`). If your function is deterministic, you should call /// `register_impl` instead. pub fn register_nondeterministic_impl( conn: &SqliteConnection, mut f: F, ) -> QueryResult<()> where F: FnMut() -> Ret + std::panic::UnwindSafe + Send + 'static, Ret: ToSql<#return_type, Sqlite>, { conn.register_noarg_sql_function::<#return_type, _, _>( #sql_name, false, f, ) } }; } } let args_iter = args.iter(); let (outside_of_module_helper_type, return_type_path, internals_module_name) = if legacy_helper_type_and_module { (None, quote! { #fn_name::HelperType }, fn_name.clone()) } else { let internals_module_name = Ident::new(&format!("{fn_name}_utils"), fn_name.span()); ( Some(quote! 
{ #[allow(non_camel_case_types, non_snake_case)] #[doc = #helper_type_doc] pub type #fn_name #ty_generics = #internals_module_name::#fn_name < #(#type_args,)* #(<#arg_name as ::diesel::expression::AsExpression<#arg_type>>::Expression,)* >; }), quote! { #fn_name }, internals_module_name, ) }; quote! { #(#attributes)* #[allow(non_camel_case_types)] pub #fn_token #fn_name #impl_generics (#(#args_iter,)*) -> #return_type_path #ty_generics #where_clause #(#arg_name: ::diesel::expression::AsExpression<#arg_type>,)* { #internals_module_name::#fn_name { #(#arg_struct_assign,)* #(#type_args: ::std::marker::PhantomData,)* } } #outside_of_module_helper_type #[doc(hidden)] #[allow(non_camel_case_types, non_snake_case, unused_imports)] pub(crate) mod #internals_module_name { #tokens } } } pub(crate) struct SqlFunctionDecl { attributes: Vec, fn_token: Token![fn], fn_name: Ident, generics: Generics, args: Punctuated, return_type: Type, } impl Parse for SqlFunctionDecl { fn parse(input: ParseStream) -> Result { let attributes = Attribute::parse_outer(input)?; let fn_token: Token![fn] = input.parse()?; let fn_name = Ident::parse(input)?; let generics = Generics::parse(input)?; let args; let _paren = parenthesized!(args in input); let args = args.parse_terminated(StrictFnArg::parse, Token![,])?; let return_type = if Option::]>::parse(input)?.is_some() { Type::parse(input)? 
} else { parse_quote!(diesel::expression::expression_types::NotSelectable) }; let _semi = Option::::parse(input)?; Ok(Self { attributes, fn_token, fn_name, generics, args, return_type, }) } } /// Essentially the same as ArgCaptured, but only allowing ident patterns struct StrictFnArg { name: Ident, colon_token: Token![:], ty: Type, } impl Parse for StrictFnArg { fn parse(input: ParseStream) -> Result { let name = input.parse()?; let colon_token = input.parse()?; let ty = input.parse()?; Ok(Self { name, colon_token, ty, }) } } impl ToTokens for StrictFnArg { fn to_tokens(&self, tokens: &mut TokenStream) { self.name.to_tokens(tokens); self.colon_token.to_tokens(tokens); self.name.to_tokens(tokens); } } fn is_sqlite_type(ty: &Type) -> bool { let last_segment = if let Type::Path(tp) = ty { if let Some(segment) = tp.path.segments.last() { segment } else { return false; } } else { return false; }; let ident = last_segment.ident.to_string(); if ident == "Nullable" { if let PathArguments::AngleBracketed(ref ab) = last_segment.arguments { if let Some(GenericArgument::Type(ty)) = ab.args.first() { return is_sqlite_type(ty); } } return false; } [ "BigInt", "Binary", "Bool", "Date", "Double", "Float", "Integer", "Numeric", "SmallInt", "Text", "Time", "Timestamp", ] .contains(&ident.as_str()) } diesel_derives-2.2.3/src/sql_type.rs000064400000000000000000000101661046102023000156170ustar 00000000000000use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::Result; use syn::{DeriveInput, Ident}; use crate::model::Model; use crate::parsers::PostgresType; use crate::util::wrap_in_dummy_mod; pub fn derive(item: DeriveInput) -> Result { let model = Model::from_item(&item, true, false)?; let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let sqlite_tokens = sqlite_tokens(&item, &model); let mysql_tokens = mysql_tokens(&item, &model); let pg_tokens = pg_tokens(&item, &model); Ok(wrap_in_dummy_mod(quote! 
{ impl #impl_generics diesel::sql_types::SqlType for #struct_name #ty_generics #where_clause { type IsNull = diesel::sql_types::is_nullable::NotNull; } impl #impl_generics diesel::sql_types::SingleValue for #struct_name #ty_generics #where_clause { } #sqlite_tokens #mysql_tokens #pg_tokens })) } fn sqlite_tokens(item: &DeriveInput, model: &Model) -> Option { model .sqlite_type .as_ref() .map(|sqlite_type| Ident::new(&sqlite_type.name.value(), Span::call_site())) .and_then(|ty| { if cfg!(not(feature = "sqlite")) { return None; } let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); Some(quote! { impl #impl_generics diesel::sql_types::HasSqlType<#struct_name #ty_generics> for diesel::sqlite::Sqlite #where_clause { fn metadata(_: &mut ()) -> diesel::sqlite::SqliteType { diesel::sqlite::SqliteType::#ty } } }) }) } fn mysql_tokens(item: &DeriveInput, model: &Model) -> Option { model .mysql_type .as_ref() .map(|mysql_type| Ident::new(&mysql_type.name.value(), Span::call_site())) .and_then(|ty| { if cfg!(not(feature = "mysql")) { return None; } let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); Some(quote! 
{ impl #impl_generics diesel::sql_types::HasSqlType<#struct_name #ty_generics> for diesel::mysql::Mysql #where_clause { fn metadata(_: &mut ()) -> diesel::mysql::MysqlType { diesel::mysql::MysqlType::#ty } } }) }) } fn pg_tokens(item: &DeriveInput, model: &Model) -> Option { model.postgres_type.as_ref().and_then(|ty| { if cfg!(not(feature = "postgres")) { return None; } let struct_name = &item.ident; let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); let metadata_fn = match ty { PostgresType::Fixed(oid, array_oid) => quote!( fn metadata(_: &mut Self::MetadataLookup) -> PgTypeMetadata { PgTypeMetadata::new(#oid, #array_oid) } ), PostgresType::Lookup(type_name, Some(type_schema)) => quote!( fn metadata(lookup: &mut Self::MetadataLookup) -> PgTypeMetadata { lookup.lookup_type(#type_name, Some(#type_schema)) } ), PostgresType::Lookup(type_name, None) => quote!( fn metadata(lookup: &mut Self::MetadataLookup) -> PgTypeMetadata { lookup.lookup_type(#type_name, None) } ), }; Some(quote! { use diesel::pg::{PgMetadataLookup, PgTypeMetadata}; impl #impl_generics diesel::sql_types::HasSqlType<#struct_name #ty_generics> for diesel::pg::Pg #where_clause { #metadata_fn } }) }) } diesel_derives-2.2.3/src/table.rs000064400000000000000000001105711046102023000150470ustar 00000000000000use diesel_table_macro_syntax::{ColumnDef, TableDecl}; use proc_macro2::TokenStream; use syn::parse_quote; use syn::Ident; const DEFAULT_PRIMARY_KEY_NAME: &str = "id"; pub(crate) fn expand(input: TableDecl) -> TokenStream { if input.column_defs.len() > super::diesel_for_each_tuple::MAX_TUPLE_SIZE as usize { let txt = if input.column_defs.len() > 128 { "You reached the end. Diesel does not support tables with \ more than 128 columns. Consider using less columns." } else if input.column_defs.len() > 64 { "Table contains more than 64 columns. Consider enabling the \ `128-column-tables` feature to enable diesels support for \ tables with more than 64 columns." 
} else if input.column_defs.len() > 32 { "Table contains more than 32 columns. Consider enabling the \ `64-column-tables` feature to enable diesels support for \ tables with more than 32 columns." } else { "Table contains more than 16 columns. Consider enabling the \ `32-column-tables` feature to enable diesels support for \ tables with more than 16 columns." }; return quote::quote! { compile_error!(#txt); }; } let meta = &input.meta; let table_name = &input.table_name; let imports = if input.use_statements.is_empty() { vec![parse_quote!( use diesel::sql_types::*; )] } else { input.use_statements.clone() }; let column_names = input .column_defs .iter() .map(|c| &c.column_name) .collect::>(); let column_names = &column_names; let primary_key: TokenStream = match input.primary_keys.as_ref() { None if column_names.contains(&&syn::Ident::new( DEFAULT_PRIMARY_KEY_NAME, proc_macro2::Span::call_site(), )) => { let id = syn::Ident::new(DEFAULT_PRIMARY_KEY_NAME, proc_macro2::Span::call_site()); parse_quote! { #id } } None => { let mut message = format!( "Neither an explicit primary key found nor does an `id` column exist.\n\ Consider explicitly defining a primary key. \n\ For example for specifying `{}` as primary key:\n\n\ table! {{\n", column_names[0], ); message += &format!("\t{table_name} ({}) {{\n", &column_names[0]); for c in &input.column_defs { let tpe = c .tpe .path .segments .iter() .map(|p| p.ident.to_string()) .collect::>() .join("::"); message += &format!("\t\t{} -> {tpe},\n", c.column_name); } message += "\t}\n}"; let span = input.table_name.span(); return quote::quote_spanned! {span=> compile_error!(#message); }; } Some(a) if a.keys.len() == 1 => { let k = a.keys.first().unwrap(); parse_quote! { #k } } Some(a) => { let keys = a.keys.iter(); parse_quote! 
{ (#(#keys,)*) } } }; let column_defs = input.column_defs.iter().map(expand_column_def); let column_ty = input.column_defs.iter().map(|c| &c.tpe); let valid_grouping_for_table_columns = generate_valid_grouping_for_table_columns(&input); let sql_name = &input.sql_name; let static_query_fragment_impl_for_table = if let Some(schema) = input.schema { let schema_name = schema.to_string(); quote::quote! { impl diesel::internal::table_macro::StaticQueryFragment for table { type Component = diesel::internal::table_macro::InfixNode< diesel::internal::table_macro::Identifier<'static>, diesel::internal::table_macro::Identifier<'static>, &'static str >; const STATIC_COMPONENT: &'static Self::Component = &diesel::internal::table_macro::InfixNode::new( diesel::internal::table_macro::Identifier(#schema_name), diesel::internal::table_macro::Identifier(#sql_name), "." ); } } } else { quote::quote! { impl diesel::internal::table_macro::StaticQueryFragment for table { type Component = diesel::internal::table_macro::Identifier<'static>; const STATIC_COMPONENT: &'static Self::Component = &diesel::internal::table_macro::Identifier(#sql_name); } } }; let reexport_column_from_dsl = input.column_defs.iter().map(|c| { let column_name = &c.column_name; if c.column_name == *table_name { let span = c.column_name.span(); let message = format!( "Column `{column_name}` cannot be named the same as it's table.\n\ You may use `#[sql_name = \"{column_name}\"]` to reference the table's \ `{column_name}` column \n\ Docs available at: `https://docs.diesel.rs/master/diesel/macro.table.html`\n" ); quote::quote_spanned! { span => compile_error!(#message); } } else { quote::quote! { pub use super::columns::#column_name; } } }); let backend_specific_table_impls = if cfg!(feature = "postgres") { Some(quote::quote! { impl diesel::JoinTo> for table where diesel::query_builder::Only: diesel::JoinTo, { type FromClause = diesel::query_builder::Only; type OnClause = as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::query_builder::Only) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::query_builder::Only::::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } impl diesel::query_source::AppearsInFromClause> for table { type Count = diesel::query_source::Once; } impl diesel::query_source::AppearsInFromClause
for diesel::query_builder::Only
{ type Count = diesel::query_source::Once; } impl diesel::JoinTo> for table where diesel::query_builder::Tablesample: diesel::JoinTo
, TSM: diesel::internal::table_macro::TablesampleMethod { type FromClause = diesel::query_builder::Tablesample; type OnClause = as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::query_builder::Tablesample) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::query_builder::Tablesample::::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } impl diesel::query_source::AppearsInFromClause> for table where TSM: diesel::internal::table_macro::TablesampleMethod { type Count = diesel::query_source::Once; } impl diesel::query_source::AppearsInFromClause
for diesel::query_builder::Tablesample where TSM: diesel::internal::table_macro::TablesampleMethod { type Count = diesel::query_source::Once; } }) } else { None }; let imports_for_column_module = imports.iter().map(fix_import_for_submodule); quote::quote! { #(#meta)* #[allow(unused_imports, dead_code, unreachable_pub, unused_qualifications)] pub mod #table_name { use ::diesel; pub use self::columns::*; #(#imports)* /// Re-exports all of the columns of this table, as well as the /// table struct renamed to the module name. This is meant to be /// glob imported for functions which only deal with one table. pub mod dsl { #(#reexport_column_from_dsl)* pub use super::table as #table_name; } #[allow(non_upper_case_globals, dead_code)] /// A tuple of all of the columns on this table pub const all_columns: (#(#column_names,)*) = (#(#column_names,)*); #[allow(non_camel_case_types)] #[derive(Debug, Clone, Copy, diesel::query_builder::QueryId, Default)] /// The actual table struct /// /// This is the type which provides the base methods of the query /// builder, such as `.select` and `.filter`. pub struct table; impl table { #[allow(dead_code)] /// Represents `table_name.*`, which is sometimes necessary /// for efficient count queries. It cannot be used in place of /// `all_columns` pub fn star(&self) -> star { star } } /// The SQL type of all of the columns on this table pub type SqlType = (#(#column_ty,)*); /// Helper type for representing a boxed query from this table pub type BoxedQuery<'a, DB, ST = SqlType> = diesel::internal::table_macro::BoxedSelectStatement<'a, ST, diesel::internal::table_macro::FromClause
, DB>; impl diesel::QuerySource for table { type FromClause = diesel::internal::table_macro::StaticQueryFragmentInstance
; type DefaultSelection = ::AllColumns; fn from_clause(&self) -> Self::FromClause { diesel::internal::table_macro::StaticQueryFragmentInstance::new() } fn default_selection(&self) -> Self::DefaultSelection { use diesel::Table; Self::all_columns() } } impl diesel::query_builder::QueryFragment for table where DB: diesel::backend::Backend,
::Component: diesel::query_builder::QueryFragment { fn walk_ast<'b>(&'b self, __diesel_internal_pass: diesel::query_builder::AstPass<'_, 'b, DB>) -> diesel::result::QueryResult<()> {
::STATIC_COMPONENT.walk_ast(__diesel_internal_pass) } } #static_query_fragment_impl_for_table impl diesel::query_builder::AsQuery for table { type SqlType = SqlType; type Query = diesel::internal::table_macro::SelectStatement>; fn as_query(self) -> Self::Query { diesel::internal::table_macro::SelectStatement::simple(self) } } impl diesel::Table for table { type PrimaryKey = #primary_key; type AllColumns = (#(#column_names,)*); fn primary_key(&self) -> Self::PrimaryKey { #primary_key } fn all_columns() -> Self::AllColumns { (#(#column_names,)*) } } impl diesel::associations::HasTable for table { type Table = Self; fn table() -> Self::Table { table } } impl diesel::query_builder::IntoUpdateTarget for table { type WhereClause = <::Query as diesel::query_builder::IntoUpdateTarget>::WhereClause; fn into_update_target(self) -> diesel::query_builder::UpdateTarget { use diesel::query_builder::AsQuery; let q: diesel::internal::table_macro::SelectStatement> = self.as_query(); q.into_update_target() } } impl diesel::query_source::AppearsInFromClause
for table { type Count = diesel::query_source::Once; } // impl> AppearsInFromClause
for Alias impl diesel::internal::table_macro::AliasAppearsInFromClause for table where S: diesel::query_source::AliasSource, { type Count = diesel::query_source::Never; } // impl, S2: AliasSource> AppearsInFromClause> for Alias // Those are specified by the `alias!` macro, but this impl will allow it to implement this trait even in downstream // crates from the schema impl diesel::internal::table_macro::AliasAliasAppearsInFromClause for table where S1: diesel::query_source::AliasSource, S2: diesel::query_source::AliasSource, S1: diesel::internal::table_macro::AliasAliasAppearsInFromClauseSameTable, { type Count = >::Count; } impl diesel::query_source::AppearsInFromClause> for table where S: diesel::query_source::AliasSource, { type Count = diesel::query_source::Never; } impl diesel::internal::table_macro::FieldAliasMapperAssociatedTypesDisjointnessTrick for table where S: diesel::query_source::AliasSource + ::std::clone::Clone, C: diesel::query_source::Column
, { type Out = diesel::query_source::AliasedField; fn map(__diesel_internal_column: C, __diesel_internal_alias: &diesel::query_source::Alias) -> Self::Out { __diesel_internal_alias.field(__diesel_internal_column) } } impl diesel::query_source::AppearsInFromClause
for diesel::internal::table_macro::NoFromClause { type Count = diesel::query_source::Never; } impl diesel::JoinTo> for table where diesel::internal::table_macro::Join: diesel::JoinTo
, Left: diesel::query_source::QuerySource, Right: diesel::query_source::QuerySource, { type FromClause = diesel::internal::table_macro::Join; type OnClause = as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::internal::table_macro::Join) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::internal::table_macro::Join::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } impl diesel::JoinTo> for table where diesel::internal::table_macro::JoinOn: diesel::JoinTo
, { type FromClause = diesel::internal::table_macro::JoinOn; type OnClause = as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::internal::table_macro::JoinOn) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::internal::table_macro::JoinOn::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } impl diesel::JoinTo, S, D, W, O, L, Of, G>> for table where diesel::internal::table_macro::SelectStatement, S, D, W, O, L, Of, G>: diesel::JoinTo
, F: diesel::query_source::QuerySource { type FromClause = diesel::internal::table_macro::SelectStatement, S, D, W, O, L, Of, G>; type OnClause = , S, D, W, O, L, Of, G> as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::internal::table_macro::SelectStatement, S, D, W, O, L, Of, G>) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::internal::table_macro::SelectStatement::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } impl<'a, QS, ST, DB> diesel::JoinTo, ST, DB>> for table where diesel::internal::table_macro::BoxedSelectStatement<'a, diesel::internal::table_macro::FromClause, ST, DB>: diesel::JoinTo
, QS: diesel::query_source::QuerySource, { type FromClause = diesel::internal::table_macro::BoxedSelectStatement<'a, diesel::internal::table_macro::FromClause, ST, DB>; type OnClause = , ST, DB> as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::internal::table_macro::BoxedSelectStatement<'a, diesel::internal::table_macro::FromClause, ST, DB>) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::internal::table_macro::BoxedSelectStatement::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } impl diesel::JoinTo> for table where diesel::query_source::Alias: diesel::JoinTo
, { type FromClause = diesel::query_source::Alias; type OnClause = as diesel::JoinTo
>::OnClause; fn join_target(__diesel_internal_rhs: diesel::query_source::Alias) -> (Self::FromClause, Self::OnClause) { let (_, __diesel_internal_on_clause) = diesel::query_source::Alias::::join_target(table); (__diesel_internal_rhs, __diesel_internal_on_clause) } } // This impl should be able to live in Diesel, // but Rust tries to recurse for no reason impl diesel::insertable::Insertable for table where
::Query: diesel::insertable::Insertable, { type Values = <
::Query as diesel::insertable::Insertable>::Values; fn values(self) -> Self::Values { use diesel::query_builder::AsQuery; self.as_query().values() } } impl<'a, T> diesel::insertable::Insertable for &'a table where table: diesel::insertable::Insertable, { type Values =
>::Values; fn values(self) -> Self::Values { (*self).values() } } #backend_specific_table_impls /// Contains all of the columns of this table pub mod columns { use ::diesel; use super::table; #(#imports_for_column_module)* #[allow(non_camel_case_types, dead_code)] #[derive(Debug, Clone, Copy, diesel::query_builder::QueryId)] /// Represents `table_name.*`, which is sometimes needed for /// efficient count queries. It cannot be used in place of /// `all_columns`, and has a `SqlType` of `()` to prevent it /// being used that way pub struct star; impl<__GB> diesel::expression::ValidGrouping<__GB> for star where (#(#column_names,)*): diesel::expression::ValidGrouping<__GB>, { type IsAggregate = <(#(#column_names,)*) as diesel::expression::ValidGrouping<__GB>>::IsAggregate; } impl diesel::Expression for star { type SqlType = diesel::expression::expression_types::NotSelectable; } impl diesel::query_builder::QueryFragment for star where
::FromClause: diesel::query_builder::QueryFragment, { #[allow(non_snake_case)] fn walk_ast<'b>(&'b self, mut __diesel_internal_out: diesel::query_builder::AstPass<'_, 'b, DB>) -> diesel::result::QueryResult<()> { use diesel::QuerySource; if !__diesel_internal_out.should_skip_from() { const FROM_CLAUSE: diesel::internal::table_macro::StaticQueryFragmentInstance
= diesel::internal::table_macro::StaticQueryFragmentInstance::new(); FROM_CLAUSE.walk_ast(__diesel_internal_out.reborrow())?; __diesel_internal_out.push_sql("."); } __diesel_internal_out.push_sql("*"); Ok(()) } } impl diesel::SelectableExpression
for star { } impl diesel::AppearsOnTable
for star { } #(#column_defs)* #(#valid_grouping_for_table_columns)* } } } } fn generate_valid_grouping_for_table_columns(table: &TableDecl) -> Vec { let mut ret = Vec::with_capacity(table.column_defs.len() * table.column_defs.len()); let primary_key = if let Some(ref pk) = table.primary_keys { if pk.keys.len() == 1 { pk.keys.first().map(ToString::to_string) } else { None } } else { Some(DEFAULT_PRIMARY_KEY_NAME.into()) }; for (id, right_col) in table.column_defs.iter().enumerate() { for left_col in table.column_defs.iter().skip(id) { let right_to_left = if Some(left_col.column_name.to_string()) == primary_key { Ident::new("Yes", proc_macro2::Span::call_site()) } else { Ident::new("No", proc_macro2::Span::call_site()) }; let left_to_right = if Some(right_col.column_name.to_string()) == primary_key { Ident::new("Yes", proc_macro2::Span::call_site()) } else { Ident::new("No", proc_macro2::Span::call_site()) }; let left_col = &left_col.column_name; let right_col = &right_col.column_name; if left_col != right_col { ret.push(quote::quote! 
{ impl diesel::expression::IsContainedInGroupBy<#right_col> for #left_col { type Output = diesel::expression::is_contained_in_group_by::#right_to_left; } impl diesel::expression::IsContainedInGroupBy<#left_col> for #right_col { type Output = diesel::expression::is_contained_in_group_by::#left_to_right; } }); } } } ret } fn fix_import_for_submodule(import: &syn::ItemUse) -> syn::ItemUse { let mut ret = import.clone(); if let syn::UseTree::Path(ref mut path) = ret.tree { // prepend another `super` to the any import // that starts with `super` so that it now refers to the correct // module if path.ident == "super" { let inner = path.clone(); path.tree = Box::new(syn::UseTree::Path(inner)); } } ret } fn is_numeric(ty: &syn::TypePath) -> bool { const NUMERIC_TYPES: &[&str] = &[ "SmallInt", "Int2", "Smallint", "SmallSerial", "Integer", "Int4", "Serial", "BigInt", "Int8", "Bigint", "BigSerial", "Decimal", "Float", "Float4", "Float8", "Double", "Numeric", ]; if let Some(last) = ty.path.segments.last() { match &last.arguments { syn::PathArguments::AngleBracketed(t) if (last.ident == "Nullable" || last.ident == "Unsigned") && t.args.len() == 1 => { if let Some(syn::GenericArgument::Type(syn::Type::Path(t))) = t.args.first() { NUMERIC_TYPES.iter().any(|i| { t.path.segments.last().map(|s| s.ident.to_string()) == Some(String::from(*i)) }) } else { false } } _ => NUMERIC_TYPES.iter().any(|i| last.ident == *i), } } else { false } } fn is_date_time(ty: &syn::TypePath) -> bool { const DATE_TYPES: &[&str] = &["Time", "Date", "Timestamp", "Timestamptz"]; if let Some(last) = ty.path.segments.last() { match &last.arguments { syn::PathArguments::AngleBracketed(t) if last.ident == "Nullable" && t.args.len() == 1 => { if let Some(syn::GenericArgument::Type(syn::Type::Path(t))) = t.args.first() { DATE_TYPES.iter().any(|i| { t.path.segments.last().map(|s| s.ident.to_string()) == Some(String::from(*i)) }) } else { false } } _ => DATE_TYPES.iter().any(|i| last.ident == *i), } } else { false } 
} fn is_network(ty: &syn::TypePath) -> bool { const NETWORK_TYPES: &[&str] = &["Cidr", "Inet"]; if let Some(last) = ty.path.segments.last() { match &last.arguments { syn::PathArguments::AngleBracketed(t) if last.ident == "Nullable" && t.args.len() == 1 => { if let Some(syn::GenericArgument::Type(syn::Type::Path(t))) = t.args.first() { NETWORK_TYPES.iter().any(|i| { t.path.segments.last().map(|s| s.ident.to_string()) == Some(String::from(*i)) }) } else { false } } _ => NETWORK_TYPES.iter().any(|i| last.ident == *i), } } else { false } } fn generate_op_impl(op: &str, tpe: &syn::Ident) -> TokenStream { let fn_name = syn::Ident::new(&op.to_lowercase(), tpe.span()); let op = syn::Ident::new(op, tpe.span()); quote::quote! { impl ::std::ops::#op for #tpe where Rhs: diesel::expression::AsExpression< <<#tpe as diesel::Expression>::SqlType as diesel::sql_types::ops::#op>::Rhs, >, { type Output = diesel::internal::table_macro::ops::#op; fn #fn_name(self, __diesel_internal_rhs: Rhs) -> Self::Output { diesel::internal::table_macro::ops::#op::new(self, __diesel_internal_rhs.as_expression()) } } } } fn expand_column_def(column_def: &ColumnDef) -> TokenStream { // TODO get a better span here as soon as that's // possible using stable rust let span = column_def.column_name.span(); let meta = &column_def.meta; let column_name = &column_def.column_name; let sql_name = &column_def.sql_name; let sql_type = &column_def.tpe; let backend_specific_column_impl = if cfg!(feature = "postgres") { Some(quote::quote! 
{ impl diesel::query_source::AppearsInFromClause> for #column_name { type Count = diesel::query_source::Once; } impl diesel::SelectableExpression> for #column_name {} impl diesel::query_source::AppearsInFromClause> for #column_name where TSM: diesel::internal::table_macro::TablesampleMethod { type Count = diesel::query_source::Once; } impl diesel::SelectableExpression> for #column_name where TSM: diesel::internal::table_macro::TablesampleMethod {} }) } else { None }; let ops_impls = if is_numeric(&column_def.tpe) { let add = generate_op_impl("Add", column_name); let sub = generate_op_impl("Sub", column_name); let div = generate_op_impl("Div", column_name); let mul = generate_op_impl("Mul", column_name); Some(quote::quote! { #add #sub #div #mul }) } else if is_date_time(&column_def.tpe) || is_network(&column_def.tpe) { let add = generate_op_impl("Add", column_name); let sub = generate_op_impl("Sub", column_name); Some(quote::quote! { #add #sub }) } else { None }; let max_length = column_def.max_length.as_ref().map(|column_max_length| { quote::quote! { impl self::diesel::query_source::SizeRestrictedColumn for #column_name { const MAX_LENGTH: usize = #column_max_length; } } }); quote::quote_spanned! {span=> #(#meta)* #[allow(non_camel_case_types, dead_code)] #[derive(Debug, Clone, Copy, diesel::query_builder::QueryId, Default)] pub struct #column_name; impl diesel::expression::Expression for #column_name { type SqlType = #sql_type; } impl diesel::query_builder::QueryFragment for #column_name where DB: diesel::backend::Backend, diesel::internal::table_macro::StaticQueryFragmentInstance
: diesel::query_builder::QueryFragment, { #[allow(non_snake_case)] fn walk_ast<'b>(&'b self, mut __diesel_internal_out: diesel::query_builder::AstPass<'_, 'b, DB>) -> diesel::result::QueryResult<()> { if !__diesel_internal_out.should_skip_from() { const FROM_CLAUSE: diesel::internal::table_macro::StaticQueryFragmentInstance
= diesel::internal::table_macro::StaticQueryFragmentInstance::new(); FROM_CLAUSE.walk_ast(__diesel_internal_out.reborrow())?; __diesel_internal_out.push_sql("."); } __diesel_internal_out.push_identifier(#sql_name) } } impl diesel::SelectableExpression for #column_name { } impl diesel::AppearsOnTable for #column_name where QS: diesel::query_source::AppearsInFromClause, { } impl diesel::SelectableExpression< diesel::internal::table_macro::Join, > for #column_name where #column_name: diesel::AppearsOnTable>, Self: diesel::SelectableExpression, // If our table is on the right side of this join, only // `Nullable` can be selected Right: diesel::query_source::AppearsInFromClause + diesel::query_source::QuerySource, Left: diesel::query_source::QuerySource { } impl diesel::SelectableExpression< diesel::internal::table_macro::Join, > for #column_name where #column_name: diesel::AppearsOnTable>, Left: diesel::query_source::AppearsInFromClause + diesel::query_source::QuerySource, Right: diesel::query_source::AppearsInFromClause + diesel::query_source::QuerySource, (Left::Count, Right::Count): diesel::internal::table_macro::Pick, Self: diesel::SelectableExpression< <(Left::Count, Right::Count) as diesel::internal::table_macro::Pick>::Selection, >, { } // FIXME: Remove this when overlapping marker traits are stable impl diesel::SelectableExpression> for #column_name where #column_name: diesel::SelectableExpression + diesel::AppearsOnTable>, { } // FIXME: Remove this when overlapping marker traits are stable impl diesel::SelectableExpression>> for #column_name where From: diesel::query_source::QuerySource, #column_name: diesel::SelectableExpression + diesel::AppearsOnTable>>, { } impl<__GB> diesel::expression::ValidGrouping<__GB> for #column_name where __GB: diesel::expression::IsContainedInGroupBy<#column_name, Output = diesel::expression::is_contained_in_group_by::Yes>, { type IsAggregate = diesel::expression::is_aggregate::Yes; } impl diesel::expression::ValidGrouping<()> for 
#column_name { type IsAggregate = diesel::expression::is_aggregate::No; } impl diesel::expression::IsContainedInGroupBy<#column_name> for #column_name { type Output = diesel::expression::is_contained_in_group_by::Yes; } impl diesel::query_source::Column for #column_name { type Table = super::table; const NAME: &'static str = #sql_name; } impl diesel::EqAll for #column_name where T: diesel::expression::AsExpression<#sql_type>, diesel::dsl::Eq<#column_name, T::Expression>: diesel::Expression, { type Output = diesel::dsl::Eq; fn eq_all(self, __diesel_internal_rhs: T) -> Self::Output { use diesel::expression_methods::ExpressionMethods; self.eq(__diesel_internal_rhs) } } #max_length #ops_impls #backend_specific_column_impl } } diesel_derives-2.2.3/src/util.rs000064400000000000000000000134421046102023000147340ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse::{Parse, ParseStream, Peek, Result}; use syn::token::Eq; use syn::{parenthesized, parse_quote, Data, DeriveInput, GenericArgument, Ident, Type}; use crate::model::Model; pub const COLUMN_NAME_NOTE: &str = "column_name = foo"; pub const SQL_TYPE_NOTE: &str = "sql_type = Foo"; pub const SERIALIZE_AS_NOTE: &str = "serialize_as = Foo"; pub const DESERIALIZE_AS_NOTE: &str = "deserialize_as = Foo"; pub const TABLE_NAME_NOTE: &str = "table_name = foo"; pub const TREAT_NONE_AS_DEFAULT_VALUE_NOTE: &str = "treat_none_as_default_value = true"; pub const TREAT_NONE_AS_NULL_NOTE: &str = "treat_none_as_null = true"; pub const BELONGS_TO_NOTE: &str = "belongs_to(Foo, foreign_key = foo_id)"; pub const MYSQL_TYPE_NOTE: &str = "mysql_type(name = \"foo\")"; pub const SQLITE_TYPE_NOTE: &str = "sqlite_type(name = \"foo\")"; pub const POSTGRES_TYPE_NOTE: &str = "postgres_type(name = \"foo\", schema = \"public\")"; pub const POSTGRES_TYPE_NOTE_ID: &str = "postgres_type(oid = 37, array_oid = 54)"; pub const SELECT_EXPRESSION_NOTE: &str = "select_expression = schema::table_name::column_name.is_not_null()"; 
pub const SELECT_EXPRESSION_TYPE_NOTE: &str = "select_expression_type = dsl::IsNotNull"; pub const CHECK_FOR_BACKEND_NOTE: &str = "diesel::pg::Pg"; pub fn unknown_attribute(name: &Ident, valid: &[&str]) -> syn::Error { let prefix = if valid.len() == 1 { "" } else { " one of" }; syn::Error::new( name.span(), format!( "unknown attribute, expected{prefix} `{}`", valid.join("`, `") ), ) } pub fn parse_eq(input: ParseStream, help: &str) -> Result { if input.is_empty() { return Err(syn::Error::new( input.span(), format!( "unexpected end of input, expected `=`\n\ help: The correct format looks like `#[diesel({help})]`", ), )); } input.parse::()?; input.parse() } pub fn parse_paren(input: ParseStream, help: &str) -> Result { if input.is_empty() { return Err(syn::Error::new( input.span(), format!( "unexpected end of input, expected parentheses\n\ help: The correct format looks like `#[diesel({help})]`", ), )); } let content; parenthesized!(content in input); content.parse() } pub fn parse_paren_list( input: ParseStream, help: &str, sep: D, ) -> Result::Token>> where T: Parse, D: Peek, D::Token: Parse, { if input.is_empty() { return Err(syn::Error::new( input.span(), format!( "unexpected end of input, expected parentheses\n\ help: The correct format looks like `#[diesel({help})]`", ), )); } let content; parenthesized!(content in input); content.parse_terminated(T::parse, sep) } pub fn wrap_in_dummy_mod(item: TokenStream) -> TokenStream { quote! { #[allow(unused_imports)] const _: () = { // This import is not actually redundant. When using diesel_derives // inside of diesel, `diesel` doesn't exist as an extern crate, and // to work around that it contains a private // `mod diesel { pub use super::*; }` that this import will then // refer to. In all other cases, this imports refers to the extern // crate diesel. 
use diesel; #item }; } } pub fn inner_of_option_ty(ty: &Type) -> &Type { option_ty_arg(ty).unwrap_or(ty) } pub fn is_option_ty(ty: &Type) -> bool { option_ty_arg(ty).is_some() } fn option_ty_arg(mut ty: &Type) -> Option<&Type> { use syn::PathArguments::AngleBracketed; // Check the inner equivalent type loop { match ty { Type::Group(group) => ty = &group.elem, Type::Paren(paren) => ty = &paren.elem, _ => break, } } match *ty { Type::Path(ref ty) => { let last_segment = ty.path.segments.iter().last().unwrap(); match last_segment.arguments { AngleBracketed(ref args) if last_segment.ident == "Option" => { match args.args.iter().last() { Some(GenericArgument::Type(ty)) => Some(ty), _ => None, } } _ => None, } } _ => None, } } pub fn ty_for_foreign_derive(item: &DeriveInput, model: &Model) -> Result { if model.foreign_derive { match item.data { Data::Struct(ref body) => match body.fields.iter().next() { Some(field) => Ok(field.ty.clone()), None => Err(syn::Error::new( proc_macro2::Span::call_site(), "foreign_derive requires at least one field", )), }, _ => Err(syn::Error::new( proc_macro2::Span::call_site(), "foreign_derive can only be used with structs", )), } } else { let ident = &item.ident; let (_, ty_generics, ..) 
= item.generics.split_for_impl(); Ok(parse_quote!(#ident #ty_generics)) } } pub fn camel_to_snake(name: &str) -> String { let mut result = String::with_capacity(name.len()); result.push_str(&name[..1].to_lowercase()); for character in name[1..].chars() { if character.is_uppercase() { result.push('_'); for lowercase in character.to_lowercase() { result.push(lowercase); } } else { result.push(character); } } result } diesel_derives-2.2.3/src/valid_grouping.rs000064400000000000000000000045731046102023000167750ustar 00000000000000use proc_macro2::TokenStream; use quote::quote; use syn::parse_quote; use syn::DeriveInput; use syn::Result; use crate::model::Model; use crate::util::{ty_for_foreign_derive, wrap_in_dummy_mod}; pub fn derive(mut item: DeriveInput) -> Result { let model = Model::from_item(&item, true, false)?; let struct_ty = ty_for_foreign_derive(&item, &model)?; let type_params = item .generics .type_params() .map(|param| param.ident.clone()) .collect::>(); for type_param in type_params { let where_clause = item.generics.make_where_clause(); where_clause .predicates .push(parse_quote!(#type_param: ValidGrouping<__GroupByClause>)); } if model.aggregate { item.generics.params.push(parse_quote!(__GroupByClause)); let (impl_generics, _, where_clause) = item.generics.split_for_impl(); Ok(wrap_in_dummy_mod(quote! 
{ use diesel::expression::{ValidGrouping, MixedAggregates, is_aggregate}; impl #impl_generics ValidGrouping<__GroupByClause> for #struct_ty #where_clause { type IsAggregate = is_aggregate::Yes; } })) } else { let mut aggregates = item .generics .type_params() .map(|t| quote!(#t::IsAggregate)) .collect::>() .into_iter(); let is_aggregate = aggregates .next() .map(|first| { let where_clause = item.generics.make_where_clause(); aggregates.fold(first, |left, right| { where_clause.predicates.push(parse_quote!( #left: MixedAggregates<#right> )); quote!(<#left as MixedAggregates<#right>>::Output) }) }) .unwrap_or_else(|| quote!(is_aggregate::Never)); item.generics.params.push(parse_quote!(__GroupByClause)); let (impl_generics, _, where_clause) = item.generics.split_for_impl(); Ok(wrap_in_dummy_mod(quote! { use diesel::expression::{ValidGrouping, MixedAggregates, is_aggregate}; impl #impl_generics ValidGrouping<__GroupByClause> for #struct_ty #where_clause { type IsAggregate = #is_aggregate; } })) } } diesel_derives-2.2.3/tests/as_changeset.rs000064400000000000000000000512661046102023000167640ustar 00000000000000use crate::helpers::*; use crate::schema::*; use diesel::deserialize::FromSqlRow; use diesel::expression::AsExpression; use diesel::*; #[test] fn named_ref_struct() { #[derive(AsChangeset)] struct User { name: String, hair_color: String, r#type: String, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&User { name: String::from("Jim"), hair_color: String::from("blue"), r#type: String::from("super"), }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn named_struct() { #[derive(AsChangeset)] struct User { name: String, 
hair_color: String, r#type: String, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(User { name: String::from("Jim"), hair_color: String::from("blue"), r#type: String::from("super"), }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn with_explicit_table_name() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm { name: String, hair_color: String, r#type: String, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: String::from("Jim"), hair_color: String::from("blue"), r#type: String::from("super"), }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn with_path_in_table_name() { #[derive(AsChangeset)] #[diesel(table_name = crate::schema::users)] struct UserForm { name: String, hair_color: String, r#type: String, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: String::from("Jim"), hair_color: String::from("blue"), r#type: String::from("super"), }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn 
with_lifetime() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { name: &'a str, hair_color: &'a str, r#type: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: "Jim", hair_color: "blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn with_multiple_lifetimes() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a, 'b, 'c> { name: &'a str, hair_color: &'b str, r#type: &'c str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: "Jim", hair_color: "blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn with_lifetime_constraints() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a, 'b: 'a, 'c: 'b> { name: &'a str, hair_color: &'b str, r#type: &'c str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: "Jim", hair_color: "blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } 
#[test] fn with_explicit_column_names() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { #[diesel(column_name = name)] nombre: &'a str, #[diesel(column_name = hair_color)] color_de_pelo: &'a str, #[diesel(column_name = "type")] tipe: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { nombre: "Jim", color_de_pelo: "blue", tipe: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn with_explicit_column_names_raw_type() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { #[diesel(column_name = name)] nombre: &'a str, #[diesel(column_name = hair_color)] color_de_pelo: &'a str, #[diesel(column_name = r#type)] tipe: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { nombre: "Jim", color_de_pelo: "blue", tipe: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn with_serialize_as() { #[derive(Debug, FromSqlRow, AsExpression)] #[diesel(sql_type = sql_types::Text)] struct UppercaseString(pub String); impl From for UppercaseString { fn from(val: String) -> Self { UppercaseString(val.to_uppercase()) } } impl serialize::ToSql for UppercaseString where DB: backend::Backend, String: serialize::ToSql, { fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, DB>) -> serialize::Result { 
self.0.to_sql(out) } } #[derive(AsChangeset)] struct User { #[diesel(serialize_as = UppercaseString)] name: String, #[diesel(serialize_as = UppercaseString)] hair_color: Option, #[diesel(serialize_as = UppercaseString)] r#type: Option, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(User { name: String::from("Jim"), hair_color: Some(String::from("blue")), r#type: Some(String::from("super")), }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("JIM"), Some(String::from("BLUE")), Some(String::from("SUPER")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn tuple_struct() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a>( #[diesel(column_name = name)] &'a str, #[diesel(column_name = hair_color)] &'a str, #[diesel(column_name = "type")] &'a str, ); let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm("Jim", "blue", "super")) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn tuple_struct_raw_type() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a>( #[diesel(column_name = name)] &'a str, #[diesel(column_name = hair_color)] &'a str, #[diesel(column_name = r#type)] &'a str, ); let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm("Jim", "blue", "super")) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 
2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn struct_containing_single_field() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { name: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: "Jim" }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("black")), Some(String::from("regular")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn tuple_struct_containing_single_field() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a>(#[diesel(column_name = name)] &'a str); let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm("Jim")) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("black")), Some(String::from("regular")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn primary_key_is_not_updated() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { #[allow(dead_code)] id: i32, name: &'a str, hair_color: &'a str, r#type: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { id: 3, name: "Jim", hair_color: "blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), 
Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn primary_key_is_based_on_column_name() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { #[diesel(column_name = id)] _id: i32, name: &'a str, hair_color: &'a str, r#type: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { _id: 3, name: "Jim", hair_color: "blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn primary_key_is_not_updated_with_custom_pk() { #[derive(AsChangeset)] #[diesel(table_name = users)] #[diesel(primary_key(name))] struct UserForm<'a> { #[allow(dead_code)] name: &'a str, hair_color: &'a str, r#type: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: "Jim", hair_color: "blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Sean"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn primary_key_is_not_updated_with_custom_composite_pk() { #[derive(AsChangeset)] #[diesel(table_name = users)] #[diesel(primary_key(id, name))] #[allow(dead_code)] struct UserForm<'a> { id: i32, name: &'a str, hair_color: &'a str, r#type: &'a str, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { id: 3, name: "Jim", hair_color: 
"blue", r#type: "super", }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Sean"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Tess"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn option_fields_are_skipped() { #[derive(AsChangeset)] #[diesel(table_name = users)] struct UserForm<'a> { name: &'a str, hair_color: Option<&'a str>, r#type: Option<&'a str>, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: "Jim", hair_color: Some("blue"), r#type: Some("super"), }) .execute(connection) .unwrap(); update(users::table.find(2)) .set(&UserForm { name: "Ruby", hair_color: None, r#type: None, }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Jim"), Some(String::from("blue")), Some(String::from("super")), ), ( 2, String::from("Ruby"), Some(String::from("brown")), Some(String::from("admin")), ), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] fn option_fields_are_assigned_null_when_specified() { #[derive(AsChangeset)] #[diesel(table_name = users)] #[diesel(treat_none_as_null = true)] struct UserForm<'a> { #[diesel(treat_none_as_null = false)] name: Option<&'a str>, hair_color: Option<&'a str>, #[diesel(treat_none_as_null = false)] r#type: Option<&'a str>, } let connection = &mut connection_with_sean_and_tess_in_users_table(); update(users::table.find(1)) .set(&UserForm { name: None, hair_color: Some("blue"), r#type: None, }) .execute(connection) .unwrap(); update(users::table.find(2)) .set(&UserForm { name: Some("Ruby"), hair_color: None, r#type: None, }) .execute(connection) .unwrap(); let expected = vec![ ( 1, String::from("Sean"), Some(String::from("blue")), Some(String::from("regular")), ), (2, String::from("Ruby"), None, 
Some(String::from("admin"))), ]; let actual = users::table.order(users::id).load(connection); assert_eq!(Ok(expected), actual); } #[test] #[allow(unused_parens)] fn option_fields_are_correctly_detected() { diesel::table! { test_table (id) { id -> Int8, test -> Text, } } macro_rules! define { ($field_ty:ty) => { #[derive(diesel::AsChangeset)] #[diesel(table_name = test_table)] pub struct S1 { pub test: (($field_ty)), } #[derive(diesel::AsChangeset)] #[diesel(table_name = test_table)] pub struct S2 { pub test: (((Option))), } }; } // Causes a compile error if the field is not detected as `Option` define!((((Option)))); } diesel_derives-2.2.3/tests/as_expression.rs000064400000000000000000000034051046102023000172120ustar 00000000000000use diesel::backend::Backend; use diesel::deserialize::{FromSql, FromSqlRow}; use diesel::expression::AsExpression; use diesel::serialize::{Output, ToSql}; use diesel::sql_types::Binary; use diesel::sql_types::Text; use diesel::*; use crate::helpers::connection; table! 
{ my_structs (foo) { foo -> Integer, bar -> Text, } } #[derive(Debug, AsExpression, FromSqlRow, Clone, Copy, PartialEq)] #[diesel(sql_type = Text)] struct StringArray(pub [u8; N]); impl FromSql for StringArray where DB: Backend, String: FromSql, { fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result { let string = >::from_sql(bytes)?; let bytes_array: [u8; N] = string.into_bytes().try_into().unwrap(); Ok(StringArray(bytes_array)) } } impl ToSql for StringArray where DB: Backend, str: ToSql, { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result { let string = std::str::from_utf8(&self.0).unwrap(); string.to_sql(out) } } #[test] fn struct_with_sql_type() { #[derive(Debug, Clone, PartialEq, Queryable, Selectable)] #[diesel(table_name = my_structs)] struct MyStruct { foo: i32, bar: StringArray<4>, } let conn = &mut connection(); let data = my_structs::table .select(MyStruct::as_select()) .get_result(conn); assert!(data.is_err()); } // check that defaulted type parameters compile correctly // This is a regression test for https://github.com/diesel-rs/diesel/issues/3902 #[derive(AsExpression, FromSqlRow)] #[diesel(sql_type = Binary)] pub struct Ewkb = Vec>(pub B); diesel_derives-2.2.3/tests/associations.rs000064400000000000000000000204631046102023000170320ustar 00000000000000use crate::helpers::*; use diesel::*; type Backend = ::Backend; // https://github.com/rust-lang/rust/issues/124396 #[allow(unknown_lints, non_local_definitions)] #[test] fn simple_belongs_to() { table! { users { id -> Integer, name -> Text, } } table! 
{ posts { id -> Integer, user_id -> Integer, title -> Text, } } allow_tables_to_appear_in_same_query!(users, posts); #[derive(Identifiable)] pub struct User { id: i32, } #[derive(Associations, Identifiable)] #[diesel(belongs_to(User))] pub struct Post { id: i32, user_id: i32, } joinable!(posts -> users(user_id)); let _can_join_tables = posts::table .inner_join(users::table) .select((users::id, users::name, posts::id)) .filter( posts::id .eq(1) .and(posts::user_id.eq(2)) .and(posts::title.eq("Bar")), ); let _can_reverse_join_tables = users::table .inner_join(posts::table) .select((posts::id, posts::user_id, posts::title)) .filter(users::id.eq(1).and(users::name.eq("Sean"))); let t = User { id: 42 }; let belong_to = Post::belonging_to(&t); let filter = posts::table.filter(posts::user_id.eq(42)); assert_eq!( debug_query::(&belong_to).to_string(), debug_query::(&filter).to_string() ); } #[test] fn table_in_different_module() { mod schema { table! { users { id -> Integer, name -> Text, } } table! 
{ posts { id -> Integer, user_id -> Integer, title -> Text, } } allow_tables_to_appear_in_same_query!(users, posts); joinable!(posts -> users(user_id)); } #[derive(Identifiable)] #[diesel(table_name = schema::users)] pub struct User { id: i32, } #[derive(Associations, Identifiable)] #[diesel(table_name = schema::posts)] #[diesel(belongs_to(User))] pub struct Post { id: i32, user_id: i32, } let _can_join_tables = schema::posts::table .inner_join(schema::users::table) .select((schema::users::id, schema::users::name, schema::posts::id)) .filter( schema::posts::id .eq(1) .and(schema::posts::user_id.eq(2)) .and(schema::posts::title.eq("Bar")), ); let _can_reverse_join_tables = schema::users::table .inner_join(schema::posts::table) .select(( schema::posts::id, schema::posts::user_id, schema::posts::title, )) .filter(schema::users::id.eq(1).and(schema::users::name.eq("Sean"))); let t = User { id: 42 }; let belong_to = Post::belonging_to(&t); let filter = schema::posts::table.filter(schema::posts::user_id.eq(42)); assert_eq!( debug_query::(&belong_to).to_string(), debug_query::(&filter).to_string() ); } // https://github.com/rust-lang/rust/issues/124396 #[allow(unknown_lints, non_local_definitions)] #[test] fn custom_foreign_key() { table! { users { id -> Integer, name -> Text, } } table! 
{ posts { id -> Integer, belongs_to_user -> Integer, title -> Text, } } allow_tables_to_appear_in_same_query!(users, posts); #[derive(Identifiable)] pub struct User { id: i32, } #[derive(Associations, Identifiable)] #[diesel(belongs_to(User, foreign_key = belongs_to_user))] pub struct Post { id: i32, belongs_to_user: i32, } joinable!(posts -> users(belongs_to_user)); let _can_join_tables = posts::table .inner_join(users::table) .select((users::id, users::name)) .filter( posts::id .eq(1) .and(posts::belongs_to_user.eq(2)) .and(posts::title.eq("Bar")), ); let _can_reverse_join_tables = users::table .inner_join(posts::table) .select((posts::id, posts::belongs_to_user, posts::title)) .filter(users::id.eq(1).and(users::name.eq("Sean"))); let t = User { id: 42 }; let belong_to = Post::belonging_to(&t); let filter = posts::table.filter(posts::belongs_to_user.eq(42)); assert_eq!( debug_query::(&belong_to).to_string(), debug_query::(&filter).to_string() ); } #[test] fn self_referential() { table! { trees { id -> Integer, parent_id -> Nullable, } } #[derive(Associations, Identifiable)] #[diesel(belongs_to(Tree, foreign_key = parent_id))] pub struct Tree { id: i32, parent_id: Option, } let t = Tree { id: 42, parent_id: None, }; let belong_to = Tree::belonging_to(&t); let filter = trees::table.filter(trees::parent_id.eq(42)); assert_eq!( debug_query::(&belong_to).to_string(), debug_query::(&filter).to_string() ); } #[test] fn multiple_associations() { table! { users { id -> Integer, } } table! { posts { id -> Integer, } } table! 
{ comments { id -> Integer, user_id -> Integer, post_id -> Integer, } } #[derive(Identifiable)] struct User { id: i32, } #[derive(Identifiable)] struct Post { id: i32, } #[derive(Identifiable, Associations)] #[diesel(belongs_to(User))] #[diesel(belongs_to(Post))] struct Comment { id: i32, user_id: i32, post_id: i32, } let user = User { id: 1 }; let post = Post { id: 2 }; let query = Comment::belonging_to(&user); let expected = comments::table.filter(comments::user_id.eq(1)); assert_eq!( debug_query::(&query).to_string(), debug_query::(&expected).to_string() ); let query = Comment::belonging_to(&post); let expected = comments::table.filter(comments::post_id.eq(2)); assert_eq!( debug_query::(&query).to_string(), debug_query::(&expected).to_string() ); } #[test] fn foreign_key_field_with_column_rename() { table! { users { id -> Integer, } } table! { posts { id -> Integer, user_id -> Integer, } } #[derive(Identifiable, Clone, Copy)] pub struct User { id: i32, } #[derive(Associations, Identifiable, Clone, Copy, PartialEq, Debug, Eq)] #[diesel(belongs_to(User))] pub struct Post { id: i32, #[diesel(column_name = user_id)] author_id: i32, } let user1 = User { id: 1 }; let user2 = User { id: 2 }; let post1 = Post { id: 1, author_id: 2, }; let post2 = Post { id: 2, author_id: 1, }; let query = Post::belonging_to(&user1); let expected = posts::table.filter(posts::user_id.eq(1)); assert_eq!( debug_query::(&query).to_string(), debug_query::(&expected).to_string() ); let users = vec![user1, user2]; let posts = vec![post1, post2].grouped_by(&users); assert_eq!(vec![vec![post2], vec![post1]], posts); } #[test] fn tuple_struct() { table! { users { id -> Integer, } } table! 
{ posts { id -> Integer, user_id -> Integer, } } #[derive(Identifiable)] pub struct User { id: i32, } #[derive(Associations, Identifiable)] #[diesel(belongs_to(User))] pub struct Post( #[diesel(column_name = id)] i32, #[diesel(column_name = user_id)] i32, ); let user = User { id: 1 }; let query = Post::belonging_to(&user); let expected = posts::table.filter(posts::user_id.eq(1)); assert_eq!( debug_query::(&query).to_string(), debug_query::(&expected).to_string() ); } diesel_derives-2.2.3/tests/auto_type.rs000064400000000000000000000225221046102023000163420ustar 00000000000000#![allow(dead_code)] // this is a compile pass test use diesel::dsl::*; use diesel::helper_types::*; use diesel::prelude::*; use diesel::sql_types; table! { users { id -> Integer, name -> Text, time -> Timestamp, } } table! { posts { id -> Integer, user_id -> Integer, } } table! { posts2 { id -> Integer, user_id -> Integer, } } table! { posts3 { id -> Integer, user_id -> Integer, } } #[cfg(feature = "postgres")] table! 
{ pg_extras(id) { id -> Integer, json -> Json, jsonb -> Jsonb, net -> Inet, array -> Array, blob -> Binary, timestamp -> Timestamp, range -> Range, } } joinable!(posts -> users(user_id)); joinable!(posts2 -> users(user_id)); joinable!(posts3 -> users(user_id)); allow_tables_to_appear_in_same_query!(users, posts, posts2, posts3); #[auto_type] fn test_all_query_dsl() -> _ { users::table .distinct() .filter(users::id.eq(42_i32)) .find(42_i32) .or_filter(users::id.eq(42_i32)) .limit(23_i64) .offset(12_i64) .order(users::id) .order_by(users::id) .then_order_by(users::id) .select(users::id) .group_by(users::id) .having(users::id.eq(32_i32)) .inner_join(posts::table) .left_join(posts2::table) .inner_join(posts3::table.on(users::id.eq(posts3::user_id))) //.into_boxed() } #[auto_type] fn single_value() -> _ { users::table.select(users::id).find(42_i32).single_value() } #[cfg(feature = "postgres")] #[auto_type] fn test_distinct_on() -> _ { users::table.distinct_on(users::id) } #[auto_type] fn test_lock_dsl1() -> _ { users::table.for_key_share().no_wait().skip_locked() } #[auto_type] fn test_lock_dsl2() -> _ { users::table.for_no_key_update() } #[auto_type] fn test_lock_dsl3() -> _ { users::table.for_share() } #[auto_type] fn test_lock_dsl4() -> _ { users::table.for_update() } // #[auto_type] // fn test_count_query() -> _ { // users::table.find(1_i32).count() // } #[auto_type] fn test_expression_methods() -> _ { let v = 42_i32; let v2: &'static [i32] = &[42]; users::id .eq(v) .and(users::id.ne(v)) .and(users::id.eq_any(v2)) .and(users::id.ne_all(v2)) .and(users::id.gt(v)) .and(users::id.lt(v)) .and(users::id.is_not_null()) .and(users::id.is_null()) .and(users::id.le(v)) .and(users::id.ge(v)) .and(users::id.between(v, v)) .and(users::id.not_between(v, v)) } #[auto_type] fn test_boolean_expression_methods() -> _ { let v = 42_i32; users::id.eq(v).and(users::id.eq(v)).or(users::id.eq(v)) } #[auto_type] fn test_nullable_expression_methods() -> _ { 
users::id.nullable().assume_not_null() } #[auto_type] fn test_text_expression_methods() -> _ { let a: &'static str = "foo"; users::name .like(a) .and(users::name.not_like(a)) .and(users::name.concat(a).eq(a)) } #[auto_type] fn test_delete() -> _ { delete(users::table) } #[auto_type] fn test_delete_2() -> _ { delete(users::table.find({ // Test that type ascriptions via nested blocks work let id: i32 = 1; id })) } #[auto_type] fn test_delete_3() -> _ { delete(users::table).filter(users::id.eq(1_i32)) } // #[auto_type] // fn test_update() -> _ { // update(users::table).set(users::id.eq(42_i32)) // } #[auto_type] fn test_insert1() -> _ { insert_into(users::table).values(users::id.eq(42_i32)) } /*#[auto_type] fn test_insert2() -> _ { users::table .insert_into(users::table) .into_columns(users::all_columns) }*/ #[auto_type] fn test_insert_or_ignore() -> _ { insert_or_ignore_into(users::table).values(users::id.eq(42_i32)) } #[auto_type] fn test_insert_or_replace() -> _ { replace_into(users::table).values(users::id.eq(42_i32)) } #[auto_type] fn test_bare_select() -> _ { select(1_i32.into_sql::()) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_expression_methods() -> _ { let v = 42_i32; users::id .is_not_distinct_from(v) .and(users::id.is_distinct_from(v)) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_text_expression_methods() -> _ { let a: &'static str = "foo"; users::name .ilike(a) .and(users::name.not_ilike(a)) .and(users::name.similar_to(a)) .and(users::name.not_similar_to(a)) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_net_expression_methods() -> _ { // cannot be supported on diesel 2.x as the contains operator for net // is different than the "normal" contains operator // We could probably rename this function to `contains_net` to make it work //pg_extras::net.contains(pg_extras::net) pg_extras::net .contains_or_eq(pg_extras::net) // cannot be supported on diesel 2.x due to similar reasons // as `contains` 
//.and(pg_extras::net.is_contained_by(pg_extras::net)) .and(pg_extras::net.is_contained_by_or_eq(pg_extras::net)) .and(pg_extras::net.overlaps_with(pg_extras::net)) // `.and()` and `or()` for inet cannot be supported as that name collides // with `BoolExpressionMethods` //.and(pg_extras::net.and(pg_extras::net).contains_or_eq(pg_extras::net)) //.and(pg_extras::net.or(pg_extras::net).contains(pg_extras::net)) .and(pg_extras::net.diff(pg_extras::net).eq(42_i64)) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_array_expression_methods() -> _ { let v = 42_i32; pg_extras::array .overlaps_with(pg_extras::array) .and(pg_extras::array.contains(pg_extras::array)) .and(pg_extras::array.is_contained_by(pg_extras::array)) .and(pg_extras::array.index(v).eq(v)) .and( pg_extras::array .concat(pg_extras::array) .eq(pg_extras::array), ) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_jsonb_expression_methods() -> _ { let s: &'static str = ""; let v: &'static [&'static str] = &[]; pg_extras::jsonb .concat(pg_extras::jsonb) .eq(pg_extras::jsonb) .and(pg_extras::jsonb.has_any_key(v)) .and(pg_extras::jsonb.has_all_keys(v)) .and(pg_extras::jsonb.has_key(s)) .and(pg_extras::jsonb.contains(pg_extras::jsonb)) .and(pg_extras::jsonb.remove(1_i32).eq(pg_extras::jsonb)) .and(pg_extras::jsonb.remove_by_path(v).eq(pg_extras::jsonb)) .and(pg_extras::jsonb.is_contained_by(pg_extras::jsonb)) } // `.contains()` cannot be supported here as // the type level constraints are slightly different // for `Range<>` than for the other types that provide a `contains()` // function. 
We could likely support it by // renaming the function to `.range_contains()` (or something similar) /* #[cfg(feature = "postgres")] #[auto_type] fn test_pg_range_expression_methods() -> _ { pg_extras::range.contains(42_i32) }*/ #[cfg(feature = "postgres")] #[auto_type] fn test_pg_binary_expression_methods() -> _ { let b: &'static [u8] = &[]; pg_extras::blob .concat(pg_extras::blob) .like(pg_extras::blob) .and(pg_extras::blob.not_like(b)) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_any_json_expression_methods() -> _ { let s: &'static str = ""; let s2: &'static [&'static str] = &[]; pg_extras::jsonb .retrieve_as_object(s) .retrieve_as_text(s) .eq(s) .and( pg_extras::jsonb .retrieve_by_path_as_object(s2) .retrieve_by_path_as_text(s2) .eq(s), ) } #[cfg(feature = "postgres")] #[auto_type] fn test_pg_timestamp_expression_methods() -> _ { let s: &'static str = ""; pg_extras::timestamp.at_time_zone(s) } #[cfg(feature = "sqlite")] #[auto_type] fn test_sqlite_expression_methods() -> _ { users::id.is(42_i32).or(users::id.is_not(42_i32)) } #[auto_type] fn test_aggregate_functions() -> _ { users::table.select(( avg(users::id), count(users::id), count_distinct(users::id), count_star(), max(users::id), min(users::id), sum(users::id), )) } #[auto_type] fn test_normal_functions() -> _ { users::table.select(( date(users::time), exists(posts::table.select(posts::id)), not(users::id.eq(1_i32)), case_when(users::id.eq(1_i32), users::id), case_when(users::id.eq(1_i32), users::id).when(users::id.eq(42_i32), users::id), case_when(users::id.eq(1_i32), users::id) .when(users::id.eq(42_i32), users::id) .otherwise(users::id), case_when(users::id.eq(1_i32), users::id).otherwise(users::id), )) } #[auto_type] fn with_lifetime<'a>(name: &'a str) -> _ { users::table.filter(users::name.eq(name)) } #[auto_type] fn with_type_generics<'a, T>(name: &'a T) -> _ where &'a T: diesel::expression::AsExpression, { users::name.eq(name) } #[auto_type] fn with_const_generics() -> _ { users::id.eq(N) 
} // #[auto_type] // fn test_sql_fragment() -> _ { // sql("foo") // } // #[auto_type] // fn test_sql_query_1() -> _ { // sql_query("bar") // } // #[auto_type] // fn test_sql_query_2() -> _ { // sql_query("bar").bind::(1) // } diesel_derives-2.2.3/tests/helpers.rs000064400000000000000000000110571046102023000157740ustar 00000000000000use diesel::prelude::*; use diesel::sql_query; #[allow(dead_code)] // that's used in one of the compile tests pub type TestBackend = ::Backend; cfg_if! { if #[cfg(feature = "sqlite")] { pub type TestConnection = SqliteConnection; pub fn connection() -> TestConnection { let mut conn = SqliteConnection::establish(":memory:").unwrap(); sql_query("CREATE TABLE users (\ id INTEGER PRIMARY KEY AUTOINCREMENT, \ name VARCHAR NOT NULL, \ hair_color VARCHAR DEFAULT 'Green', type VARCHAR DEFAULT 'regular')") .execute(&mut conn) .unwrap(); sql_query("CREATE TABLE users_ (\ id INTEGER PRIMARY KEY AUTOINCREMENT, \ name VARCHAR NOT NULL, \ hair_color VARCHAR DEFAULT 'Green', type VARCHAR DEFAULT 'regular')") .execute(&mut conn) .unwrap(); conn } } else if #[cfg(feature = "postgres")] { extern crate dotenvy; pub type TestConnection = PgConnection; pub fn connection() -> TestConnection { let database_url = dotenvy::var("PG_DATABASE_URL") .or_else(|_| dotenvy::var("DATABASE_URL")) .expect("DATABASE_URL must be set in order to run tests"); let mut conn = PgConnection::establish(&database_url).unwrap(); conn.begin_test_transaction().unwrap(); sql_query("DROP TABLE IF EXISTS users CASCADE").execute(&mut conn).unwrap(); sql_query("DROP TABLE IF EXISTS users_ CASCADE").execute(&mut conn).unwrap(); sql_query("CREATE TABLE users (\ id SERIAL PRIMARY KEY, \ name VARCHAR NOT NULL, \ hair_color VARCHAR DEFAULT 'Green', type VARCHAR DEFAULT 'regular')") .execute(&mut conn) .unwrap(); sql_query("CREATE TABLE users_ (\ id SERIAL PRIMARY KEY, \ name VARCHAR NOT NULL, \ hair_color VARCHAR DEFAULT 'Green', type VARCHAR DEFAULT 'regular')") .execute(&mut conn) .unwrap(); 
conn } } else if #[cfg(feature = "mysql")] { extern crate dotenvy; pub type TestConnection = MysqlConnection; pub fn connection() -> TestConnection { let database_url = dotenvy::var("MYSQL_UNIT_TEST_DATABASE_URL") .or_else(|_| dotenvy::var("DATABASE_URL")) .expect("DATABASE_URL must be set in order to run tests"); let mut conn = MysqlConnection::establish(&database_url).unwrap(); sql_query("DROP TABLE IF EXISTS users CASCADE").execute(&mut conn).unwrap(); sql_query("DROP TABLE IF EXISTS users_ CASCADE").execute(&mut conn).unwrap(); sql_query("CREATE TABLE users (\ id INTEGER PRIMARY KEY AUTO_INCREMENT, \ name TEXT NOT NULL, \ hair_color VARCHAR(255) DEFAULT 'Green', type VARCHAR(255) DEFAULT 'regular')") .execute(&mut conn) .unwrap(); sql_query("CREATE TABLE users_ (\ id INTEGER PRIMARY KEY AUTO_INCREMENT, \ name TEXT NOT NULL, \ hair_color VARCHAR(255) DEFAULT 'Green', type VARCHAR(255) DEFAULT 'regular')") .execute(&mut conn) .unwrap(); conn.begin_test_transaction().unwrap(); conn } } else { compile_error!( "At least one backend must be used to test this crate.\n \ Pass argument `--features \"\"` with one or more of the following backends, \ 'mysql', 'postgres', or 'sqlite'. \n\n \ ex. cargo test --features \"mysql postgres sqlite\"\n" ); } } pub fn connection_with_sean_and_tess_in_users_table() -> TestConnection { use crate::schema::users::dsl::*; let mut connection = connection(); diesel::insert_into(users) .values(&vec![ ( id.eq(1), name.eq("Sean"), hair_color.eq("black"), r#type.eq("regular"), ), ( id.eq(2), name.eq("Tess"), hair_color.eq("brown"), r#type.eq("admin"), ), ]) .execute(&mut connection) .unwrap(); connection } diesel_derives-2.2.3/tests/identifiable.rs000064400000000000000000000056221046102023000167520ustar 00000000000000use diesel::associations::Identifiable; table! { foos { id -> Integer, } } table! 
{ bars { id -> VarChar, } } #[test] fn derive_identifiable_on_simple_struct() { #[derive(Identifiable)] struct Foo { id: i32, #[allow(dead_code)] foo: i32, } let foo1 = Foo { id: 1, foo: 2 }; let foo2 = Foo { id: 2, foo: 3 }; assert_eq!(&1, foo1.id()); assert_eq!(&2, foo2.id()); } #[test] fn derive_identifiable_on_tuple_struct() { #[derive(Identifiable)] struct Foo( #[diesel(column_name = id)] i32, #[allow(dead_code)] #[diesel(column_name = lol)] i32, ); let foo1 = Foo(1, 2); let foo2 = Foo(2, 3); assert_eq!(&1, foo1.id()); assert_eq!(&2, foo2.id()); } #[test] fn derive_identifiable_when_id_is_not_first_field() { #[derive(Identifiable)] struct Foo { #[allow(dead_code)] foo: i32, id: i32, } let foo1 = Foo { id: 1, foo: 2 }; let foo2 = Foo { id: 2, foo: 3 }; assert_eq!(&1, foo1.id()); assert_eq!(&2, foo2.id()); } #[test] fn derive_identifiable_on_struct_with_non_integer_pk() { #[derive(Identifiable)] #[diesel(table_name = bars)] struct Foo { id: &'static str, #[allow(dead_code)] foo: i32, } let foo1 = Foo { id: "hi", foo: 2 }; let foo2 = Foo { id: "there", foo: 3, }; assert_eq!(&"hi", foo1.id()); assert_eq!(&"there", foo2.id()); } #[test] fn derive_identifiable_on_struct_with_lifetime() { #[derive(Identifiable)] #[diesel(table_name = bars)] struct Foo<'a> { id: &'a str, #[allow(dead_code)] foo: i32, } let foo1 = Foo { id: "hi", foo: 2 }; let foo2 = Foo { id: "there", foo: 3, }; assert_eq!(&"hi", foo1.id()); assert_eq!(&"there", foo2.id()); } #[test] fn derive_identifiable_with_non_standard_pk() { #[allow(dead_code)] #[derive(Identifiable)] #[diesel(table_name = bars)] #[diesel(primary_key(foo_id))] struct Foo<'a> { id: i32, foo_id: &'a str, foo: i32, } let foo1 = Foo { id: 1, foo_id: "hi", foo: 2, }; let foo2 = Foo { id: 2, foo_id: "there", foo: 3, }; assert_eq!(&"hi", foo1.id()); assert_eq!(&"there", foo2.id()); } #[test] fn derive_identifiable_with_composite_pk() { #[allow(dead_code)] #[derive(Identifiable)] #[diesel(table_name = bars)] #[diesel(primary_key(foo_id, 
bar_id))] struct Foo { id: i32, foo_id: i32, bar_id: i32, foo: i32, } let foo1 = Foo { id: 1, foo_id: 2, bar_id: 3, foo: 4, }; let foo2 = Foo { id: 5, foo_id: 6, bar_id: 7, foo: 8, }; assert_eq!((&2, &3), foo1.id()); assert_eq!((&6, &7), foo2.id()); } diesel_derives-2.2.3/tests/insertable.rs000064400000000000000000000352011046102023000164570ustar 00000000000000use crate::helpers::*; use crate::schema::*; use diesel::serialize::Output; use diesel::*; #[test] fn simple_struct_definition() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser { name: String, hair_color: String, r#type: String, } let conn = &mut connection(); let new_user = NewUser { name: "Sean".into(), hair_color: "Black".into(), r#type: "regular".into(), }; insert_into(users::table) .values(new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn with_implicit_table_name() { #[derive(Insertable)] struct User { name: String, hair_color: String, r#type: String, } let conn = &mut connection(); let new_user = User { name: "Sean".into(), hair_color: "Black".into(), r#type: "regular".into(), }; insert_into(users::table) .values(new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn with_path_in_table_name() { #[derive(Insertable)] #[diesel(table_name = crate::schema::users)] struct NewUser { name: String, hair_color: String, r#type: String, } let conn = &mut connection(); let new_user = NewUser { name: "Sean".into(), hair_color: "Black".into(), r#type: "regular".into(), }; insert_into(users::table) 
.values(new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn simple_reference_definition() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser { name: String, hair_color: String, r#type: String, } let conn = &mut connection(); let new_user = NewUser { name: "Sean".into(), hair_color: "Black".into(), r#type: "regular".into(), }; insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn multiple_tables() { #[derive(Clone, Insertable)] #[diesel(table_name = users)] #[diesel(table_name = users_)] struct NewUser { name: String, hair_color: String, r#type: String, } let conn = &mut connection(); let new_user = NewUser { name: "Sean".into(), hair_color: "Black".into(), r#type: "regular".into(), }; insert_into(users::table) .values(new_user.clone()) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected.clone()), saved); insert_into(users_::table) .values(new_user) .execute(conn) .unwrap(); let saved = users_::table .select((users_::name, users_::hair_color, users_::r#type)) .load::<(String, Option, Option)>(conn); assert_eq!(Ok(expected), saved); } macro_rules! 
test_struct_definition { ($test_name:ident, $struct_def:item) => { #[test] fn $test_name() { #[derive(Insertable)] #[diesel(table_name = users)] $struct_def let conn = &mut connection(); let new_user = NewUser { name: "Sean".into(), hair_color: None, r#type: Some("regular".into()) }; insert_into(users::table).values(&new_user).execute(conn).unwrap(); let saved = users::table.select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![("Sean".to_string(), Some("Green".to_string()), Some("regular".to_string()))]; assert_eq!(Ok(expected), saved); } } } test_struct_definition! { struct_with_option_field, struct NewUser { name: String, hair_color: Option, r#type: Option, } } test_struct_definition! { pub_struct_definition, pub struct NewUser { name: String, hair_color: Option, r#type: Option, } } test_struct_definition! { struct_with_pub_field, pub struct NewUser { pub name: String, hair_color: Option, r#type: Option, } } test_struct_definition! { struct_with_pub_option_field, pub struct NewUser { name: String, pub hair_color: Option, r#type: Option, } } test_struct_definition! 
{ named_struct_with_borrowed_body, struct NewUser<'a> { name: &'a str, hair_color: Option<&'a str>, r#type: Option<&'a str>, } } #[test] fn named_struct_with_renamed_field() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser { #[diesel(column_name = name)] my_name: String, hair_color: String, r#type: String, } let conn = &mut connection(); let new_user = NewUser { my_name: "Sean".into(), hair_color: "Black".into(), r#type: "regular".into(), }; insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn named_struct_with_renamed_option_field() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser { #[diesel(column_name = name)] my_name: String, #[diesel(column_name = hair_color)] my_hair_color: Option, #[diesel(column_name = "type")] my_type: String, } let conn = &mut connection(); let new_user = NewUser { my_name: "Sean".into(), my_hair_color: None, my_type: "regular".into(), }; insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Green".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn named_struct_with_renamed_option_field_raw_type() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser { #[diesel(column_name = name)] my_name: String, #[diesel(column_name = hair_color)] my_hair_color: Option, #[diesel(column_name = r#type)] my_type: String, } let conn = &mut connection(); let new_user = NewUser { my_name: "Sean".into(), my_hair_color: None, my_type: "regular".into(), }; insert_into(users::table) 
.values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Green".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn tuple_struct() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser<'a>( #[diesel(column_name = name)] &'a str, #[diesel(column_name = hair_color)] Option<&'a str>, #[diesel(column_name = "type")] Option<&'a str>, ); let conn = &mut connection(); let new_user = NewUser("Sean", None, Some("regular")); insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Green".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn tuple_struct_raw_type() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser<'a>( #[diesel(column_name = name)] &'a str, #[diesel(column_name = hair_color)] Option<&'a str>, #[diesel(column_name = r#type)] Option<&'a str>, ); let conn = &mut connection(); let new_user = NewUser("Sean", None, Some("regular")); insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load::<(String, Option, Option)>(conn); let expected = vec![( "Sean".to_string(), Some("Green".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn named_struct_with_unusual_reference_type() { #[derive(Insertable)] #[diesel(table_name = users)] struct NewUser<'a> { name: &'a String, hair_color: Option<&'a String>, r#type: Option<&'a String>, } let conn = &mut connection(); let sean = "Sean".to_string(); let black = "Black".to_string(); let regular = "regular".to_string(); let new_user = NewUser { 
name: &sean, hair_color: Some(&black), r#type: Some(®ular), }; insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::name, users::hair_color, users::r#type)) .load(conn); let expected = vec![(sean.clone(), Some(black.clone()), Some(regular.clone()))]; assert_eq!(Ok(expected), saved); } #[test] #[cfg(all(feature = "postgres", not(feature = "sqlite")))] fn insertable_with_slice_of_borrowed() { table! { posts { id -> Serial, tags -> Array, } } #[derive(Insertable)] #[diesel(table_name = posts)] struct NewPost<'a> { tags: &'a [&'a str], } let conn = &mut connection(); sql_query("DROP TABLE IF EXISTS posts CASCADE") .execute(conn) .unwrap(); sql_query("CREATE TABLE posts (id SERIAL PRIMARY KEY, tags TEXT[] NOT NULL)") .execute(conn) .unwrap(); let new_post = NewPost { tags: &["hi", "there"], }; insert_into(posts::table) .values(&new_post) .execute(conn) .unwrap(); let saved = posts::table.select(posts::tags).load::>(conn); let expected = vec![vec![String::from("hi"), String::from("there")]]; assert_eq!(Ok(expected), saved); } #[test] fn embedded_struct() { #[derive(Insertable)] #[diesel(table_name = users)] struct UserAttributes<'a> { name: &'a str, hair_color: &'a str, r#type: &'a str, } #[derive(Insertable)] struct User<'a> { id: i32, #[diesel(embed)] attributes: UserAttributes<'a>, } let conn = &mut connection(); let new_user = User { id: 1, attributes: UserAttributes { name: "Sean", hair_color: "Black", r#type: "regular", }, }; insert_into(users::table) .values(&new_user) .execute(conn) .unwrap(); let saved = users::table.load::<(i32, String, Option, Option)>(conn); let expected = vec![( 1, "Sean".to_string(), Some("Black".to_string()), Some("regular".to_string()), )]; assert_eq!(Ok(expected), saved); } #[test] fn serialize_as_with_option() { use diesel::backend::Backend; use diesel::serialize::ToSql; use diesel::sql_types::Text; struct OptionalString(Option<&'static str>); impl From for Option<&'static str> 
{ fn from(s: OptionalString) -> Self { s.0 } } struct OtherString(&'static str); impl From> for MyString { fn from(value: Option) -> Self { MyString(value.unwrap().0.to_owned()) } } #[derive(Debug, AsExpression)] #[diesel(sql_type = Text)] struct MyString(String); impl ToSql for MyString where String: ToSql, DB: Backend, { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> diesel::serialize::Result { >::to_sql(&self.0, out) } } #[derive(Insertable)] struct User { id: i32, #[diesel(serialize_as = MyString)] name: Option, #[diesel(serialize_as = Option<&'static str>)] hair_color: OptionalString, } let conn = &mut connection(); let new_user = User { id: 1, name: Some(OtherString("Sean")), hair_color: OptionalString(Some("Black")), }; insert_into(users::table) .values(new_user) .execute(conn) .unwrap(); let saved = users::table .select((users::id, users::name, users::hair_color)) .load::<(i32, String, Option)>(conn); let expected = vec![(1, "Sean".to_string(), Some("Black".to_string()))]; assert_eq!(Ok(expected), saved); } diesel_derives-2.2.3/tests/multiconnection.rs000064400000000000000000000332041046102023000175420ustar 00000000000000use crate::schema::users; use diesel::connection::Instrumentation; use diesel::prelude::*; #[derive(diesel::MultiConnection)] pub enum InferConnection { #[cfg(feature = "postgres")] Pg(PgConnection), #[cfg(feature = "sqlite")] Sqlite(SqliteConnection), #[cfg(feature = "mysql")] Mysql(MysqlConnection), } #[derive(Queryable, Selectable, Insertable, AsChangeset)] pub struct User { pub id: i32, pub name: String, } #[test] fn check_queries_work() { let mut conn = establish_connection(); // checks that this trait is implemented conn.set_instrumentation(None::>); let _ = conn.instrumentation(); diesel::sql_query( "CREATE TEMPORARY TABLE users(\ id INTEGER NOT NULL PRIMARY KEY, \ name TEXT NOT NULL)", ) .execute(&mut conn) .unwrap(); conn.begin_test_transaction().unwrap(); // these are mostly compile pass tests // simple query let _ = 
users::table .select((users::id, users::name)) .load::(&mut conn) .unwrap(); // with bind let _ = users::table .select((users::id, users::name)) .find(42) .load::(&mut conn) .unwrap(); // simple boxed query let _ = users::table .into_boxed() .select((users::id, users::name)) .load::(&mut conn) .unwrap(); // with bind let _ = users::table .into_boxed() .select((users::id, users::name)) .filter(users::id.eq(42)) .load::(&mut conn) .unwrap(); // as_select let _ = users::table .select(User::as_select()) .load(&mut conn) .unwrap(); // boxable expression let b = Box::new(users::name.eq("John")) as Box< dyn BoxableExpression< users::table, self::multi_connection_impl::MultiBackend, SqlType = _, >, >; let _ = users::table .filter(b) .select(users::id) .load::(&mut conn) .unwrap(); // insert diesel::insert_into(users::table) .values((users::id.eq(42), users::name.eq("John"))) .execute(&mut conn) .unwrap(); diesel::insert_into(users::table) .values(User { id: 43, name: "Jane".into(), }) .execute(&mut conn) .unwrap(); // update diesel::update(users::table) .set(users::name.eq("John")) .execute(&mut conn) .unwrap(); diesel::update(users::table.find(42)) .set(User { id: 42, name: "Jane".into(), }) .execute(&mut conn) .unwrap(); // delete diesel::delete(users::table).execute(&mut conn).unwrap(); } fn establish_connection() -> InferConnection { let database_url = if cfg!(feature = "mysql") { dotenvy::var("MYSQL_UNIT_TEST_DATABASE_URL").or_else(|_| dotenvy::var("DATABASE_URL")) } else if cfg!(feature = "postgres") { dotenvy::var("PG_DATABASE_URL").or_else(|_| dotenvy::var("DATABASE_URL")) } else { Ok(dotenvy::var("DATABASE_URL").unwrap_or_else(|_| ":memory:".to_owned())) }; let database_url = database_url.expect("DATABASE_URL must be set in order to run tests"); InferConnection::establish(&database_url).unwrap() } #[cfg(all(feature = "chrono", feature = "time"))] fn make_test_table(conn: &mut InferConnection) { match conn { #[cfg(feature = "postgres")] InferConnection::Pg(ref mut 
conn) => { diesel::sql_query( "CREATE TEMPORARY TABLE type_test( \ small_int SMALLINT,\ integer INTEGER,\ big_int BIGINT,\ float FLOAT4,\ double FLOAT8,\ string TEXT,\ blob BYTEA,\ timestamp1 TIMESTAMP,\ date1 DATE,\ time1 TIME,\ timestamp2 TIMESTAMP,\ date2 DATE,\ time2 TIME )", ) .execute(conn) .unwrap(); } #[cfg(feature = "sqlite")] InferConnection::Sqlite(ref mut conn) => { diesel::sql_query( "CREATE TEMPORARY TABLE type_test( \ small_int SMALLINT,\ integer INTEGER,\ big_int BIGINT,\ float FLOAT4,\ double FLOAT8,\ string TEXT,\ blob BLOB,\ timestamp1 TIMESTAMP,\ date1 DATE,\ time1 TIME,\ timestamp2 TIMESTAMP,\ date2 DATE,\ time2 TIME )", ) .execute(conn) .unwrap(); } #[cfg(feature = "mysql")] InferConnection::Mysql(ref mut conn) => { diesel::sql_query( "CREATE TEMPORARY TABLE type_test( \ `small_int` SMALLINT,\ `integer` INT,\ `big_int` BIGINT,\ `float` FLOAT,\ `double` DOUBLE,\ `string` TEXT,\ `blob` BLOB,\ `timestamp1` DATETIME, `date1` DATE,\ `time1` TIME,\ `timestamp2` DATETIME, `date2` DATE,\ `time2` TIME )", ) .execute(conn) .unwrap(); } } } #[cfg(all(feature = "chrono", feature = "time"))] #[test] fn type_checks() { use diesel::internal::derives::multiconnection::{chrono, time}; table! 
{ type_test(integer) { small_int -> SmallInt, integer -> Integer, big_int -> BigInt, float -> Float, double -> Double, string -> Text, blob -> Blob, timestamp1 -> Timestamp, time1 -> Time, date1 -> Date, timestamp2 -> Timestamp, time2 -> Time, date2 -> Date, } } let mut conn = establish_connection(); make_test_table(&mut conn); conn.begin_test_transaction().unwrap(); let small_int = 1_i16; let integer = 2_i32; let big_int = 3_i64; let float = 4.0_f32; let double = 5.0_f64; let string = String::from("bar"); let blob = vec![1_u8, 2, 3, 4]; let date1 = chrono::NaiveDate::from_ymd_opt(2023, 08, 17).unwrap(); let time1 = chrono::NaiveTime::from_hms_opt(07, 50, 12).unwrap(); let timestamp1 = chrono::NaiveDateTime::new(date1, time1); let time2 = time::Time::from_hms(12, 22, 23).unwrap(); let date2 = time::Date::from_calendar_date(2023, time::Month::August, 26).unwrap(); let timestamp2 = time::PrimitiveDateTime::new(date2, time2); diesel::insert_into(type_test::table) .values(( type_test::small_int.eq(small_int), type_test::integer.eq(integer), type_test::big_int.eq(big_int), type_test::float.eq(float), type_test::double.eq(double), type_test::string.eq(&string), type_test::blob.eq(&blob), type_test::timestamp1.eq(timestamp1), type_test::time1.eq(time1), type_test::date1.eq(date1), type_test::timestamp2.eq(timestamp2), type_test::time2.eq(time2), type_test::date2.eq(date2), )) .execute(&mut conn) .unwrap(); let result = type_test::table .get_result::<( i16, //0 i32, //1 i64, //2 f32, //3 f64, //4 String, //5 Vec, //6 chrono::NaiveDateTime, //7 chrono::NaiveTime, //8 chrono::NaiveDate, //9 time::PrimitiveDateTime, //10 time::Time, //11 time::Date, //12 )>(&mut conn) .unwrap(); assert_eq!(small_int, result.0); assert_eq!(integer, result.1); assert_eq!(big_int, result.2); assert_eq!(float, result.3); assert_eq!(double, result.4); assert_eq!(string, result.5); assert_eq!(blob, result.6); assert_eq!(timestamp1, result.7); assert_eq!(time1, result.8); assert_eq!(date1, 
result.9); assert_eq!(timestamp2, result.10); assert_eq!(time2, result.11); assert_eq!(date2, result.12); } #[cfg(all(feature = "chrono", feature = "time"))] #[test] fn nullable_type_checks() { use diesel::internal::derives::multiconnection::{chrono, time}; table! { type_test(integer) { small_int -> Nullable, integer -> Nullable, big_int -> Nullable, float -> Nullable, double -> Nullable, string -> Nullable, blob -> Nullable, timestamp1 -> Nullable, time1 -> Nullable