structopt-derive-0.4.18/.cargo_vcs_info.json

{ "git": { "sha1": "da1fff81aded1c239ffcbd0a27ccdc7f28f74ff2" } }

structopt-derive-0.4.18/Cargo.toml

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
name = "structopt-derive"
version = "0.4.18"
authors = ["Guillaume Pinot"]
description = "Parse command line argument by defining a struct, derive crate."
documentation = "https://docs.rs/structopt-derive"
keywords = ["clap", "cli", "derive", "docopt"]
categories = ["command-line-interface"]
license = "Apache-2.0/MIT"
repository = "https://github.com/TeXitoi/structopt"

[lib]
proc-macro = true

[dependencies.heck]
version = "0.3.0"

[dependencies.proc-macro-error]
version = "1.0.0"

[dependencies.proc-macro2]
version = "1"

[dependencies.quote]
version = "1"

[dependencies.syn]
version = "1"
features = ["full"]

[features]
paw = []

[badges.travis-ci]
repository = "TeXitoi/structopt"

structopt-derive-0.4.18/Cargo.toml.orig

[package]
name = "structopt-derive"
version = "0.4.18"
edition = "2018"
authors = ["Guillaume Pinot"]
description = "Parse command line argument by defining a struct, derive crate."
documentation = "https://docs.rs/structopt-derive"
repository = "https://github.com/TeXitoi/structopt"
keywords = ["clap", "cli", "derive", "docopt"]
categories = ["command-line-interface"]
license = "Apache-2.0/MIT"

[badges]
travis-ci = { repository = "TeXitoi/structopt" }

[dependencies]
syn = { version = "1", features = ["full"] }
quote = "1"
proc-macro2 = "1"
heck = "0.3.0"
proc-macro-error = "1.0.0"

[features]
paw = []

[lib]
proc-macro = true

structopt-derive-0.4.18/LICENSE-APACHE

                              Apache License
                        Version 2.0, January 2004
                     http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. structopt-derive-0.4.18/LICENSE-MIT000064400000000000000000000021200072674642500147200ustar 00000000000000MIT License Copyright (c) 2018 Guillaume Pinot (@TeXitoi) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. structopt-derive-0.4.18/src/attrs.rs000064400000000000000000000542300072674642500155670ustar 00000000000000// Copyright 2018 Guillaume Pinot (@TeXitoi) // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. use crate::doc_comments::process_doc_comment; use crate::{parse::*, spanned::Sp, ty::Ty}; use std::env; use heck::{CamelCase, KebabCase, MixedCase, ShoutySnakeCase, SnakeCase}; use proc_macro2::{Span, TokenStream}; use proc_macro_error::abort; use quote::{quote, quote_spanned, ToTokens}; use syn::{ self, ext::IdentExt, spanned::Spanned, Attribute, Expr, Ident, LitStr, MetaNameValue, Type, }; #[derive(Clone)] pub enum Kind { Arg(Sp), Subcommand(Sp), ExternalSubcommand, Flatten, Skip(Option), } #[derive(Clone)] pub struct Method { name: Ident, args: TokenStream, } #[derive(Clone)] pub struct Parser { pub kind: Sp, pub func: TokenStream, } #[derive(Debug, PartialEq, Clone)] pub enum ParserKind { FromStr, TryFromStr, FromOsStr, TryFromOsStr, FromOccurrences, FromFlag, } /// Defines the casing for the attributes long representation. #[derive(Copy, Clone, Debug, PartialEq)] pub enum CasingStyle { /// Indicate word boundaries with uppercase letter, excluding the first word. Camel, /// Keep all letters lowercase and indicate word boundaries with hyphens. Kebab, /// Indicate word boundaries with uppercase letter, including the first word. Pascal, /// Keep all letters uppercase and indicate word boundaries with underscores. ScreamingSnake, /// Keep all letters lowercase and indicate word boundaries with underscores. Snake, /// Use the original attribute name defined in the code. Verbatim, /// Keep all letters lowercase and remove word boundaries. Lower, /// Keep all letters uppercase and remove word boundaries. 
Upper, } #[derive(Clone)] pub enum Name { Derived(Ident), Assigned(TokenStream), } #[derive(Clone)] pub struct Attrs { name: Name, casing: Sp, env_casing: Sp, ty: Option, doc_comment: Vec, methods: Vec, parser: Sp, author: Option, about: Option, version: Option, no_version: Option, verbatim_doc_comment: Option, has_custom_parser: bool, kind: Sp, } impl Method { pub fn new(name: Ident, args: TokenStream) -> Self { Method { name, args } } fn from_lit_or_env(ident: Ident, lit: Option, env_var: &str) -> Self { let mut lit = match lit { Some(lit) => lit, None => match env::var(env_var) { Ok(val) => LitStr::new(&val, ident.span()), Err(_) => { abort!(ident, "cannot derive `{}` from Cargo.toml", ident; note = "`{}` environment variable is not set", env_var; help = "use `{} = \"...\"` to set {} manually", ident, ident; ); } }, }; if ident == "author" { let edited = process_author_str(&lit.value()); lit = LitStr::new(&edited, lit.span()); } Method::new(ident, quote!(#lit)) } } impl ToTokens for Method { fn to_tokens(&self, ts: &mut TokenStream) { let Method { ref name, ref args } = self; quote!(.#name(#args)).to_tokens(ts); } } impl Parser { fn default_spanned(span: Span) -> Sp { let kind = Sp::new(ParserKind::TryFromStr, span); let func = quote_spanned!(span=> ::std::str::FromStr::from_str); Sp::new(Parser { kind, func }, span) } fn from_spec(parse_ident: Ident, spec: ParserSpec) -> Sp { use ParserKind::*; let kind = match &*spec.kind.to_string() { "from_str" => FromStr, "try_from_str" => TryFromStr, "from_os_str" => FromOsStr, "try_from_os_str" => TryFromOsStr, "from_occurrences" => FromOccurrences, "from_flag" => FromFlag, s => abort!(spec.kind, "unsupported parser `{}`", s), }; let func = match spec.parse_func { None => match kind { FromStr | FromOsStr => { quote_spanned!(spec.kind.span()=> ::std::convert::From::from) } TryFromStr => quote_spanned!(spec.kind.span()=> ::std::str::FromStr::from_str), TryFromOsStr => abort!( spec.kind, "you must set parser for `try_from_os_str` explicitly" ), FromOccurrences => quote_spanned!(spec.kind.span()=> { |v| v as _ }), FromFlag => quote_spanned!(spec.kind.span()=> ::std::convert::From::from), }, Some(func) => match func { syn::Expr::Path(_) => quote!(#func), _ => abort!(func, "`parse` argument must be a function path"), }, }; let kind = Sp::new(kind, spec.kind.span()); let parser = Parser { kind, func }; Sp::new(parser, parse_ident.span()) } } impl CasingStyle { fn from_lit(name: LitStr) -> Sp { use CasingStyle::*; let normalized = name.value().to_camel_case().to_lowercase(); let cs = |kind| Sp::new(kind, name.span()); match normalized.as_ref() { "camel" | "camelcase" => cs(Camel), "kebab" | "kebabcase" => cs(Kebab), "pascal" | "pascalcase" => cs(Pascal), "screamingsnake" | "screamingsnakecase" => cs(ScreamingSnake), "snake" | "snakecase" => cs(Snake), "verbatim" | "verbatimcase" => cs(Verbatim), "lower" | "lowercase" => cs(Lower), "upper" | "uppercase" => cs(Upper), s => abort!(name, "unsupported casing: `{}`", s), } } } impl Name { pub fn translate(self, style: CasingStyle) -> TokenStream { use CasingStyle::*; match self { Name::Assigned(tokens) => tokens, Name::Derived(ident) => { let s = ident.unraw().to_string(); let s = match style { Pascal => s.to_camel_case(), Kebab => s.to_kebab_case(), Camel => s.to_mixed_case(), ScreamingSnake => s.to_shouty_snake_case(), Snake => s.to_snake_case(), Verbatim => s, Lower => s.to_snake_case().replace("_", ""), Upper => s.to_shouty_snake_case().replace("_", ""), }; quote_spanned!(ident.span()=> #s) } } } } impl 
Attrs { fn new( default_span: Span, name: Name, parent_attrs: Option<&Attrs>, ty: Option, casing: Sp, env_casing: Sp, ) -> Self { let no_version = parent_attrs .as_ref() .map(|attrs| attrs.no_version.clone()) .unwrap_or(None); Self { name, ty, casing, env_casing, doc_comment: vec![], methods: vec![], parser: Parser::default_spanned(default_span), about: None, author: None, version: None, no_version, verbatim_doc_comment: None, has_custom_parser: false, kind: Sp::new(Kind::Arg(Sp::new(Ty::Other, default_span)), default_span), } } fn push_method(&mut self, name: Ident, arg: impl ToTokens) { if name == "name" { self.name = Name::Assigned(quote!(#arg)); } else if name == "version" { self.version = Some(Method::new(name, quote!(#arg))); } else { self.methods.push(Method::new(name, quote!(#arg))) } } fn push_attrs(&mut self, attrs: &[Attribute]) { use crate::parse::StructOptAttr::*; for attr in parse_structopt_attributes(attrs) { match attr { Short(ident) | Long(ident) => { self.push_method(ident, self.name.clone().translate(*self.casing)); } Env(ident) => { self.push_method(ident, self.name.clone().translate(*self.env_casing)); } Subcommand(ident) => { let ty = Sp::call_site(Ty::Other); let kind = Sp::new(Kind::Subcommand(ty), ident.span()); self.set_kind(kind); } ExternalSubcommand(ident) => { self.kind = Sp::new(Kind::ExternalSubcommand, ident.span()); } Flatten(ident) => { let kind = Sp::new(Kind::Flatten, ident.span()); self.set_kind(kind); } Skip(ident, expr) => { let kind = Sp::new(Kind::Skip(expr), ident.span()); self.set_kind(kind); } NoVersion(ident) => self.no_version = Some(ident), VerbatimDocComment(ident) => self.verbatim_doc_comment = Some(ident), DefaultValue(ident, lit) => { let val = if let Some(lit) = lit { quote!(#lit) } else { let ty = if let Some(ty) = self.ty.as_ref() { ty } else { abort!( ident, "#[structopt(default_value)] (without an argument) can be used \ only on field level"; note = "see \ https://docs.rs/structopt/0.3.5/structopt/#magical-methods") }; quote_spanned!(ident.span()=> { ::structopt::lazy_static::lazy_static! { static ref DEFAULT_VALUE: &'static str = { let val = <#ty as ::std::default::Default>::default(); let s = ::std::string::ToString::to_string(&val); ::std::boxed::Box::leak(s.into_boxed_str()) }; } *DEFAULT_VALUE }) }; self.methods.push(Method::new(ident, val)); } About(ident, about) => { self.about = Some(Method::from_lit_or_env( ident, about, "CARGO_PKG_DESCRIPTION", )); } Author(ident, author) => { self.author = Some(Method::from_lit_or_env(ident, author, "CARGO_PKG_AUTHORS")); } Version(ident, version) => { self.push_method(ident, version); } NameLitStr(name, lit) => { self.push_method(name, lit); } NameExpr(name, expr) => { self.push_method(name, expr); } MethodCall(name, args) => self.push_method(name, quote!(#(#args),*)), RenameAll(_, casing_lit) => { self.casing = CasingStyle::from_lit(casing_lit); } RenameAllEnv(_, casing_lit) => { self.env_casing = CasingStyle::from_lit(casing_lit); } Parse(ident, spec) => { self.has_custom_parser = true; self.parser = Parser::from_spec(ident, spec); } } } } fn push_doc_comment(&mut self, attrs: &[Attribute], name: &str) { use crate::Lit::*; use crate::Meta::*; let comment_parts: Vec<_> = attrs .iter() .filter(|attr| attr.path.is_ident("doc")) .filter_map(|attr| { if let Ok(NameValue(MetaNameValue { lit: Str(s), .. 
})) = attr.parse_meta() { Some(s.value()) } else { // non #[doc = "..."] attributes are not our concern // we leave them for rustc to handle None } }) .collect(); self.doc_comment = process_doc_comment(comment_parts, name, self.verbatim_doc_comment.is_none()); } pub fn from_struct( span: Span, attrs: &[Attribute], name: Name, parent_attrs: Option<&Attrs>, argument_casing: Sp, env_casing: Sp, allow_skip: bool, ) -> Self { let mut res = Self::new(span, name, parent_attrs, None, argument_casing, env_casing); res.push_attrs(attrs); res.push_doc_comment(attrs, "about"); if res.has_custom_parser { abort!( res.parser.span(), "`parse` attribute is only allowed on fields" ); } match &*res.kind { Kind::Subcommand(_) => abort!(res.kind.span(), "subcommand is only allowed on fields"), Kind::Skip(_) if !allow_skip => { abort!(res.kind.span(), "skip is only allowed on fields") } Kind::Arg(_) | Kind::ExternalSubcommand | Kind::Flatten | Kind::Skip(_) => res, } } pub fn from_field( field: &syn::Field, parent_attrs: Option<&Attrs>, struct_casing: Sp, env_casing: Sp, ) -> Self { let name = field.ident.clone().unwrap(); let mut res = Self::new( field.span(), Name::Derived(name), parent_attrs, Some(field.ty.clone()), struct_casing, env_casing, ); res.push_attrs(&field.attrs); res.push_doc_comment(&field.attrs, "help"); match &*res.kind { Kind::Flatten => { if res.has_custom_parser { abort!( res.parser.span(), "parse attribute is not allowed for flattened entry" ); } if res.has_explicit_methods() { abort!( res.kind.span(), "methods are not allowed for flattened entry" ); } if res.has_doc_methods() { res.doc_comment = vec![]; } } Kind::ExternalSubcommand => {} Kind::Subcommand(_) => { if res.has_custom_parser { abort!( res.parser.span(), "parse attribute is not allowed for subcommand" ); } if res.has_explicit_methods() { abort!( res.kind.span(), "methods in attributes are not allowed for subcommand" ); } let ty = Ty::from_syn_ty(&field.ty); match *ty { Ty::OptionOption => { abort!( field.ty, "Option> type is not allowed for subcommand" ); } Ty::OptionVec => { abort!( field.ty, "Option> type is not allowed for subcommand" ); } _ => (), } res.kind = Sp::new(Kind::Subcommand(ty), res.kind.span()); } Kind::Skip(_) => { if res.has_explicit_methods() { abort!( res.kind.span(), "methods are not allowed for skipped fields" ); } } Kind::Arg(orig_ty) => { let mut ty = Ty::from_syn_ty(&field.ty); if res.has_custom_parser { match *ty { Ty::Option | Ty::Vec | Ty::OptionVec => (), _ => ty = Sp::new(Ty::Other, ty.span()), } } match *ty { Ty::Bool => { if res.is_positional() && !res.has_custom_parser { abort!(field.ty, "`bool` cannot be used as positional parameter with default parser"; help = "if you want to create a flag add `long` or `short`"; help = "If you really want a boolean parameter \ add an explicit parser, for example `parse(try_from_str)`"; note = "see also https://github.com/TeXitoi/structopt/tree/master/examples/true_or_false.rs"; ) } if let Some(m) = res.find_method("default_value") { abort!(m.name, "default_value is meaningless for bool") } if let Some(m) = res.find_method("required") { abort!(m.name, "required is meaningless for bool") } } Ty::Option => { if let Some(m) = res.find_method("default_value") { abort!(m.name, "default_value is meaningless for Option") } if let Some(m) = res.find_method("required") { abort!(m.name, "required is meaningless for Option") } } Ty::OptionOption => { if res.is_positional() { abort!( field.ty, "Option> type is meaningless for positional argument" ) } } Ty::OptionVec => { 
if res.is_positional() { abort!( field.ty, "Option> type is meaningless for positional argument" ) } } _ => (), } res.kind = Sp::new(Kind::Arg(ty), orig_ty.span()); } } res } fn set_kind(&mut self, kind: Sp) { if let Kind::Arg(_) = *self.kind { self.kind = kind; } else { abort!( kind.span(), "subcommand, flatten and skip cannot be used together" ); } } pub fn has_method(&self, name: &str) -> bool { self.find_method(name).is_some() } pub fn find_method(&self, name: &str) -> Option<&Method> { self.methods.iter().find(|m| m.name == name) } /// generate methods from attributes on top of struct or enum pub fn top_level_methods(&self) -> TokenStream { let author = &self.author; let about = &self.about; let methods = &self.methods; let doc_comment = &self.doc_comment; quote!( #(#doc_comment)* #author #about #(#methods)* ) } /// generate methods on top of a field pub fn field_methods(&self) -> TokenStream { let methods = &self.methods; let doc_comment = &self.doc_comment; quote!( #(#doc_comment)* #(#methods)* ) } pub fn version(&self) -> TokenStream { match (&self.no_version, &self.version) { (None, Some(m)) => m.to_token_stream(), (None, None) => std::env::var("CARGO_PKG_VERSION") .map(|version| quote!( .version(#version) )) .unwrap_or_default(), _ => quote!(), } } pub fn cased_name(&self) -> TokenStream { self.name.clone().translate(*self.casing) } pub fn parser(&self) -> &Sp { &self.parser } pub fn kind(&self) -> Sp { self.kind.clone() } pub fn casing(&self) -> Sp { self.casing.clone() } pub fn env_casing(&self) -> Sp { self.env_casing.clone() } pub fn is_positional(&self) -> bool { self.methods .iter() .all(|m| m.name != "long" && m.name != "short") } pub fn has_explicit_methods(&self) -> bool { self.methods .iter() .any(|m| m.name != "help" && m.name != "long_help") } pub fn has_doc_methods(&self) -> bool { !self.doc_comment.is_empty() || self.methods.iter().any(|m| { m.name == "help" || m.name == "long_help" || m.name == "about" || m.name == "long_about" }) } } /// replace all `:` with `, ` when not inside the `<>` /// /// `"author1:author2:author3" => "author1, author2, author3"` /// `"author1 :author2" => "author1 , author2" fn process_author_str(author: &str) -> String { let mut res = String::with_capacity(author.len()); let mut inside_angle_braces = 0usize; for ch in author.chars() { if inside_angle_braces > 0 && ch == '>' { inside_angle_braces -= 1; res.push(ch); } else if ch == '<' { inside_angle_braces += 1; res.push(ch); } else if inside_angle_braces == 0 && ch == ':' { res.push_str(", "); } else { res.push(ch); } } res } structopt-derive-0.4.18/src/doc_comments.rs000064400000000000000000000054100072674642500171000ustar 00000000000000//! The preprocessing we apply to doc comments. //! //! structopt works in terms of "paragraphs". Paragraph is a sequence of //! non-empty adjacent lines, delimited by sequences of blank (whitespace only) lines. use crate::attrs::Method; use quote::{format_ident, quote}; use std::iter; pub fn process_doc_comment(lines: Vec, name: &str, preprocess: bool) -> Vec { // multiline comments (`/** ... 
*/`) may have LFs (`\n`) in them, // we need to split so we could handle the lines correctly // // we also need to remove leading and trailing blank lines let mut lines: Vec<&str> = lines .iter() .skip_while(|s| is_blank(s)) .flat_map(|s| s.split('\n')) .collect(); while let Some(true) = lines.last().map(|s| is_blank(s)) { lines.pop(); } // remove one leading space no matter what for line in lines.iter_mut() { if line.starts_with(' ') { *line = &line[1..]; } } if lines.is_empty() { return vec![]; } let short_name = format_ident!("{}", name); let long_name = format_ident!("long_{}", name); if let Some(first_blank) = lines.iter().position(|s| is_blank(s)) { let (short, long) = if preprocess { let paragraphs = split_paragraphs(&lines); let short = paragraphs[0].clone(); let long = paragraphs.join("\n\n"); (remove_period(short), long) } else { let short = lines[..first_blank].join("\n"); let long = lines.join("\n"); (short, long) }; vec![ Method::new(short_name, quote!(#short)), Method::new(long_name, quote!(#long)), ] } else { let short = if preprocess { let s = merge_lines(&lines); remove_period(s) } else { lines.join("\n") }; vec![Method::new(short_name, quote!(#short))] } } fn split_paragraphs(lines: &[&str]) -> Vec { let mut last_line = 0; iter::from_fn(|| { let slice = &lines[last_line..]; let start = slice.iter().position(|s| !is_blank(s)).unwrap_or(0); let slice = &slice[start..]; let len = slice .iter() .position(|s| is_blank(s)) .unwrap_or_else(|| slice.len()); last_line += start + len; if len != 0 { Some(merge_lines(&slice[..len])) } else { None } }) .collect() } fn remove_period(mut s: String) -> String { if s.ends_with('.') && !s.ends_with("..") { s.pop(); } s } fn is_blank(s: &str) -> bool { s.trim().is_empty() } fn merge_lines(lines: &[&str]) -> String { lines.iter().map(|s| s.trim()).collect::>().join(" ") } structopt-derive-0.4.18/src/lib.rs000064400000000000000000001054470072674642500152070ustar 00000000000000// Copyright 2018 Guillaume Pinot (@TeXitoi) // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This crate is custom derive for `StructOpt`. It should not be used //! directly. See [structopt documentation](https://docs.rs/structopt) //! for the usage of `#[derive(StructOpt)]`. #![allow(clippy::large_enum_variant)] // FIXME: remove when and if our MSRV hits 1.42 #![allow(clippy::match_like_matches_macro)] #![forbid(unsafe_code)] extern crate proc_macro; mod attrs; mod doc_comments; mod parse; mod spanned; mod ty; use crate::{ attrs::{Attrs, CasingStyle, Kind, Name, ParserKind}, spanned::Sp, ty::{is_simple_ty, sub_type, subty_if_name, Ty}, }; use proc_macro2::{Span, TokenStream}; use proc_macro_error::{abort, abort_call_site, proc_macro_error, set_dummy}; use quote::{format_ident, quote, quote_spanned}; use syn::{punctuated::Punctuated, spanned::Spanned, token::Comma, *}; /// Default casing style for generated arguments. const DEFAULT_CASING: CasingStyle = CasingStyle::Kebab; /// Default casing style for environment variables const DEFAULT_ENV_CASING: CasingStyle = CasingStyle::ScreamingSnake; /// Output for the `gen_xxx()` methods were we need more than a simple stream of tokens. /// /// The output of a generation method is not only the stream of new tokens but also the attribute /// information of the current element. These attribute information may contain valuable information /// for any kind of child arguments. 
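///
/// For example, a `rename_all` casing or a `no_version` setting specified on the
/// top-level struct or enum travels through this `Attrs` value down to the
/// attributes built for the fields and subcommand variants.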
struct GenOutput { tokens: TokenStream, attrs: Attrs, } /// Generates the `StructOpt` impl. #[proc_macro_derive(StructOpt, attributes(structopt))] #[proc_macro_error] pub fn structopt(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input: DeriveInput = syn::parse(input).unwrap(); let gen = impl_structopt(&input); gen.into() } /// Generate a block of code to add arguments/subcommands corresponding to /// the `fields` to an app. fn gen_augmentation( fields: &Punctuated, app_var: &Ident, parent_attribute: &Attrs, ) -> TokenStream { let mut subcmds = fields.iter().filter_map(|field| { let attrs = Attrs::from_field( field, Some(parent_attribute), parent_attribute.casing(), parent_attribute.env_casing(), ); let kind = attrs.kind(); if let Kind::Subcommand(ty) = &*kind { let subcmd_type = match (**ty, sub_type(&field.ty)) { (Ty::Option, Some(sub_type)) => sub_type, _ => &field.ty, }; let required = if **ty == Ty::Option { quote!() } else { quote_spanned! { kind.span()=> let #app_var = #app_var.setting( ::structopt::clap::AppSettings::SubcommandRequiredElseHelp ); } }; let span = field.span(); let ts = quote! { let #app_var = <#subcmd_type as ::structopt::StructOptInternal>::augment_clap( #app_var ); #required }; Some((span, ts)) } else { None } }); let subcmd = subcmds.next().map(|(_, ts)| ts); if let Some((span, _)) = subcmds.next() { abort!( span, "multiple subcommand sets are not allowed, that's the second" ); } let args = fields.iter().filter_map(|field| { let attrs = Attrs::from_field( field, Some(parent_attribute), parent_attribute.casing(), parent_attribute.env_casing(), ); let kind = attrs.kind(); match &*kind { Kind::ExternalSubcommand => abort!( kind.span(), "`external_subcommand` is only allowed on enum variants" ), Kind::Subcommand(_) | Kind::Skip(_) => None, Kind::Flatten => { let ty = &field.ty; Some(quote_spanned! { kind.span()=> let #app_var = <#ty as ::structopt::StructOptInternal>::augment_clap(#app_var); let #app_var = if <#ty as ::structopt::StructOptInternal>::is_subcommand() { #app_var.setting(::structopt::clap::AppSettings::SubcommandRequiredElseHelp) } else { #app_var }; }) } Kind::Arg(ty) => { let convert_type = match **ty { Ty::Vec | Ty::Option => sub_type(&field.ty).unwrap_or(&field.ty), Ty::OptionOption | Ty::OptionVec => { sub_type(&field.ty).and_then(sub_type).unwrap_or(&field.ty) } _ => &field.ty, }; let occurrences = *attrs.parser().kind == ParserKind::FromOccurrences; let flag = *attrs.parser().kind == ParserKind::FromFlag; let parser = attrs.parser(); let func = &parser.func; let validator = match *parser.kind { ParserKind::TryFromStr => quote_spanned! { func.span()=> .validator(|s| { #func(s.as_str()) .map(|_: #convert_type| ()) .map_err(|e| e.to_string()) }) }, ParserKind::TryFromOsStr => quote_spanned! { func.span()=> .validator_os(|s| #func(&s).map(|_: #convert_type| ())) }, _ => quote!(), }; let modifier = match **ty { Ty::Bool => quote_spanned! { ty.span()=> .takes_value(false) .multiple(false) }, Ty::Option => quote_spanned! { ty.span()=> .takes_value(true) .multiple(false) #validator }, Ty::OptionOption => quote_spanned! { ty.span()=> .takes_value(true) .multiple(false) .min_values(0) .max_values(1) #validator }, Ty::OptionVec => quote_spanned! { ty.span()=> .takes_value(true) .multiple(true) .min_values(0) #validator }, Ty::Vec => quote_spanned! { ty.span()=> .takes_value(true) .multiple(true) #validator }, Ty::Other if occurrences => quote_spanned! { ty.span()=> .takes_value(false) .multiple(true) }, Ty::Other if flag => quote_spanned! 
{ ty.span()=> .takes_value(false) .multiple(false) }, Ty::Other => { let required = !attrs.has_method("default_value"); quote_spanned! { ty.span()=> .takes_value(true) .multiple(false) .required(#required) #validator } } }; let name = attrs.cased_name(); let methods = attrs.field_methods(); Some(quote_spanned! { field.span()=> let #app_var = #app_var.arg( ::structopt::clap::Arg::with_name(#name) #modifier #methods ); }) } } }); let app_methods = parent_attribute.top_level_methods(); let version = parent_attribute.version(); quote! {{ let #app_var = #app_var#app_methods; #( #args )* #subcmd #app_var#version }} } fn gen_constructor(fields: &Punctuated, parent_attribute: &Attrs) -> TokenStream { // This ident is used in several match branches below, // and the `quote[_spanned]` invocations have different spans. // // Given that this ident is used in several places and // that the branches are located inside of a loop, it is possible that // this ident will be given _different_ spans in different places, and // thus will not be the _same_ ident anymore. To make sure the `matches` // is always the same, we factor it out. let matches = format_ident!("matches"); let fields = fields.iter().map(|field| { let attrs = Attrs::from_field( field, Some(parent_attribute), parent_attribute.casing(), parent_attribute.env_casing(), ); let field_name = field.ident.as_ref().unwrap(); let kind = attrs.kind(); match &*kind { Kind::ExternalSubcommand => abort!( kind.span(), "`external_subcommand` is allowed only on enum variants" ), Kind::Subcommand(ty) => { let subcmd_type = match (**ty, sub_type(&field.ty)) { (Ty::Option, Some(sub_type)) => sub_type, _ => &field.ty, }; let unwrapper = match **ty { Ty::Option => quote!(), _ => quote_spanned!( ty.span()=> .unwrap() ), }; quote_spanned! { kind.span()=> #field_name: <#subcmd_type as ::structopt::StructOptInternal>::from_subcommand( #matches.subcommand()) #unwrapper } } Kind::Flatten => quote_spanned! { kind.span()=> #field_name: ::structopt::StructOpt::from_clap(#matches) }, Kind::Skip(val) => match val { None => quote_spanned!(kind.span()=> #field_name: Default::default()), Some(val) => quote_spanned!(kind.span()=> #field_name: (#val).into()), }, Kind::Arg(ty) => { use crate::attrs::ParserKind::*; let parser = attrs.parser(); let func = &parser.func; let span = parser.kind.span(); let (value_of, values_of, parse) = match *parser.kind { FromStr => ( quote_spanned!(span=> value_of), quote_spanned!(span=> values_of), func.clone(), ), TryFromStr => ( quote_spanned!(span=> value_of), quote_spanned!(span=> values_of), quote_spanned!(func.span()=> |s| #func(s).unwrap()), ), FromOsStr => ( quote_spanned!(span=> value_of_os), quote_spanned!(span=> values_of_os), func.clone(), ), TryFromOsStr => ( quote_spanned!(span=> value_of_os), quote_spanned!(span=> values_of_os), quote_spanned!(func.span()=> |s| #func(s).unwrap()), ), FromOccurrences => ( quote_spanned!(span=> occurrences_of), quote!(), func.clone(), ), FromFlag => (quote!(), quote!(), func.clone()), }; let flag = *attrs.parser().kind == ParserKind::FromFlag; let occurrences = *attrs.parser().kind == ParserKind::FromOccurrences; let name = attrs.cased_name(); let convert_type = match **ty { Ty::Vec | Ty::Option => sub_type(&field.ty).unwrap_or(&field.ty), Ty::OptionOption | Ty::OptionVec => { sub_type(&field.ty).and_then(sub_type).unwrap_or(&field.ty) } _ => &field.ty, }; let field_value = match **ty { Ty::Bool => quote_spanned!(ty.span()=> #matches.is_present(#name)), Ty::Option => quote_spanned! 
{ ty.span()=> #matches.#value_of(#name) .map(#parse) }, Ty::OptionOption => quote_spanned! { ty.span()=> if #matches.is_present(#name) { Some(#matches.#value_of(#name).map(#parse)) } else { None } }, Ty::OptionVec => quote_spanned! { ty.span()=> if #matches.is_present(#name) { Some(#matches.#values_of(#name) .map_or_else(Vec::new, |v| v.map::<#convert_type, _>(#parse).collect())) } else { None } }, Ty::Vec => quote_spanned! { ty.span()=> #matches.#values_of(#name) .map_or_else(Vec::new, |v| v.map::<#convert_type, _>(#parse).collect()) }, Ty::Other if occurrences => quote_spanned! { ty.span()=> #parse(#matches.#value_of(#name)) }, Ty::Other if flag => quote_spanned! { ty.span()=> #parse(#matches.is_present(#name)) }, Ty::Other => quote_spanned! { ty.span()=> #matches.#value_of(#name) .map(#parse) .unwrap() }, }; quote_spanned!(field.span()=> #field_name: #field_value ) } } }); quote! {{ #( #fields ),* }} } fn gen_from_clap( struct_name: &Ident, fields: &Punctuated, parent_attribute: &Attrs, ) -> TokenStream { let field_block = gen_constructor(fields, parent_attribute); quote! { fn from_clap(matches: &::structopt::clap::ArgMatches) -> Self { #struct_name #field_block } } } fn gen_clap(attrs: &[Attribute]) -> GenOutput { let name = std::env::var("CARGO_PKG_NAME").ok().unwrap_or_default(); let attrs = Attrs::from_struct( Span::call_site(), attrs, Name::Assigned(quote!(#name)), None, Sp::call_site(DEFAULT_CASING), Sp::call_site(DEFAULT_ENV_CASING), false, ); let tokens = { let name = attrs.cased_name(); quote!(::structopt::clap::App::new(#name)) }; GenOutput { tokens, attrs } } fn gen_clap_struct(struct_attrs: &[Attribute]) -> GenOutput { let initial_clap_app_gen = gen_clap(struct_attrs); let clap_tokens = initial_clap_app_gen.tokens; let augmented_tokens = quote! { fn clap<'a, 'b>() -> ::structopt::clap::App<'a, 'b> { let app = #clap_tokens; ::augment_clap(app) } }; GenOutput { tokens: augmented_tokens, attrs: initial_clap_app_gen.attrs, } } fn gen_augment_clap(fields: &Punctuated, parent_attribute: &Attrs) -> TokenStream { let app_var = Ident::new("app", Span::call_site()); let augmentation = gen_augmentation(fields, &app_var, parent_attribute); quote! { fn augment_clap<'a, 'b>( #app_var: ::structopt::clap::App<'a, 'b> ) -> ::structopt::clap::App<'a, 'b> { #augmentation } } } fn gen_clap_enum(enum_attrs: &[Attribute]) -> GenOutput { let initial_clap_app_gen = gen_clap(enum_attrs); let clap_tokens = initial_clap_app_gen.tokens; let tokens = quote! { fn clap<'a, 'b>() -> ::structopt::clap::App<'a, 'b> { let app = #clap_tokens .setting(::structopt::clap::AppSettings::SubcommandRequiredElseHelp); ::augment_clap(app) } }; GenOutput { tokens, attrs: initial_clap_app_gen.attrs, } } fn gen_augment_clap_enum( variants: &Punctuated, parent_attribute: &Attrs, ) -> TokenStream { use syn::Fields::*; let subcommands = variants.iter().filter_map(|variant| { let attrs = Attrs::from_struct( variant.span(), &variant.attrs, Name::Derived(variant.ident.clone()), Some(parent_attribute), parent_attribute.casing(), parent_attribute.env_casing(), true, ); let kind = attrs.kind(); match &*kind { Kind::Skip(_) => None, Kind::ExternalSubcommand => { let app_var = Ident::new("app", Span::call_site()); Some(quote_spanned! { attrs.kind().span()=> let #app_var = #app_var.setting( ::structopt::clap::AppSettings::AllowExternalSubcommands ); }) }, Kind::Flatten => { match variant.fields { Unnamed(FieldsUnnamed { ref unnamed, .. }) if unnamed.len() == 1 => { let ty = &unnamed[0]; Some(quote! 
{ let app = <#ty as ::structopt::StructOptInternal>::augment_clap(app); }) }, _ => abort!( variant, "`flatten` is usable only with single-typed tuple variants" ), } }, _ => { let app_var = Ident::new("subcommand", Span::call_site()); let from_attrs = attrs.top_level_methods(); let version = attrs.version(); let arg_block = match variant.fields { // If the variant is named, then gen_augmentation already generates the // top level methods (#from_attrs) and version. Named(ref fields) => gen_augmentation(&fields.named, &app_var, &attrs), Unit => quote!( #app_var#from_attrs#version ), Unnamed(FieldsUnnamed { ref unnamed, .. }) if unnamed.len() == 1 => { let ty = &unnamed[0]; quote_spanned! { ty.span()=> { let #app_var = <#ty as ::structopt::StructOptInternal>::augment_clap( #app_var ); if <#ty as ::structopt::StructOptInternal>::is_subcommand() { #app_var.setting( ::structopt::clap::AppSettings::SubcommandRequiredElseHelp ) } else { #app_var }#from_attrs#version } } } Unnamed(..) => abort!(variant, "non single-typed tuple enums are not supported"), }; let name = attrs.cased_name(); Some(quote! { let app = app.subcommand({ let #app_var = ::structopt::clap::SubCommand::with_name(#name); #arg_block }); }) }, } }); let app_methods = parent_attribute.top_level_methods(); let version = parent_attribute.version(); quote! { fn augment_clap<'a, 'b>( app: ::structopt::clap::App<'a, 'b> ) -> ::structopt::clap::App<'a, 'b> { let app = app #app_methods; #( #subcommands )*; app #version } } } fn gen_from_clap_enum() -> TokenStream { quote! { fn from_clap(matches: &::structopt::clap::ArgMatches) -> Self { ::from_subcommand(matches.subcommand()) .expect("structopt misuse: You likely tried to #[flatten] a struct \ that contains #[subcommand]. This is forbidden.") } } } fn gen_from_subcommand( name: &Ident, variants: &Punctuated, parent_attribute: &Attrs, ) -> TokenStream { use syn::Fields::*; let mut ext_subcmd = None; let (flatten_variants, variants): (Vec<_>, Vec<_>) = variants .iter() .filter_map(|variant| { let attrs = Attrs::from_struct( variant.span(), &variant.attrs, Name::Derived(variant.ident.clone()), Some(parent_attribute), parent_attribute.casing(), parent_attribute.env_casing(), true, ); let variant_name = &variant.ident; match *attrs.kind() { Kind::ExternalSubcommand => { if ext_subcmd.is_some() { abort!( attrs.kind().span(), "Only one variant can be marked with `external_subcommand`, \ this is the second" ); } let ty = match variant.fields { Unnamed(ref fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty, _ => abort!( variant, "The enum variant marked with `external_attribute` must be \ a single-typed tuple, and the type must be either `Vec` \ or `Vec`." ), }; let (span, str_ty, values_of) = match subty_if_name(ty, "Vec") { Some(subty) => { if is_simple_ty(subty, "String") { ( subty.span(), quote!(::std::string::String), quote!(values_of), ) } else { ( subty.span(), quote!(::std::ffi::OsString), quote!(values_of_os), ) } } None => abort!( ty, "The type must be either `Vec` or `Vec` \ to be used with `external_subcommand`." ), }; ext_subcmd = Some((span, variant_name, str_ty, values_of)); None } Kind::Skip(_) => None, _ => Some((variant, attrs)), } }) .partition(|(_, attrs)| match &*attrs.kind() { Kind::Flatten => true, _ => false, }); let other = format_ident!("other"); let matches = format_ident!("matches"); let external = match ext_subcmd { Some((span, var_name, str_ty, values_of)) => quote_spanned! 
{ span=> match #other { ("", ::std::option::Option::None) => None, (external, Some(#matches)) => { ::std::option::Option::Some(#name::#var_name( ::std::iter::once(#str_ty::from(external)) .chain( #matches.#values_of("").into_iter().flatten().map(#str_ty::from) ) .collect::<::std::vec::Vec<_>>() )) } (external, None) => { ::std::option::Option::Some(#name::#var_name( ::std::iter::once(#str_ty::from(external)) .collect::<::std::vec::Vec<_>>() )) } } }, None => quote!(None), }; let match_arms = variants.iter().map(|(variant, attrs)| { let sub_name = attrs.cased_name(); let variant_name = &variant.ident; let constructor_block = match variant.fields { Named(ref fields) => gen_constructor(&fields.named, &attrs), Unit => quote!(), Unnamed(ref fields) if fields.unnamed.len() == 1 => { let ty = &fields.unnamed[0]; quote!( ( <#ty as ::structopt::StructOpt>::from_clap(#matches) ) ) } Unnamed(..) => abort!( variant.ident, "non single-typed tuple enums are not supported" ), }; quote! { (#sub_name, Some(#matches)) => { Some(#name :: #variant_name #constructor_block) } } }); let child_subcommands = flatten_variants.iter().map(|(variant, _attrs)| { let variant_name = &variant.ident; match variant.fields { Unnamed(ref fields) if fields.unnamed.len() == 1 => { let ty = &fields.unnamed[0]; quote! { if let Some(res) = <#ty as ::structopt::StructOptInternal>::from_subcommand(#other) { return Some(#name :: #variant_name (res)); } } } _ => abort!( variant, "`flatten` is usable only with single-typed tuple variants" ), } }); quote! { fn from_subcommand<'a, 'b>( sub: (&'b str, Option<&'b ::structopt::clap::ArgMatches<'a>>) ) -> Option { match sub { #( #match_arms, )* #other => { #( #child_subcommands )else*; #external } } } } } #[cfg(feature = "paw")] fn gen_paw_impl( impl_generics: &ImplGenerics, name: &Ident, ty_generics: &TypeGenerics, where_clause: &TokenStream, ) -> TokenStream { quote! { impl #impl_generics ::structopt::paw::ParseArgs for #name #ty_generics #where_clause { type Error = std::io::Error; fn parse_args() -> std::result::Result { Ok(<#name as ::structopt::StructOpt>::from_args()) } } } } #[cfg(not(feature = "paw"))] fn gen_paw_impl(_: &ImplGenerics, _: &Ident, _: &TypeGenerics, _: &TokenStream) -> TokenStream { TokenStream::new() } fn split_structopt_generics_for_impl( generics: &Generics, ) -> (ImplGenerics, TypeGenerics, TokenStream) { use syn::{token::Add, TypeParamBound::Trait}; fn path_ends_with(path: &Path, ident: &str) -> bool { path.segments.last().unwrap().ident == ident } fn type_param_bounds_contains(bounds: &Punctuated, ident: &str) -> bool { for bound in bounds { if let Trait(bound) = bound { if path_ends_with(&bound.path, ident) { return true; } } } return false; } struct TraitBoundAmendments { tokens: TokenStream, need_where: bool, need_comma: bool, } impl TraitBoundAmendments { fn new(where_clause: Option<&WhereClause>) -> Self { let tokens = TokenStream::new(); let (need_where, need_comma) = if let Some(where_clause) = where_clause { if where_clause.predicates.trailing_punct() { (false, false) } else { (false, true) } } else { (true, false) }; Self { tokens, need_where, need_comma, } } fn add(&mut self, amendment: TokenStream) { if self.need_where { self.tokens.extend(quote! { where }); self.need_where = false; } if self.need_comma { self.tokens.extend(quote! 
{ , }); } self.tokens.extend(amendment); self.need_comma = true; } fn into_tokens(self) -> TokenStream { self.tokens } } let mut trait_bound_amendments = TraitBoundAmendments::new(generics.where_clause.as_ref()); for param in &generics.params { if let GenericParam::Type(param) = param { let param_ident = ¶m.ident; if type_param_bounds_contains(¶m.bounds, "StructOpt") { trait_bound_amendments .add(quote! { #param_ident : ::structopt::StructOptInternal }); } } } if let Some(where_clause) = &generics.where_clause { for predicate in &where_clause.predicates { if let WherePredicate::Type(predicate) = predicate { let predicate_bounded_ty = &predicate.bounded_ty; if type_param_bounds_contains(&predicate.bounds, "StructOpt") { trait_bound_amendments .add(quote! { #predicate_bounded_ty : ::structopt::StructOptInternal }); } } } } let trait_bound_amendments = trait_bound_amendments.into_tokens(); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let where_clause = quote! { #where_clause #trait_bound_amendments }; (impl_generics, ty_generics, where_clause) } fn impl_structopt_for_struct( name: &Ident, fields: &Punctuated, attrs: &[Attribute], generics: &Generics, ) -> TokenStream { let (impl_generics, ty_generics, where_clause) = split_structopt_generics_for_impl(&generics); let basic_clap_app_gen = gen_clap_struct(attrs); let augment_clap = gen_augment_clap(fields, &basic_clap_app_gen.attrs); let from_clap = gen_from_clap(name, fields, &basic_clap_app_gen.attrs); let paw_impl = gen_paw_impl(&impl_generics, name, &ty_generics, &where_clause); let clap_tokens = basic_clap_app_gen.tokens; quote! { #[allow(unused_variables)] #[allow(unknown_lints)] #[allow( clippy::style, clippy::complexity, clippy::pedantic, clippy::restriction, clippy::perf, clippy::deprecated, clippy::nursery, clippy::cargo )] #[deny(clippy::correctness)] #[allow(dead_code, unreachable_code)] impl #impl_generics ::structopt::StructOpt for #name #ty_generics #where_clause { #clap_tokens #from_clap } #[allow(unused_variables)] #[allow(unknown_lints)] #[allow( clippy::style, clippy::complexity, clippy::pedantic, clippy::restriction, clippy::perf, clippy::deprecated, clippy::nursery, clippy::cargo )] #[deny(clippy::correctness)] #[allow(dead_code, unreachable_code)] impl #impl_generics ::structopt::StructOptInternal for #name #ty_generics #where_clause { #augment_clap fn is_subcommand() -> bool { false } } #paw_impl } } fn impl_structopt_for_enum( name: &Ident, variants: &Punctuated, attrs: &[Attribute], generics: &Generics, ) -> TokenStream { let (impl_generics, ty_generics, where_clause) = split_structopt_generics_for_impl(&generics); let basic_clap_app_gen = gen_clap_enum(attrs); let clap_tokens = basic_clap_app_gen.tokens; let attrs = basic_clap_app_gen.attrs; let augment_clap = gen_augment_clap_enum(variants, &attrs); let from_clap = gen_from_clap_enum(); let from_subcommand = gen_from_subcommand(name, variants, &attrs); let paw_impl = gen_paw_impl(&impl_generics, name, &ty_generics, &where_clause); quote! 
{ #[allow(unknown_lints)] #[allow(unused_variables, dead_code, unreachable_code)] #[allow( clippy::style, clippy::complexity, clippy::pedantic, clippy::restriction, clippy::perf, clippy::deprecated, clippy::nursery, clippy::cargo )] #[deny(clippy::correctness)] impl #impl_generics ::structopt::StructOpt for #name #ty_generics #where_clause { #clap_tokens #from_clap } #[allow(unused_variables)] #[allow(unknown_lints)] #[allow( clippy::style, clippy::complexity, clippy::pedantic, clippy::restriction, clippy::perf, clippy::deprecated, clippy::nursery, clippy::cargo )] #[deny(clippy::correctness)] #[allow(dead_code, unreachable_code)] impl #impl_generics ::structopt::StructOptInternal for #name #ty_generics #where_clause { #augment_clap #from_subcommand fn is_subcommand() -> bool { true } } #paw_impl } } fn impl_structopt(input: &DeriveInput) -> TokenStream { use syn::Data::*; let struct_name = &input.ident; set_dummy(quote! { impl ::structopt::StructOpt for #struct_name { fn clap<'a, 'b>() -> ::structopt::clap::App<'a, 'b> { unimplemented!() } fn from_clap(_matches: &::structopt::clap::ArgMatches) -> Self { unimplemented!() } } impl ::structopt::StructOptInternal for #struct_name {} }); match input.data { Struct(DataStruct { fields: syn::Fields::Named(ref fields), .. }) => impl_structopt_for_struct(struct_name, &fields.named, &input.attrs, &input.generics), Enum(ref e) => { impl_structopt_for_enum(struct_name, &e.variants, &input.attrs, &input.generics) } _ => abort_call_site!("structopt only supports non-tuple structs and enums"), } } structopt-derive-0.4.18/src/parse.rs000064400000000000000000000213410072674642500155410ustar 00000000000000use std::iter::FromIterator; use proc_macro_error::{abort, ResultExt}; use quote::ToTokens; use syn::{ self, parenthesized, parse::{Parse, ParseBuffer, ParseStream}, punctuated::Punctuated, Attribute, Expr, ExprLit, Ident, Lit, LitBool, LitStr, Token, }; pub enum StructOptAttr { // single-identifier attributes Short(Ident), Long(Ident), Env(Ident), Flatten(Ident), Subcommand(Ident), ExternalSubcommand(Ident), NoVersion(Ident), VerbatimDocComment(Ident), // ident [= "string literal"] About(Ident, Option), Author(Ident, Option), DefaultValue(Ident, Option), // ident = "string literal" Version(Ident, LitStr), RenameAllEnv(Ident, LitStr), RenameAll(Ident, LitStr), NameLitStr(Ident, LitStr), // parse(parser_kind [= parser_func]) Parse(Ident, ParserSpec), // ident [= arbitrary_expr] Skip(Ident, Option), // ident = arbitrary_expr NameExpr(Ident, Expr), // ident(arbitrary_expr,*) MethodCall(Ident, Vec), } impl Parse for StructOptAttr { fn parse(input: ParseStream<'_>) -> syn::Result { use self::StructOptAttr::*; let name: Ident = input.parse()?; let name_str = name.to_string(); if input.peek(Token![=]) { // `name = value` attributes. 
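            // e.g. `#[structopt(version = "0.3")]`, `#[structopt(rename_all = "kebab-case")]`,
            // `#[structopt(default_value = "42")]`, or `#[structopt(skip = expr)]`
            // (string-literal values and arbitrary expressions are told apart below).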
let assign_token = input.parse::()?; // skip '=' if input.peek(LitStr) { let lit: LitStr = input.parse()?; let lit_str = lit.value(); let check_empty_lit = |s| { if lit_str.is_empty() { abort!( lit, "`#[structopt({} = \"\")]` is deprecated in structopt 0.3, \ now it's default behavior", s ); } }; match &*name_str { "rename_all" => Ok(RenameAll(name, lit)), "rename_all_env" => Ok(RenameAllEnv(name, lit)), "default_value" => Ok(DefaultValue(name, Some(lit))), "version" => { check_empty_lit("version"); Ok(Version(name, lit)) } "author" => { check_empty_lit("author"); Ok(Author(name, Some(lit))) } "about" => { check_empty_lit("about"); Ok(About(name, Some(lit))) } "skip" => { let expr = ExprLit { attrs: vec![], lit: Lit::Str(lit), }; let expr = Expr::Lit(expr); Ok(Skip(name, Some(expr))) } _ => Ok(NameLitStr(name, lit)), } } else { match input.parse::() { Ok(expr) => { if name_str == "skip" { Ok(Skip(name, Some(expr))) } else { Ok(NameExpr(name, expr)) } } Err(_) => abort! { assign_token, "expected `string literal` or `expression` after `=`" }, } } } else if input.peek(syn::token::Paren) { // `name(...)` attributes. let nested; parenthesized!(nested in input); match name_str.as_ref() { "parse" => { let parser_specs: Punctuated = nested.parse_terminated(ParserSpec::parse)?; if parser_specs.len() == 1 { Ok(Parse(name, parser_specs[0].clone())) } else { abort!(name, "`parse` must have exactly one argument") } } "raw" => match nested.parse::() { Ok(bool_token) => { let expr = ExprLit { attrs: vec![], lit: Lit::Bool(bool_token), }; let expr = Expr::Lit(expr); Ok(MethodCall(name, vec![expr])) } Err(_) => { abort!(name, "`#[structopt(raw(...))` attributes are removed in structopt 0.3, \ they are replaced with raw methods"; help = "if you meant to call `clap::Arg::raw()` method \ you should use bool literal, like `raw(true)` or `raw(false)`"; note = raw_method_suggestion(nested); ); } }, _ => { let method_args: Punctuated<_, Token![,]> = nested.parse_terminated(Expr::parse)?; Ok(MethodCall(name, Vec::from_iter(method_args))) } } } else { // Attributes represented with a sole identifier. 
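            // e.g. `#[structopt(short)]`, `#[structopt(long)]`, `#[structopt(flatten)]`,
            // `#[structopt(subcommand)]`, or `#[structopt(skip)]`.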
            match name_str.as_ref() {
                "long" => Ok(Long(name)),
                "short" => Ok(Short(name)),
                "env" => Ok(Env(name)),
                "flatten" => Ok(Flatten(name)),
                "subcommand" => Ok(Subcommand(name)),
                "external_subcommand" => Ok(ExternalSubcommand(name)),
                "no_version" => Ok(NoVersion(name)),
                "verbatim_doc_comment" => Ok(VerbatimDocComment(name)),

                "default_value" => Ok(DefaultValue(name, None)),
                "about" => Ok(About(name, None)),
                "author" => Ok(Author(name, None)),

                "skip" => Ok(Skip(name, None)),

                "version" => abort!(
                    name,
                    "#[structopt(version)] is invalid attribute, \
                     structopt 0.3 inherits version from Cargo.toml by default, \
                     no attribute needed"
                ),

                _ => abort!(name, "unexpected attribute: {}", name_str),
            }
        }
    }
}

#[derive(Clone)]
pub struct ParserSpec {
    pub kind: Ident,
    pub eq_token: Option<Token![=]>,
    pub parse_func: Option<Expr>,
}

impl Parse for ParserSpec {
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        let kind = input
            .parse()
            .map_err(|_| input.error("parser specification must start with identifier"))?;
        let eq_token = input.parse()?;
        let parse_func = match eq_token {
            None => None,
            Some(_) => Some(input.parse()?),
        };

        Ok(ParserSpec {
            kind,
            eq_token,
            parse_func,
        })
    }
}

fn raw_method_suggestion(ts: ParseBuffer) -> String {
    let do_parse = move || -> Result<(Ident, Punctuated<Expr, Token![,]>), syn::Error> {
        let name = ts.parse()?;
        let _eq: Token![=] = ts.parse()?;
        let val: LitStr = ts.parse()?;
        let exprs = val.parse_with(Punctuated::<Expr, Token![,]>::parse_terminated)?;
        Ok((name, exprs))
    };

    fn to_string<T: ToTokens>(val: &T) -> String {
        val.to_token_stream()
            .to_string()
            .replace(" ", "")
            .replace(",", ", ")
    }

    if let Ok((name, exprs)) = do_parse() {
        let suggestion = if exprs.len() == 1 {
            let val = to_string(&exprs[0]);
            format!(" = {}", val)
        } else {
            let val = exprs
                .into_iter()
                .map(|expr| to_string(&expr))
                .collect::<Vec<_>>()
                .join(", ");
            format!("({})", val)
        };

        format!(
            "if you need to call `clap::Arg/App::{}` method you \
             can do it like this: #[structopt({}{})]",
            name, name, suggestion
        )
    } else {
        "if you need to call some method from `clap::Arg/App` \
         you should use raw method, see \
         https://docs.rs/structopt/0.3/structopt/#raw-methods"
            .into()
    }
}

pub fn parse_structopt_attributes(all_attrs: &[Attribute]) -> Vec<StructOptAttr> {
    all_attrs
        .iter()
        .filter(|attr| attr.path.is_ident("structopt"))
        .flat_map(|attr| {
            attr.parse_args_with(Punctuated::<StructOptAttr, Token![,]>::parse_terminated)
                .unwrap_or_abort()
        })
        .collect()
}
structopt-derive-0.4.18/src/spanned.rs000064400000000000000000000035570072674642500160670ustar 00000000000000
use proc_macro2::{Ident, Span, TokenStream};
use quote::ToTokens;
use std::ops::{Deref, DerefMut};
use syn::LitStr;

/// An entity with a span attached.
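///
/// For example, an attribute value taken from `#[structopt(rename_all = "kebab")]`
/// may be stored as `Sp<String>`, so later diagnostics can point back at the
/// original tokens rather than at the macro call site.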
#[derive(Debug, Clone)]
pub struct Sp<T> {
    span: Span,
    val: T,
}

impl<T> Sp<T> {
    pub fn new(val: T, span: Span) -> Self {
        Sp { val, span }
    }

    pub fn call_site(val: T) -> Self {
        Sp {
            val,
            span: Span::call_site(),
        }
    }

    pub fn span(&self) -> Span {
        self.span
    }
}

impl<T> Deref for Sp<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.val
    }
}

impl<T> DerefMut for Sp<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.val
    }
}

impl From<Ident> for Sp<String> {
    fn from(ident: Ident) -> Self {
        Sp {
            val: ident.to_string(),
            span: ident.span(),
        }
    }
}

impl From<LitStr> for Sp<String> {
    fn from(lit: LitStr) -> Self {
        Sp {
            val: lit.value(),
            span: lit.span(),
        }
    }
}

impl<'a> From<Sp<&'a str>> for Sp<String> {
    fn from(sp: Sp<&'a str>) -> Self {
        Sp::new(sp.val.into(), sp.span)
    }
}

impl<T: PartialEq> PartialEq<T> for Sp<T> {
    fn eq(&self, other: &T) -> bool {
        self.val == *other
    }
}

impl<T: PartialEq> PartialEq for Sp<T> {
    fn eq(&self, other: &Sp<T>) -> bool {
        self.val == **other
    }
}

impl<T: AsRef<str>> AsRef<str> for Sp<T> {
    fn as_ref(&self) -> &str {
        self.val.as_ref()
    }
}

impl<T: ToTokens> ToTokens for Sp<T> {
    fn to_tokens(&self, stream: &mut TokenStream) {
        // This is the simplest of the correct ways I can come up with
        // to change the span on an arbitrary token tree.
        let tt = self.val.to_token_stream().into_iter().map(|mut tt| {
            tt.set_span(self.span);
            tt
        });

        stream.extend(tt);
    }
}
structopt-derive-0.4.18/src/ty.rs000064400000000000000000000063040072674642500150650ustar 00000000000000
//! Special types handling

use crate::spanned::Sp;

use syn::{
    spanned::Spanned, GenericArgument, Path, PathArguments, PathArguments::AngleBracketed,
    PathSegment, Type, TypePath,
};

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Ty {
    Bool,
    Vec,
    Option,
    OptionOption,
    OptionVec,
    Other,
}

impl Ty {
    pub fn from_syn_ty(ty: &syn::Type) -> Sp<Self> {
        use Ty::*;
        let t = |kind| Sp::new(kind, ty.span());

        if is_simple_ty(ty, "bool") {
            t(Bool)
        } else if is_generic_ty(ty, "Vec") {
            t(Vec)
        } else if let Some(subty) = subty_if_name(ty, "Option") {
            if is_generic_ty(subty, "Option") {
                t(OptionOption)
            } else if is_generic_ty(subty, "Vec") {
                t(OptionVec)
            } else {
                t(Option)
            }
        } else {
            t(Other)
        }
    }
}

pub fn sub_type(ty: &syn::Type) -> Option<&syn::Type> {
    subty_if(ty, |_| true)
}

fn only_last_segment(ty: &syn::Type) -> Option<&PathSegment> {
    match ty {
        Type::Path(TypePath {
            qself: None,
            path:
                Path {
                    leading_colon: None,
                    segments,
                },
        }) => only_one(segments.iter()),

        _ => None,
    }
}

fn subty_if<F>(ty: &syn::Type, f: F) -> Option<&syn::Type>
where
    F: FnOnce(&PathSegment) -> bool,
{
    let ty = strip_group(ty);

    only_last_segment(ty)
        .filter(|segment| f(segment))
        .and_then(|segment| {
            if let AngleBracketed(args) = &segment.arguments {
                only_one(args.args.iter()).and_then(|generic| {
                    if let GenericArgument::Type(ty) = generic {
                        Some(ty)
                    } else {
                        None
                    }
                })
            } else {
                None
            }
        })
}

pub fn subty_if_name<'a>(ty: &'a syn::Type, name: &str) -> Option<&'a syn::Type> {
    subty_if(ty, |seg| seg.ident == name)
}

pub fn is_simple_ty(ty: &syn::Type, name: &str) -> bool {
    let ty = strip_group(ty);

    only_last_segment(ty)
        .map(|segment| {
            if let PathArguments::None = segment.arguments {
                segment.ident == name
            } else {
                false
            }
        })
        .unwrap_or(false)
}

// If the struct is placed inside of a macro_rules! declaration,
// in some circumstances, the tokens inside will be enclosed
// in `proc_macro::Group` delimited by invisible `proc_macro::Delimiter::None`.
//
// In syn speak, this is encoded via `*::Group` variants. We don't really care about
// that, so let's just strip it.
//
// Details: https://doc.rust-lang.org/proc_macro/enum.Delimiter.html#variant.None
// See also: https://github.com/TeXitoi/structopt/issues/439
fn strip_group(mut ty: &syn::Type) -> &syn::Type {
    while let Type::Group(group) = ty {
        ty = &*group.elem;
    }

    ty
}

fn is_generic_ty(ty: &syn::Type, name: &str) -> bool {
    subty_if_name(ty, name).is_some()
}

fn only_one<I, T>(mut iter: I) -> Option<T>
where
    I: Iterator<Item = T>,
{
    iter.next().filter(|_| iter.next().is_none())
}
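
// Illustrative sketch, not part of the original crate: assuming `syn::parse_str`
// is usable here (the crate already depends on `syn` with the "full" feature),
// the classification done by `Ty::from_syn_ty` could be exercised roughly like this.
#[cfg(test)]
mod ty_sketch {
    use super::Ty;

    #[test]
    fn classifies_common_field_types() {
        // `Option<Vec<String>>` should hit the `OptionVec` branch of `from_syn_ty`.
        let ty: syn::Type = syn::parse_str("Option<Vec<String>>").expect("valid type");
        assert_eq!(*Ty::from_syn_ty(&ty), Ty::OptionVec);

        // A plain `bool` should be recognized as `Ty::Bool`.
        let ty: syn::Type = syn::parse_str("bool").expect("valid type");
        assert_eq!(*Ty::from_syn_ty(&ty), Ty::Bool);
    }
}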