prost-build-0.11.9/.cargo_vcs_info.json

{
  "git": {
    "sha1": "cab3c9459630bac040aecefa8d3afde7e598e50f"
  },
  "path_in_vcs": "prost-build"
}

prost-build-0.11.9/Cargo.toml

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
rust-version = "1.60"
name = "prost-build"
version = "0.11.9"
authors = [
    "Dan Burkert <dan@danburkert.com>",
    "Lucio Franco <luciofranco14@gmail.com>",
    "Tokio Contributors <team@tokio.rs>",
]
description = "A Protocol Buffers implementation for the Rust Language."
documentation = "https://docs.rs/prost-build"
readme = "README.md"
license = "Apache-2.0"
repository = "https://github.com/tokio-rs/prost"

[dependencies.bytes]
version = "1"
default-features = false

[dependencies.heck]
version = "0.4"

[dependencies.itertools]
version = "0.10"
features = ["use_alloc"]
default-features = false

[dependencies.lazy_static]
version = "1.4.0"

[dependencies.log]
version = "0.4"

[dependencies.multimap]
version = "0.8"
default-features = false

[dependencies.petgraph]
version = "0.6"
default-features = false

[dependencies.prettyplease]
version = "0.1"
optional = true

[dependencies.prost]
version = "0.11.9"
default-features = false

[dependencies.prost-types]
version = "0.11.9"
default-features = false

[dependencies.pulldown-cmark]
version = "0.9.1"
optional = true
default-features = false

[dependencies.pulldown-cmark-to-cmark]
version = "10.0.1"
optional = true

[dependencies.regex]
version = "1.5.5"
features = [
    "std",
    "unicode-bool",
]
default-features = false

[dependencies.syn]
version = "1"
features = ["full"]
optional = true

[dependencies.tempfile]
version = "3"

[dependencies.which]
version = "4"

[dev-dependencies.env_logger]
version = "0.8"
default-features = false

[features]
cleanup-markdown = [
    "pulldown-cmark",
    "pulldown-cmark-to-cmark",
]
default = ["format"]
format = [
    "prettyplease",
    "syn",
]

prost-build-0.11.9/Cargo.toml.orig

[package]
name = "prost-build"
version = "0.11.9"
authors = [
    "Dan Burkert <dan@danburkert.com>",
    "Lucio Franco <luciofranco14@gmail.com>",
    "Tokio Contributors <team@tokio.rs>",
]
license = "Apache-2.0"
repository = "https://github.com/tokio-rs/prost"
documentation = "https://docs.rs/prost-build"
readme = "README.md"
description = "A Protocol Buffers implementation for the Rust Language."
edition = "2021" rust-version = "1.60" [features] default = ["format"] format = ["prettyplease", "syn"] # When MSRV moves to 1.60, these can change to dep: cleanup-markdown = ["pulldown-cmark", "pulldown-cmark-to-cmark"] [dependencies] bytes = { version = "1", default-features = false } heck = "0.4" itertools = { version = "0.10", default-features = false, features = ["use_alloc"] } log = "0.4" multimap = { version = "0.8", default-features = false } petgraph = { version = "0.6", default-features = false } prost = { version = "0.11.9", path = "..", default-features = false } prost-types = { version = "0.11.9", path = "../prost-types", default-features = false } tempfile = "3" lazy_static = "1.4.0" regex = { version = "1.5.5", default-features = false, features = ["std", "unicode-bool"] } which = "4" prettyplease = { version = "0.1", optional = true } syn = { version = "1", features = ["full"], optional = true } # These two must be kept in sync, used for `cleanup-markdown` feature. pulldown-cmark = { version = "0.9.1", optional = true, default-features = false } pulldown-cmark-to-cmark = { version = "10.0.1", optional = true } [dev-dependencies] env_logger = { version = "0.8", default-features = false } prost-build-0.11.9/LICENSE000064400000000000000000000251371046102023000131750ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. prost-build-0.11.9/README.md000064400000000000000000000020031046102023000134320ustar 00000000000000[![Documentation](https://docs.rs/prost-build/badge.svg)](https://docs.rs/prost-build/) [![Crate](https://img.shields.io/crates/v/prost-build.svg)](https://crates.io/crates/prost-build) # `prost-build` `prost-build` makes it easy to generate Rust code from `.proto` files as part of a Cargo build. See the crate [documentation](https://docs.rs/prost-build/) for examples of how to integrate `prost-build` into a Cargo project. ## `protoc` `prost-build` uses `protoc` to parse the proto files. There are two ways to make `protoc` available for `prost-build`: * Include `protoc` in your `PATH`. This can be done by following the [`protoc` install instructions]. * Pass the `PROTOC=` environment variable with the path to `protoc`. [`protoc` install instructions]: https://github.com/protocolbuffers/protobuf#protocol-compiler-installation ## License `prost-build` is distributed under the terms of the Apache License (Version 2.0). See [LICENSE](../LICENSE) for details. 
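
For reference, the `protoc` setup described above feeds into an ordinary Cargo build script. A minimal `build.rs` sketch (the proto path and include directory here are illustrative, not files from this repository):

```rust
fn main() -> std::io::Result<()> {
    // Either have `protoc` on PATH, or point prost-build at a binary:
    // std::env::set_var("PROTOC", "/usr/local/bin/protoc");
    prost_build::compile_protos(&["src/items.proto"], &["src/"])?;
    Ok(())
}
```

The generated module then lands in `OUT_DIR`, named after the proto package, and can be pulled into the crate with `include!`.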
Copyright 2017 Dan Burkert prost-build-0.11.9/src/ast.rs000064400000000000000000000315341046102023000141120ustar 00000000000000use lazy_static::lazy_static; use prost_types::source_code_info::Location; #[cfg(feature = "cleanup-markdown")] use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag}; use regex::Regex; /// Comments on a Protobuf item. #[derive(Debug, Default, Clone)] pub struct Comments { /// Leading detached blocks of comments. pub leading_detached: Vec>, /// Leading comments. pub leading: Vec, /// Trailing comments. pub trailing: Vec, } impl Comments { pub(crate) fn from_location(location: &Location) -> Comments { let leading_detached = location .leading_detached_comments .iter() .map(get_lines) .collect(); let leading = location .leading_comments .as_ref() .map_or(Vec::new(), get_lines); let trailing = location .trailing_comments .as_ref() .map_or(Vec::new(), get_lines); Comments { leading_detached, leading, trailing, } } /// Appends the comments to a buffer with indentation. /// /// Each level of indentation corresponds to four space (' ') characters. pub fn append_with_indent(&self, indent_level: u8, buf: &mut String) { // Append blocks of detached comments. for detached_block in &self.leading_detached { for line in detached_block { for _ in 0..indent_level { buf.push_str(" "); } buf.push_str("//"); buf.push_str(&Self::sanitize_line(line)); buf.push('\n'); } buf.push('\n'); } // Append leading comments. for line in &self.leading { for _ in 0..indent_level { buf.push_str(" "); } buf.push_str("///"); buf.push_str(&Self::sanitize_line(line)); buf.push('\n'); } // Append an empty comment line if there are leading and trailing comments. if !self.leading.is_empty() && !self.trailing.is_empty() { for _ in 0..indent_level { buf.push_str(" "); } buf.push_str("///\n"); } // Append trailing comments. for line in &self.trailing { for _ in 0..indent_level { buf.push_str(" "); } buf.push_str("///"); buf.push_str(&Self::sanitize_line(line)); buf.push('\n'); } } /// Checks whether a RustDoc line should be indented. /// /// Lines should be indented if: /// - they are non-empty, AND /// - they don't already start with a space /// OR /// - they start with several spaces. /// /// The last condition can happen in the case of multi-line Markdown lists /// such as: /// /// - this is a list /// where some elements spans multiple lines /// - but not all elements fn should_indent(sanitized_line: &str) -> bool { let mut chars = sanitized_line.chars(); chars .next() .map_or(false, |c| c != ' ' || chars.next() == Some(' ')) } /// Sanitizes the line for rustdoc by performing the following operations: /// - escape urls as /// - escape `[` & `]` fn sanitize_line(line: &str) -> String { lazy_static! { static ref RULE_URL: Regex = Regex::new(r"https?://[^\s)]+").unwrap(); static ref RULE_BRACKETS: Regex = Regex::new(r"(\[)(\S+)(])").unwrap(); } let mut s = RULE_URL.replace_all(line, r"<$0>").to_string(); s = RULE_BRACKETS.replace_all(&s, r"\$1$2\$3").to_string(); if Self::should_indent(&s) { s.insert(0, ' '); } s } } /// A service descriptor. #[derive(Debug, Clone)] pub struct Service { /// The service name in Rust style. pub name: String, /// The service name as it appears in the .proto file. pub proto_name: String, /// The package name as it appears in the .proto file. pub package: String, /// The service comments. pub comments: Comments, /// The service methods. pub methods: Vec, /// The service options. pub options: prost_types::ServiceOptions, } /// A service method descriptor. 
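// Both `Service` above and the `Method` struct below are handed off to a
// user-supplied `ServiceGenerator` (see `CodeGenerator::push_service`), which
// appends whatever code it likes to the output buffer. A minimal sketch of
// such a generator (hypothetical, not part of this crate):
//
//     struct CommentedStubs;
//
//     impl crate::ServiceGenerator for CommentedStubs {
//         fn generate(&mut self, service: crate::Service, buf: &mut String) {
//             for method in &service.methods {
//                 // Emit one marker comment per RPC, e.g. `// Greeting/Hello`.
//                 buf.push_str(&format!("// {}/{}\n", service.proto_name, method.proto_name));
//             }
//         }
//     }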
#[derive(Debug, Clone)] pub struct Method { /// The name of the method in Rust style. pub name: String, /// The name of the method as it appears in the .proto file. pub proto_name: String, /// The method comments. pub comments: Comments, /// The input Rust type. pub input_type: String, /// The output Rust type. pub output_type: String, /// The input Protobuf type. pub input_proto_type: String, /// The output Protobuf type. pub output_proto_type: String, /// The method options. pub options: prost_types::MethodOptions, /// Identifies if client streams multiple client messages. pub client_streaming: bool, /// Identifies if server streams multiple server messages. pub server_streaming: bool, } #[cfg(not(feature = "cleanup-markdown"))] fn get_lines(comments: S) -> Vec where S: AsRef, { comments.as_ref().lines().map(str::to_owned).collect() } #[cfg(feature = "cleanup-markdown")] fn get_lines(comments: S) -> Vec where S: AsRef, { let comments = comments.as_ref(); let mut buffer = String::with_capacity(comments.len() + 256); let opts = pulldown_cmark_to_cmark::Options { code_block_token_count: 3, ..Default::default() }; match pulldown_cmark_to_cmark::cmark_with_options( Parser::new_ext(comments, Options::all() - Options::ENABLE_SMART_PUNCTUATION).map( |event| { fn map_codeblock(kind: CodeBlockKind) -> CodeBlockKind { match kind { CodeBlockKind::Fenced(s) => { if &*s == "rust" { CodeBlockKind::Fenced("compile_fail".into()) } else { CodeBlockKind::Fenced(format!("text,{}", s).into()) } } CodeBlockKind::Indented => CodeBlockKind::Fenced("text".into()), } } match event { Event::Start(Tag::CodeBlock(kind)) => { Event::Start(Tag::CodeBlock(map_codeblock(kind))) } Event::End(Tag::CodeBlock(kind)) => { Event::End(Tag::CodeBlock(map_codeblock(kind))) } e => e, } }, ), &mut buffer, opts, ) { Ok(_) => buffer.lines().map(str::to_owned).collect(), Err(_) => comments.lines().map(str::to_owned).collect(), } } #[cfg(test)] mod tests { use super::*; #[test] fn test_comment_append_with_indent_leaves_prespaced_lines() { struct TestCases { name: &'static str, input: String, expected: String, } let tests = vec![ TestCases { name: "existing_space", input: " A line with a single leading space.".to_string(), expected: "/// A line with a single leading space.\n".to_string(), }, TestCases { name: "non_existing_space", input: "A line without a single leading space.".to_string(), expected: "/// A line without a single leading space.\n".to_string(), }, TestCases { name: "empty", input: "".to_string(), expected: "///\n".to_string(), }, TestCases { name: "multiple_leading_spaces", input: " a line with several leading spaces, such as in a markdown list" .to_string(), expected: "/// a line with several leading spaces, such as in a markdown list\n" .to_string(), }, ]; for t in tests { let input = Comments { leading_detached: vec![], leading: vec![], trailing: vec![t.input], }; let mut actual = "".to_string(); input.append_with_indent(0, &mut actual); assert_eq!(t.expected, actual, "failed {}", t.name); } } #[test] fn test_comment_append_with_indent_sanitizes_comment_doc_url() { struct TestCases { name: &'static str, input: String, expected: String, } let tests = vec![ TestCases { name: "valid_http", input: "See https://www.rust-lang.org/".to_string(), expected: "/// See \n".to_string(), }, TestCases { name: "valid_https", input: "See https://www.rust-lang.org/".to_string(), expected: "/// See \n".to_string(), }, TestCases { name: "valid_https_parenthesis", input: "See (https://www.rust-lang.org/)".to_string(), expected: "/// See 
()\n".to_string(), }, TestCases { name: "invalid", input: "See note://abc".to_string(), expected: "/// See note://abc\n".to_string(), }, ]; for t in tests { let input = Comments { leading_detached: vec![], leading: vec![], trailing: vec![t.input], }; let mut actual = "".to_string(); input.append_with_indent(0, &mut actual); assert_eq!(t.expected, actual, "failed {}", t.name); } } #[test] fn test_comment_append_with_indent_sanitizes_square_brackets() { struct TestCases { name: &'static str, input: String, expected: String, } let tests = vec![ TestCases { name: "valid_brackets", input: "foo [bar] baz".to_string(), expected: "/// foo \\[bar\\] baz\n".to_string(), }, TestCases { name: "invalid_start_bracket", input: "foo [= baz".to_string(), expected: "/// foo [= baz\n".to_string(), }, TestCases { name: "invalid_end_bracket", input: "foo =] baz".to_string(), expected: "/// foo =] baz\n".to_string(), }, TestCases { name: "invalid_bracket_combination", input: "[0, 9)".to_string(), expected: "/// [0, 9)\n".to_string(), }, ]; for t in tests { let input = Comments { leading_detached: vec![], leading: vec![], trailing: vec![t.input], }; let mut actual = "".to_string(); input.append_with_indent(0, &mut actual); assert_eq!(t.expected, actual, "failed {}", t.name); } } #[test] fn test_codeblocks() { struct TestCase { name: &'static str, input: &'static str, #[allow(unused)] cleanedup_expected: Vec<&'static str>, } let tests = vec![ TestCase { name: "unlabelled_block", input: " thingy\n", cleanedup_expected: vec!["", "```text", "thingy", "```"], }, TestCase { name: "rust_block", input: "```rust\nfoo.bar()\n```\n", cleanedup_expected: vec!["", "```compile_fail", "foo.bar()", "```"], }, TestCase { name: "js_block", input: "```javascript\nfoo.bar()\n```\n", cleanedup_expected: vec!["", "```text,javascript", "foo.bar()", "```"], }, ]; for t in tests { let loc = Location { path: vec![], span: vec![], leading_comments: Some(t.input.into()), trailing_comments: None, leading_detached_comments: vec![], }; let comments = Comments::from_location(&loc); #[cfg(feature = "cleanup-markdown")] let expected = t.cleanedup_expected; #[cfg(not(feature = "cleanup-markdown"))] let expected: Vec<&str> = t.input.lines().collect(); assert_eq!(expected, comments.leading, "failed {}", t.name); } } } prost-build-0.11.9/src/code_generator.rs000064400000000000000000001205171046102023000163030ustar 00000000000000use std::ascii; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; use std::iter; use itertools::{Either, Itertools}; use log::debug; use multimap::MultiMap; use prost_types::field_descriptor_proto::{Label, Type}; use prost_types::source_code_info::Location; use prost_types::{ DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, FieldDescriptorProto, FieldOptions, FileDescriptorProto, OneofDescriptorProto, ServiceDescriptorProto, SourceCodeInfo, }; use crate::ast::{Comments, Method, Service}; use crate::extern_paths::ExternPaths; use crate::ident::{to_snake, to_upper_camel}; use crate::message_graph::MessageGraph; use crate::{BytesType, Config, MapType}; #[derive(PartialEq)] enum Syntax { Proto2, Proto3, } pub struct CodeGenerator<'a> { config: &'a mut Config, package: String, source_info: Option, syntax: Syntax, message_graph: &'a MessageGraph, extern_paths: &'a ExternPaths, depth: u8, path: Vec, buf: &'a mut String, } fn push_indent(buf: &mut String, depth: u8) { for _ in 0..depth { buf.push_str(" "); } } impl<'a> CodeGenerator<'a> { pub fn generate( config: &mut Config, message_graph: &MessageGraph, 
extern_paths: &ExternPaths, file: FileDescriptorProto, buf: &mut String, ) { let source_info = file.source_code_info.map(|mut s| { s.location.retain(|loc| { let len = loc.path.len(); len > 0 && len % 2 == 0 }); s.location.sort_by(|a, b| a.path.cmp(&b.path)); s }); let syntax = match file.syntax.as_ref().map(String::as_str) { None | Some("proto2") => Syntax::Proto2, Some("proto3") => Syntax::Proto3, Some(s) => panic!("unknown syntax: {}", s), }; let mut code_gen = CodeGenerator { config, package: file.package.unwrap_or_default(), source_info, syntax, message_graph, extern_paths, depth: 0, path: Vec::new(), buf, }; debug!( "file: {:?}, package: {:?}", file.name.as_ref().unwrap(), code_gen.package ); code_gen.path.push(4); for (idx, message) in file.message_type.into_iter().enumerate() { code_gen.path.push(idx as i32); code_gen.append_message(message); code_gen.path.pop(); } code_gen.path.pop(); code_gen.path.push(5); for (idx, desc) in file.enum_type.into_iter().enumerate() { code_gen.path.push(idx as i32); code_gen.append_enum(desc); code_gen.path.pop(); } code_gen.path.pop(); if code_gen.config.service_generator.is_some() { code_gen.path.push(6); for (idx, service) in file.service.into_iter().enumerate() { code_gen.path.push(idx as i32); code_gen.push_service(service); code_gen.path.pop(); } if let Some(service_generator) = code_gen.config.service_generator.as_mut() { service_generator.finalize(code_gen.buf); } code_gen.path.pop(); } } fn append_message(&mut self, message: DescriptorProto) { debug!(" message: {:?}", message.name()); let message_name = message.name().to_string(); let fq_message_name = format!( "{}{}.{}", if self.package.is_empty() { "" } else { "." }, self.package, message.name() ); // Skip external types. if self.extern_paths.resolve_ident(&fq_message_name).is_some() { return; } // Split the nested message types into a vector of normal nested message types, and a map // of the map field entry types. The path index of the nested message types is preserved so // that comments can be retrieved. type NestedTypes = Vec<(DescriptorProto, usize)>; type MapTypes = HashMap; let (nested_types, map_types): (NestedTypes, MapTypes) = message .nested_type .into_iter() .enumerate() .partition_map(|(idx, nested_type)| { if nested_type .options .as_ref() .and_then(|options| options.map_entry) .unwrap_or(false) { let key = nested_type.field[0].clone(); let value = nested_type.field[1].clone(); assert_eq!("key", key.name()); assert_eq!("value", value.name()); let name = format!("{}.{}", &fq_message_name, nested_type.name()); Either::Right((name, (key, value))) } else { Either::Left((nested_type, idx)) } }); // Split the fields into a vector of the normal fields, and oneof fields. // Path indexes are preserved so that comments can be retrieved. 
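        // For example, given `oneof data { Foo foo = 1; }` alongside a plain
        // `string name = 2;`, `name` lands in `fields` while `foo` is grouped
        // under its oneof index in `oneof_fields`. Fields marked
        // `proto3_optional` are deliberately kept in `fields`, because protoc
        // models them as synthetic single-field oneofs that should not be
        // emitted as real Rust oneof enums.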
type Fields = Vec<(FieldDescriptorProto, usize)>; type OneofFields = MultiMap; let (fields, mut oneof_fields): (Fields, OneofFields) = message .field .into_iter() .enumerate() .partition_map(|(idx, field)| { if field.proto3_optional.unwrap_or(false) { Either::Left((field, idx)) } else if let Some(oneof_index) = field.oneof_index { Either::Right((oneof_index, (field, idx))) } else { Either::Left((field, idx)) } }); self.append_doc(&fq_message_name, None); self.append_type_attributes(&fq_message_name); self.append_message_attributes(&fq_message_name); self.push_indent(); self.buf .push_str("#[allow(clippy::derive_partial_eq_without_eq)]\n"); self.buf.push_str(&format!( "#[derive(Clone, PartialEq, {}::Message)]\n", self.config.prost_path.as_deref().unwrap_or("::prost") )); self.push_indent(); self.buf.push_str("pub struct "); self.buf.push_str(&to_upper_camel(&message_name)); self.buf.push_str(" {\n"); self.depth += 1; self.path.push(2); for (field, idx) in fields { self.path.push(idx as i32); match field .type_name .as_ref() .and_then(|type_name| map_types.get(type_name)) { Some(&(ref key, ref value)) => { self.append_map_field(&fq_message_name, field, key, value) } None => self.append_field(&fq_message_name, field), } self.path.pop(); } self.path.pop(); self.path.push(8); for (idx, oneof) in message.oneof_decl.iter().enumerate() { let idx = idx as i32; let fields = match oneof_fields.get_vec(&idx) { Some(fields) => fields, None => continue, }; self.path.push(idx); self.append_oneof_field(&message_name, &fq_message_name, oneof, fields); self.path.pop(); } self.path.pop(); self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); if !message.enum_type.is_empty() || !nested_types.is_empty() || !oneof_fields.is_empty() { self.push_mod(&message_name); self.path.push(3); for (nested_type, idx) in nested_types { self.path.push(idx as i32); self.append_message(nested_type); self.path.pop(); } self.path.pop(); self.path.push(4); for (idx, nested_enum) in message.enum_type.into_iter().enumerate() { self.path.push(idx as i32); self.append_enum(nested_enum); self.path.pop(); } self.path.pop(); for (idx, oneof) in message.oneof_decl.into_iter().enumerate() { let idx = idx as i32; // optional fields create a synthetic oneof that we want to skip let fields = match oneof_fields.remove(&idx) { Some(fields) => fields, None => continue, }; self.append_oneof(&fq_message_name, oneof, idx, fields); } self.pop_mod(); } } fn append_type_attributes(&mut self, fq_message_name: &str) { assert_eq!(b'.', fq_message_name.as_bytes()[0]); for attribute in self.config.type_attributes.get(fq_message_name) { push_indent(self.buf, self.depth); self.buf.push_str(attribute); self.buf.push('\n'); } } fn append_message_attributes(&mut self, fq_message_name: &str) { assert_eq!(b'.', fq_message_name.as_bytes()[0]); for attribute in self.config.message_attributes.get(fq_message_name) { push_indent(self.buf, self.depth); self.buf.push_str(attribute); self.buf.push('\n'); } } fn append_enum_attributes(&mut self, fq_message_name: &str) { assert_eq!(b'.', fq_message_name.as_bytes()[0]); for attribute in self.config.enum_attributes.get(fq_message_name) { push_indent(self.buf, self.depth); self.buf.push_str(attribute); self.buf.push('\n'); } } fn append_field_attributes(&mut self, fq_message_name: &str, field_name: &str) { assert_eq!(b'.', fq_message_name.as_bytes()[0]); for attribute in self .config .field_attributes .get_field(fq_message_name, field_name) { push_indent(self.buf, self.depth); self.buf.push_str(attribute); 
self.buf.push('\n'); } } fn append_field(&mut self, fq_message_name: &str, field: FieldDescriptorProto) { let type_ = field.r#type(); let repeated = field.label == Some(Label::Repeated as i32); let deprecated = self.deprecated(&field); let optional = self.optional(&field); let ty = self.resolve_type(&field, fq_message_name); let boxed = !repeated && ((type_ == Type::Message || type_ == Type::Group) && self .message_graph .is_nested(field.type_name(), fq_message_name)) || (self .config .boxed .get_first_field(&fq_message_name, field.name()) .is_some()); debug!( " field: {:?}, type: {:?}, boxed: {}", field.name(), ty, boxed ); self.append_doc(fq_message_name, Some(field.name())); if deprecated { self.push_indent(); self.buf.push_str("#[deprecated]\n"); } self.push_indent(); self.buf.push_str("#[prost("); let type_tag = self.field_type_tag(&field); self.buf.push_str(&type_tag); if type_ == Type::Bytes { let bytes_type = self .config .bytes_type .get_first_field(fq_message_name, field.name()) .copied() .unwrap_or_default(); self.buf .push_str(&format!("={:?}", bytes_type.annotation())); } match field.label() { Label::Optional => { if optional { self.buf.push_str(", optional"); } } Label::Required => self.buf.push_str(", required"), Label::Repeated => { self.buf.push_str(", repeated"); if can_pack(&field) && !field .options .as_ref() .map_or(self.syntax == Syntax::Proto3, |options| options.packed()) { self.buf.push_str(", packed=\"false\""); } } } if boxed { self.buf.push_str(", boxed"); } self.buf.push_str(", tag=\""); self.buf.push_str(&field.number().to_string()); if let Some(ref default) = field.default_value { self.buf.push_str("\", default=\""); if type_ == Type::Bytes { self.buf.push_str("b\\\""); for b in unescape_c_escape_string(default) { self.buf.extend( ascii::escape_default(b).flat_map(|c| (c as char).escape_default()), ); } self.buf.push_str("\\\""); } else if type_ == Type::Enum { let mut enum_value = to_upper_camel(default); if self.config.strip_enum_prefix { // Field types are fully qualified, so we extract // the last segment and strip it from the left // side of the default value. 
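                        // e.g. a default of `COLOR_GREEN` on a field of type
                        // `.some.pkg.Color` camel-cases to `ColorGreen`, and
                        // stripping the `Color` type prefix leaves the actual
                        // generated variant name, `Green`.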
let enum_type = field .type_name .as_ref() .and_then(|ty| ty.split('.').last()) .unwrap(); enum_value = strip_enum_prefix(&to_upper_camel(enum_type), &enum_value) } self.buf.push_str(&enum_value); } else { self.buf.push_str(&default.escape_default().to_string()); } } self.buf.push_str("\")]\n"); self.append_field_attributes(fq_message_name, field.name()); self.push_indent(); self.buf.push_str("pub "); self.buf.push_str(&to_snake(field.name())); self.buf.push_str(": "); let prost_path = self.config.prost_path.as_deref().unwrap_or("::prost"); if repeated { self.buf .push_str(&format!("{}::alloc::vec::Vec<", prost_path)); } else if optional { self.buf.push_str("::core::option::Option<"); } if boxed { self.buf .push_str(&format!("{}::alloc::boxed::Box<", prost_path)); } self.buf.push_str(&ty); if boxed { self.buf.push('>'); } if repeated || optional { self.buf.push('>'); } self.buf.push_str(",\n"); } fn append_map_field( &mut self, fq_message_name: &str, field: FieldDescriptorProto, key: &FieldDescriptorProto, value: &FieldDescriptorProto, ) { let key_ty = self.resolve_type(key, fq_message_name); let value_ty = self.resolve_type(value, fq_message_name); debug!( " map field: {:?}, key type: {:?}, value type: {:?}", field.name(), key_ty, value_ty ); self.append_doc(fq_message_name, Some(field.name())); self.push_indent(); let map_type = self .config .map_type .get_first_field(fq_message_name, field.name()) .copied() .unwrap_or_default(); let key_tag = self.field_type_tag(key); let value_tag = self.map_value_type_tag(value); self.buf.push_str(&format!( "#[prost({}=\"{}, {}\", tag=\"{}\")]\n", map_type.annotation(), key_tag, value_tag, field.number() )); self.append_field_attributes(fq_message_name, field.name()); self.push_indent(); self.buf.push_str(&format!( "pub {}: {}<{}, {}>,\n", to_snake(field.name()), map_type.rust_type(), key_ty, value_ty )); } fn append_oneof_field( &mut self, message_name: &str, fq_message_name: &str, oneof: &OneofDescriptorProto, fields: &[(FieldDescriptorProto, usize)], ) { let name = format!( "{}::{}", to_snake(message_name), to_upper_camel(oneof.name()) ); self.append_doc(fq_message_name, None); self.push_indent(); self.buf.push_str(&format!( "#[prost(oneof=\"{}\", tags=\"{}\")]\n", name, fields .iter() .map(|&(ref field, _)| field.number()) .join(", ") )); self.append_field_attributes(fq_message_name, oneof.name()); self.push_indent(); self.buf.push_str(&format!( "pub {}: ::core::option::Option<{}>,\n", to_snake(oneof.name()), name )); } fn append_oneof( &mut self, fq_message_name: &str, oneof: OneofDescriptorProto, idx: i32, fields: Vec<(FieldDescriptorProto, usize)>, ) { self.path.push(8); self.path.push(idx); self.append_doc(fq_message_name, None); self.path.pop(); self.path.pop(); let oneof_name = format!("{}.{}", fq_message_name, oneof.name()); self.append_type_attributes(&oneof_name); self.append_enum_attributes(&oneof_name); self.push_indent(); self.buf .push_str("#[allow(clippy::derive_partial_eq_without_eq)]\n"); self.buf.push_str(&format!( "#[derive(Clone, PartialEq, {}::Oneof)]\n", self.config.prost_path.as_deref().unwrap_or("::prost") )); self.push_indent(); self.buf.push_str("pub enum "); self.buf.push_str(&to_upper_camel(oneof.name())); self.buf.push_str(" {\n"); self.path.push(2); self.depth += 1; for (field, idx) in fields { let type_ = field.r#type(); self.path.push(idx as i32); self.append_doc(fq_message_name, Some(field.name())); self.path.pop(); self.push_indent(); let ty_tag = self.field_type_tag(&field); self.buf.push_str(&format!( 
"#[prost({}, tag=\"{}\")]\n", ty_tag, field.number() )); self.append_field_attributes(&oneof_name, field.name()); self.push_indent(); let ty = self.resolve_type(&field, fq_message_name); let boxed = ((type_ == Type::Message || type_ == Type::Group) && self .message_graph .is_nested(field.type_name(), fq_message_name)) || (self .config .boxed .get_first_field(&oneof_name, field.name()) .is_some()); debug!( " oneof: {:?}, type: {:?}, boxed: {}", field.name(), ty, boxed ); if boxed { self.buf.push_str(&format!( "{}(::prost::alloc::boxed::Box<{}>),\n", to_upper_camel(field.name()), ty )); } else { self.buf .push_str(&format!("{}({}),\n", to_upper_camel(field.name()), ty)); } } self.depth -= 1; self.path.pop(); self.push_indent(); self.buf.push_str("}\n"); } fn location(&self) -> Option<&Location> { let source_info = self.source_info.as_ref()?; let idx = source_info .location .binary_search_by_key(&&self.path[..], |location| &location.path[..]) .unwrap(); Some(&source_info.location[idx]) } fn append_doc(&mut self, fq_name: &str, field_name: Option<&str>) { let append_doc = if let Some(field_name) = field_name { self.config .disable_comments .get_first_field(fq_name, field_name) .is_none() } else { self.config.disable_comments.get(fq_name).next().is_none() }; if append_doc { if let Some(comments) = self.location().map(Comments::from_location) { comments.append_with_indent(self.depth, self.buf); } } } fn append_enum(&mut self, desc: EnumDescriptorProto) { debug!(" enum: {:?}", desc.name()); let proto_enum_name = desc.name(); let enum_name = to_upper_camel(proto_enum_name); let enum_values = &desc.value; let fq_proto_enum_name = format!( "{}{}.{}", if self.package.is_empty() { "" } else { "." }, self.package, proto_enum_name ); if self .extern_paths .resolve_ident(&fq_proto_enum_name) .is_some() { return; } self.append_doc(&fq_proto_enum_name, None); self.append_type_attributes(&fq_proto_enum_name); self.append_enum_attributes(&fq_proto_enum_name); self.push_indent(); self.buf.push_str( &format!("#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, {}::Enumeration)]\n",self.config.prost_path.as_deref().unwrap_or("::prost")), ); self.push_indent(); self.buf.push_str("#[repr(i32)]\n"); self.push_indent(); self.buf.push_str("pub enum "); self.buf.push_str(&enum_name); self.buf.push_str(" {\n"); let variant_mappings = build_enum_value_mappings(&enum_name, self.config.strip_enum_prefix, enum_values); self.depth += 1; self.path.push(2); for variant in variant_mappings.iter() { self.path.push(variant.path_idx as i32); self.append_doc(&fq_proto_enum_name, Some(variant.proto_name)); self.append_field_attributes(&fq_proto_enum_name, variant.proto_name); self.push_indent(); self.buf.push_str(&variant.generated_variant_name); self.buf.push_str(" = "); self.buf.push_str(&variant.proto_number.to_string()); self.buf.push_str(",\n"); self.path.pop(); } self.path.pop(); self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); self.push_indent(); self.buf.push_str("impl "); self.buf.push_str(&enum_name); self.buf.push_str(" {\n"); self.depth += 1; self.path.push(2); self.push_indent(); self.buf.push_str( "/// String value of the enum field names used in the ProtoBuf definition.\n", ); self.push_indent(); self.buf.push_str("///\n"); self.push_indent(); self.buf.push_str( "/// The values are not transformed in any way and thus are considered stable\n", ); self.push_indent(); self.buf.push_str( "/// (if the ProtoBuf definition does not change) and safe for programmatic use.\n", ); 
self.push_indent(); self.buf .push_str("pub fn as_str_name(&self) -> &'static str {\n"); self.depth += 1; self.push_indent(); self.buf.push_str("match self {\n"); self.depth += 1; for variant in variant_mappings.iter() { self.push_indent(); self.buf.push_str(&enum_name); self.buf.push_str("::"); self.buf.push_str(&variant.generated_variant_name); self.buf.push_str(" => \""); self.buf.push_str(variant.proto_name); self.buf.push_str("\",\n"); } self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); // End of match self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); // End of as_str_name() self.push_indent(); self.buf .push_str("/// Creates an enum from field names used in the ProtoBuf definition.\n"); self.push_indent(); self.buf .push_str("pub fn from_str_name(value: &str) -> ::core::option::Option {\n"); self.depth += 1; self.push_indent(); self.buf.push_str("match value {\n"); self.depth += 1; for variant in variant_mappings.iter() { self.push_indent(); self.buf.push('\"'); self.buf.push_str(variant.proto_name); self.buf.push_str("\" => Some(Self::"); self.buf.push_str(&variant.generated_variant_name); self.buf.push_str("),\n"); } self.push_indent(); self.buf.push_str("_ => None,\n"); self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); // End of match self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); // End of from_str_name() self.path.pop(); self.depth -= 1; self.push_indent(); self.buf.push_str("}\n"); // End of impl } fn push_service(&mut self, service: ServiceDescriptorProto) { let name = service.name().to_owned(); debug!(" service: {:?}", name); let comments = self .location() .map(Comments::from_location) .unwrap_or_default(); self.path.push(2); let methods = service .method .into_iter() .enumerate() .map(|(idx, mut method)| { debug!(" method: {:?}", method.name()); self.path.push(idx as i32); let comments = self .location() .map(Comments::from_location) .unwrap_or_default(); self.path.pop(); let name = method.name.take().unwrap(); let input_proto_type = method.input_type.take().unwrap(); let output_proto_type = method.output_type.take().unwrap(); let input_type = self.resolve_ident(&input_proto_type); let output_type = self.resolve_ident(&output_proto_type); let client_streaming = method.client_streaming(); let server_streaming = method.server_streaming(); Method { name: to_snake(&name), proto_name: name, comments, input_type, output_type, input_proto_type, output_proto_type, options: method.options.unwrap_or_default(), client_streaming, server_streaming, } }) .collect(); self.path.pop(); let service = Service { name: to_upper_camel(&name), proto_name: name, package: self.package.clone(), comments, methods, options: service.options.unwrap_or_default(), }; if let Some(service_generator) = self.config.service_generator.as_mut() { service_generator.generate(service, self.buf) } } fn push_indent(&mut self) { push_indent(self.buf, self.depth); } fn push_mod(&mut self, module: &str) { self.push_indent(); self.buf.push_str("/// Nested message and enum types in `"); self.buf.push_str(module); self.buf.push_str("`.\n"); self.push_indent(); self.buf.push_str("pub mod "); self.buf.push_str(&to_snake(module)); self.buf.push_str(" {\n"); self.package.push('.'); self.package.push_str(module); self.depth += 1; } fn pop_mod(&mut self) { self.depth -= 1; let idx = self.package.rfind('.').unwrap(); self.package.truncate(idx); self.push_indent(); self.buf.push_str("}\n"); } fn resolve_type(&self, field: &FieldDescriptorProto, fq_message_name: &str) -> String 
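    // Scalar mapping sketch: `int32`, `sint32`, `sfixed32`, and `enum` all
    // resolve to `i32`; `string` goes through `prost_path`, so the default
    // configuration yields `::prost::alloc::string::String`.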
{ let prost_path = self.config.prost_path.as_deref().unwrap_or("::prost"); match field.r#type() { Type::Float => String::from("f32"), Type::Double => String::from("f64"), Type::Uint32 | Type::Fixed32 => String::from("u32"), Type::Uint64 | Type::Fixed64 => String::from("u64"), Type::Int32 | Type::Sfixed32 | Type::Sint32 | Type::Enum => String::from("i32"), Type::Int64 | Type::Sfixed64 | Type::Sint64 => String::from("i64"), Type::Bool => String::from("bool"), Type::String => format!("{}::alloc::string::String", prost_path), Type::Bytes => self .config .bytes_type .get_first_field(fq_message_name, field.name()) .copied() .unwrap_or_default() .rust_type() .to_owned(), Type::Group | Type::Message => self.resolve_ident(field.type_name()), } } fn resolve_ident(&self, pb_ident: &str) -> String { // protoc should always give fully qualified identifiers. assert_eq!(".", &pb_ident[..1]); if let Some(proto_ident) = self.extern_paths.resolve_ident(pb_ident) { return proto_ident; } let mut local_path = self.package.split('.').peekable(); // If no package is specified the start of the package name will be '.' // and split will return an empty string ("") which breaks resolution // The fix to this is to ignore the first item if it is empty. if local_path.peek().map_or(false, |s| s.is_empty()) { local_path.next(); } let mut ident_path = pb_ident[1..].split('.'); let ident_type = ident_path.next_back().unwrap(); let mut ident_path = ident_path.peekable(); // Skip path elements in common. while local_path.peek().is_some() && local_path.peek() == ident_path.peek() { local_path.next(); ident_path.next(); } local_path .map(|_| "super".to_string()) .chain(ident_path.map(to_snake)) .chain(iter::once(to_upper_camel(ident_type))) .join("::") } fn field_type_tag(&self, field: &FieldDescriptorProto) -> Cow<'static, str> { match field.r#type() { Type::Float => Cow::Borrowed("float"), Type::Double => Cow::Borrowed("double"), Type::Int32 => Cow::Borrowed("int32"), Type::Int64 => Cow::Borrowed("int64"), Type::Uint32 => Cow::Borrowed("uint32"), Type::Uint64 => Cow::Borrowed("uint64"), Type::Sint32 => Cow::Borrowed("sint32"), Type::Sint64 => Cow::Borrowed("sint64"), Type::Fixed32 => Cow::Borrowed("fixed32"), Type::Fixed64 => Cow::Borrowed("fixed64"), Type::Sfixed32 => Cow::Borrowed("sfixed32"), Type::Sfixed64 => Cow::Borrowed("sfixed64"), Type::Bool => Cow::Borrowed("bool"), Type::String => Cow::Borrowed("string"), Type::Bytes => Cow::Borrowed("bytes"), Type::Group => Cow::Borrowed("group"), Type::Message => Cow::Borrowed("message"), Type::Enum => Cow::Owned(format!( "enumeration={:?}", self.resolve_ident(field.type_name()) )), } } fn map_value_type_tag(&self, field: &FieldDescriptorProto) -> Cow<'static, str> { match field.r#type() { Type::Enum => Cow::Owned(format!( "enumeration({})", self.resolve_ident(field.type_name()) )), _ => self.field_type_tag(field), } } fn optional(&self, field: &FieldDescriptorProto) -> bool { if field.proto3_optional.unwrap_or(false) { return true; } if field.label() != Label::Optional { return false; } match field.r#type() { Type::Message => true, _ => self.syntax == Syntax::Proto2, } } /// Returns `true` if the field options includes the `deprecated` option. fn deprecated(&self, field: &FieldDescriptorProto) -> bool { field .options .as_ref() .map_or(false, FieldOptions::deprecated) } } /// Returns `true` if the repeated field type can be packed. 
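// Packed encoding is only meaningful for fixed-width and varint scalars:
// `repeated sint64` or `repeated bool` can be packed (and is by default under
// proto3, per `append_field` above), while `repeated string`,
// `repeated bytes`, and repeated message fields never can be.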
fn can_pack(field: &FieldDescriptorProto) -> bool { matches!( field.r#type(), Type::Float | Type::Double | Type::Int32 | Type::Int64 | Type::Uint32 | Type::Uint64 | Type::Sint32 | Type::Sint64 | Type::Fixed32 | Type::Fixed64 | Type::Sfixed32 | Type::Sfixed64 | Type::Bool | Type::Enum ) } /// Based on [`google::protobuf::UnescapeCEscapeString`][1] /// [1]: https://github.com/google/protobuf/blob/3.3.x/src/google/protobuf/stubs/strutil.cc#L312-L322 fn unescape_c_escape_string(s: &str) -> Vec { let src = s.as_bytes(); let len = src.len(); let mut dst = Vec::new(); let mut p = 0; while p < len { if src[p] != b'\\' { dst.push(src[p]); p += 1; } else { p += 1; if p == len { panic!( "invalid c-escaped default binary value ({}): ends with '\'", s ) } match src[p] { b'a' => { dst.push(0x07); p += 1; } b'b' => { dst.push(0x08); p += 1; } b'f' => { dst.push(0x0C); p += 1; } b'n' => { dst.push(0x0A); p += 1; } b'r' => { dst.push(0x0D); p += 1; } b't' => { dst.push(0x09); p += 1; } b'v' => { dst.push(0x0B); p += 1; } b'\\' => { dst.push(0x5C); p += 1; } b'?' => { dst.push(0x3F); p += 1; } b'\'' => { dst.push(0x27); p += 1; } b'"' => { dst.push(0x22); p += 1; } b'0'..=b'7' => { debug!("another octal: {}, offset: {}", s, &s[p..]); let mut octal = 0; for _ in 0..3 { if p < len && src[p] >= b'0' && src[p] <= b'7' { debug!("\toctal: {}", octal); octal = octal * 8 + (src[p] - b'0'); p += 1; } else { break; } } dst.push(octal); } b'x' | b'X' => { if p + 3 > len { panic!( "invalid c-escaped default binary value ({}): incomplete hex value", s ) } match u8::from_str_radix(&s[p + 1..p + 3], 16) { Ok(b) => dst.push(b), _ => panic!( "invalid c-escaped default binary value ({}): invalid hex value", &s[p..p + 2] ), } p += 3; } _ => panic!( "invalid c-escaped default binary value ({}): invalid escape", s ), } } } dst } /// Strip an enum's type name from the prefix of an enum value. /// /// This function assumes that both have been formatted to Rust's /// upper camel case naming conventions. /// /// It also tries to handle cases where the stripped name would be /// invalid - for example, if it were to begin with a number. fn strip_enum_prefix(prefix: &str, name: &str) -> String { let stripped = name.strip_prefix(prefix).unwrap_or(name); // If the next character after the stripped prefix is not // uppercase, then it means that we didn't have a true prefix - // for example, "Foo" should not be stripped from "Foobar". if stripped .chars() .next() .map(char::is_uppercase) .unwrap_or(false) { stripped.to_owned() } else { name.to_owned() } } struct EnumVariantMapping<'a> { path_idx: usize, proto_name: &'a str, proto_number: i32, generated_variant_name: String, } fn build_enum_value_mappings<'a>( generated_enum_name: &str, do_strip_enum_prefix: bool, enum_values: &'a [EnumValueDescriptorProto], ) -> Vec> { let mut numbers = HashSet::new(); let mut generated_names = HashMap::new(); let mut mappings = Vec::new(); for (idx, value) in enum_values.iter().enumerate() { // Skip duplicate enum values. Protobuf allows this when the // 'allow_alias' option is set. 
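        // e.g. with `option allow_alias = true;`, `STARTED = 1;` followed by
        // `RUNNING = 1;` reuses the number, and only the first name seen
        // (`STARTED`) produces a Rust variant.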
if !numbers.insert(value.number()) { continue; } let mut generated_variant_name = to_upper_camel(value.name()); if do_strip_enum_prefix { generated_variant_name = strip_enum_prefix(generated_enum_name, &generated_variant_name); } if let Some(old_v) = generated_names.insert(generated_variant_name.to_owned(), value.name()) { panic!("Generated enum variant names overlap: `{}` variant name to be used both by `{}` and `{}` ProtoBuf enum values", generated_variant_name, old_v, value.name()); } mappings.push(EnumVariantMapping { path_idx: idx, proto_name: value.name(), proto_number: value.number(), generated_variant_name, }) } mappings } impl MapType { /// The `prost-derive` annotation type corresponding to the map type. fn annotation(&self) -> &'static str { match self { MapType::HashMap => "map", MapType::BTreeMap => "btree_map", } } /// The fully-qualified Rust type corresponding to the map type. fn rust_type(&self) -> &'static str { match self { MapType::HashMap => "::std::collections::HashMap", MapType::BTreeMap => "::prost::alloc::collections::BTreeMap", } } } impl BytesType { /// The `prost-derive` annotation type corresponding to the bytes type. fn annotation(&self) -> &'static str { match self { BytesType::Vec => "vec", BytesType::Bytes => "bytes", } } /// The fully-qualified Rust type corresponding to the bytes type. fn rust_type(&self) -> &'static str { match self { BytesType::Vec => "::prost::alloc::vec::Vec", BytesType::Bytes => "::prost::bytes::Bytes", } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_unescape_c_escape_string() { assert_eq!( &b"hello world"[..], &unescape_c_escape_string("hello world")[..] ); assert_eq!(&b"\0"[..], &unescape_c_escape_string(r#"\0"#)[..]); assert_eq!( &[0o012, 0o156], &unescape_c_escape_string(r#"\012\156"#)[..] ); assert_eq!(&[0x01, 0x02], &unescape_c_escape_string(r#"\x01\x02"#)[..]); assert_eq!( &b"\0\x01\x07\x08\x0C\n\r\t\x0B\\\'\"\xFE"[..], &unescape_c_escape_string(r#"\0\001\a\b\f\n\r\t\v\\\'\"\xfe"#)[..] 
); } #[test] #[should_panic(expected = "incomplete hex value")] fn test_unescape_c_escape_string_incomplete_hex_value() { unescape_c_escape_string(r#"\x1"#); } #[test] fn test_strip_enum_prefix() { assert_eq!(strip_enum_prefix("Foo", "FooBar"), "Bar"); assert_eq!(strip_enum_prefix("Foo", "Foobar"), "Foobar"); assert_eq!(strip_enum_prefix("Foo", "Foo"), "Foo"); assert_eq!(strip_enum_prefix("Foo", "Bar"), "Bar"); assert_eq!(strip_enum_prefix("Foo", "Foo1"), "Foo1"); } } prost-build-0.11.9/src/extern_paths.rs000064400000000000000000000136661046102023000160350ustar 00000000000000use std::collections::{hash_map, HashMap}; use itertools::Itertools; use crate::ident::{to_snake, to_upper_camel}; fn validate_proto_path(path: &str) -> Result<(), String> { if path.chars().next().map(|c| c != '.').unwrap_or(true) { return Err(format!( "Protobuf paths must be fully qualified (begin with a leading '.'): {}", path )); } if path.split('.').skip(1).any(str::is_empty) { return Err(format!("invalid fully-qualified Protobuf path: {}", path)); } Ok(()) } #[derive(Debug)] pub struct ExternPaths { extern_paths: HashMap, } impl ExternPaths { pub fn new(paths: &[(String, String)], prost_types: bool) -> Result { let mut extern_paths = ExternPaths { extern_paths: HashMap::new(), }; for (proto_path, rust_path) in paths { extern_paths.insert(proto_path.clone(), rust_path.clone())?; } if prost_types { extern_paths.insert(".google.protobuf".to_string(), "::prost_types".to_string())?; extern_paths.insert(".google.protobuf.BoolValue".to_string(), "bool".to_string())?; extern_paths.insert( ".google.protobuf.BytesValue".to_string(), "::prost::alloc::vec::Vec".to_string(), )?; extern_paths.insert( ".google.protobuf.DoubleValue".to_string(), "f64".to_string(), )?; extern_paths.insert(".google.protobuf.Empty".to_string(), "()".to_string())?; extern_paths.insert(".google.protobuf.FloatValue".to_string(), "f32".to_string())?; extern_paths.insert(".google.protobuf.Int32Value".to_string(), "i32".to_string())?; extern_paths.insert(".google.protobuf.Int64Value".to_string(), "i64".to_string())?; extern_paths.insert( ".google.protobuf.StringValue".to_string(), "::prost::alloc::string::String".to_string(), )?; extern_paths.insert( ".google.protobuf.UInt32Value".to_string(), "u32".to_string(), )?; extern_paths.insert( ".google.protobuf.UInt64Value".to_string(), "u64".to_string(), )?; } Ok(extern_paths) } fn insert(&mut self, proto_path: String, rust_path: String) -> Result<(), String> { validate_proto_path(&proto_path)?; match self.extern_paths.entry(proto_path) { hash_map::Entry::Occupied(occupied) => { return Err(format!( "duplicate extern Protobuf path: {}", occupied.key() )); } hash_map::Entry::Vacant(vacant) => vacant.insert(rust_path), }; Ok(()) } pub fn resolve_ident(&self, pb_ident: &str) -> Option { // protoc should always give fully qualified identifiers. assert_eq!(".", &pb_ident[..1]); if let Some(rust_path) = self.extern_paths.get(pb_ident) { return Some(rust_path.clone()); } // TODO(danburkert): there must be a more efficient way to do this, maybe a trie? 
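        // Candidate prefixes are tried longest-first: for `.foo.Fuzz.Bar`,
        // `rmatch_indices` visits the rightmost `.` first, so `.foo.Fuzz` is
        // checked before `.foo`, and the leftover segments are spliced onto
        // the configured Rust path (see the `test_extern_paths` cases below).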
for (idx, _) in pb_ident.rmatch_indices('.') { if let Some(rust_path) = self.extern_paths.get(&pb_ident[..idx]) { let mut segments = pb_ident[idx + 1..].split('.'); let ident_type = segments.next_back().map(to_upper_camel); return Some( rust_path .split("::") .chain(segments) .enumerate() .map(|(idx, segment)| { if idx == 0 && segment == "crate" { // If the first segment of the path is 'crate', then do not escape // it into a raw identifier, since it's being used as the keyword. segment.to_owned() } else { to_snake(segment) } }) .chain(ident_type.into_iter()) .join("::"), ); } } None } } #[cfg(test)] mod tests { use super::*; #[test] fn test_extern_paths() { let paths = ExternPaths::new( &[ (".foo".to_string(), "::foo1".to_string()), (".foo.bar".to_string(), "::foo2".to_string()), (".foo.baz".to_string(), "::foo3".to_string()), (".foo.Fuzz".to_string(), "::foo4::Fuzz".to_string()), (".a.b.c.d.e.f".to_string(), "::abc::def".to_string()), ], false, ) .unwrap(); let case = |proto_ident: &str, resolved_ident: &str| { assert_eq!(paths.resolve_ident(proto_ident).unwrap(), resolved_ident); }; case(".foo", "::foo1"); case(".foo.Foo", "::foo1::Foo"); case(".foo.bar", "::foo2"); case(".foo.Bas", "::foo1::Bas"); case(".foo.bar.Bar", "::foo2::Bar"); case(".foo.Fuzz.Bar", "::foo4::fuzz::Bar"); case(".a.b.c.d.e.f", "::abc::def"); case(".a.b.c.d.e.f.g.FooBar.Baz", "::abc::def::g::foo_bar::Baz"); assert!(paths.resolve_ident(".a").is_none()); assert!(paths.resolve_ident(".a.b").is_none()); assert!(paths.resolve_ident(".a.c").is_none()); } #[test] fn test_well_known_types() { let paths = ExternPaths::new(&[], true).unwrap(); let case = |proto_ident: &str, resolved_ident: &str| { assert_eq!(paths.resolve_ident(proto_ident).unwrap(), resolved_ident); }; case(".google.protobuf.Value", "::prost_types::Value"); case(".google.protobuf.Duration", "::prost_types::Duration"); case(".google.protobuf.Empty", "()"); } } prost-build-0.11.9/src/fixtures/alphabet/_expected_include.rs000064400000000000000000000003301046102023000224050ustar 00000000000000pub mod a { include!("a.rs"); } pub mod b { include!("b.rs"); } pub mod c { include!("c.rs"); } pub mod d { include!("d.rs"); } pub mod e { include!("e.rs"); } pub mod f { include!("f.rs"); } prost-build-0.11.9/src/fixtures/alphabet/a.proto000064400000000000000000000001421046102023000177020ustar 00000000000000syntax = "proto3"; package a; message Alpha { string sign = 1; string pronounciation = 2; } prost-build-0.11.9/src/fixtures/alphabet/b.proto000064400000000000000000000001421046102023000177030ustar 00000000000000syntax = "proto3"; package b; message Bravo { string sign = 1; string pronounciation = 2; } prost-build-0.11.9/src/fixtures/alphabet/c.proto000064400000000000000000000001441046102023000177060ustar 00000000000000syntax = "proto3"; package c; message Charlie { string sign = 1; string pronounciation = 2; } prost-build-0.11.9/src/fixtures/alphabet/d.proto000064400000000000000000000001421046102023000177050ustar 00000000000000syntax = "proto3"; package d; message Delta { string sign = 1; string pronounciation = 2; } prost-build-0.11.9/src/fixtures/alphabet/e.proto000064400000000000000000000001401046102023000177040ustar 00000000000000syntax = "proto3"; package e; message Echo { string sign = 1; string pronounciation = 2; }prost-build-0.11.9/src/fixtures/alphabet/f.proto000064400000000000000000000001431046102023000177100ustar 00000000000000syntax = "proto3"; package f; message Foxtrot { string sign = 1; string pronounciation = 2; 
}prost-build-0.11.9/src/fixtures/field_attributes/_expected_field_attributes.rs000064400000000000000000000021061046102023000260670ustar 00000000000000#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Container { #[prost(oneof="container::Data", tags="1, 2")] pub data: ::core::option::Option, } /// Nested message and enum types in `Container`. pub mod container { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Data { #[prost(message, tag="1")] Foo(::prost::alloc::boxed::Box), #[prost(message, tag="2")] Bar(super::Bar), } } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Foo { #[prost(string, tag="1")] pub foo: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Bar { #[prost(message, optional, boxed, tag="1")] pub qux: ::core::option::Option<::prost::alloc::boxed::Box>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Qux { } prost-build-0.11.9/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs000064400000000000000000000021251046102023000301350ustar 00000000000000#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Container { #[prost(oneof = "container::Data", tags = "1, 2")] pub data: ::core::option::Option, } /// Nested message and enum types in `Container`. pub mod container { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Data { #[prost(message, tag = "1")] Foo(::prost::alloc::boxed::Box), #[prost(message, tag = "2")] Bar(super::Bar), } } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Foo { #[prost(string, tag = "1")] pub foo: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Bar { #[prost(message, optional, boxed, tag = "1")] pub qux: ::core::option::Option<::prost::alloc::boxed::Box>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Qux {} prost-build-0.11.9/src/fixtures/field_attributes/field_attributes.proto000064400000000000000000000003161046102023000245670ustar 00000000000000syntax = "proto3"; package field_attributes; message Container { oneof data { Foo foo = 1; Bar bar = 2; } } message Foo { string foo = 1; } message Bar { Qux qux = 1; } message Qux { } prost-build-0.11.9/src/fixtures/helloworld/_expected_helloworld.rs000064400000000000000000000027701046102023000235420ustar 00000000000000#[derive(derive_builder::Builder)] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Message { #[prost(string, tag = "1")] pub say: ::prost::alloc::string::String, } #[derive(derive_builder::Builder)] #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Response { #[prost(string, tag = "1")] pub say: ::prost::alloc::string::String, } #[some_enum_attr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum ServingStatus { Unknown = 0, Serving = 1, NotServing = 2, } impl ServingStatus { /// String value of the enum field names used in the ProtoBuf definition. 
/// /// The values are not transformed in any way and thus are considered stable /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { ServingStatus::Unknown => "UNKNOWN", ServingStatus::Serving => "SERVING", ServingStatus::NotServing => "NOT_SERVING", } } /// Creates an enum from field names used in the ProtoBuf definition. pub fn from_str_name(value: &str) -> ::core::option::Option { match value { "UNKNOWN" => Some(Self::Unknown), "SERVING" => Some(Self::Serving), "NOT_SERVING" => Some(Self::NotServing), _ => None, } } } prost-build-0.11.9/src/fixtures/helloworld/goodbye.proto000064400000000000000000000002031046102023000215030ustar 00000000000000syntax = "proto3"; import "types.proto"; package helloworld; service Farewell { rpc Goodbye (Message) returns (Response) {} } prost-build-0.11.9/src/fixtures/helloworld/hello.proto000064400000000000000000000002011046102023000211540ustar 00000000000000syntax = "proto3"; import "types.proto"; package helloworld; service Greeting { rpc Hello (Message) returns (Response) {} } prost-build-0.11.9/src/fixtures/helloworld/types.proto000064400000000000000000000003001046102023000212150ustar 00000000000000syntax = "proto3"; package helloworld; message Message { string say = 1; } message Response { string say = 1; } enum ServingStatus { UNKNOWN = 0; SERVING = 1; NOT_SERVING = 2; } prost-build-0.11.9/src/fixtures/imports_empty/_expected_include.rs000064400000000000000000000002561046102023000235470ustar 00000000000000pub mod com { pub mod prost_test { pub mod test { pub mod v1 { include!("com.prost_test.test.v1.rs"); } } } } prost-build-0.11.9/src/fixtures/imports_empty/imports_empty.proto000064400000000000000000000021261046102023000235340ustar 00000000000000syntax = "proto3"; /******************************************************************************* * 1. Package */ package com.prost_test.test.v1; /******************************************************************************* * 2. Imports */ import "google/protobuf/empty.proto"; /******************************************************************************* * 3. File Options */ /******************************************************************************* * 4. service */ /* test service */ service Test { /* test method */ rpc GetTest(google.protobuf.Empty) returns (GetTestResponse); } /****************************************************************************** * 5. resource "message" definitions */ /* Test application configuration */ message TestConfig { } /****************************************************************************** * 6. request & response "message" definitions */ /* Test response */ message GetTestResponse { /* Test config */ TestConfig conf = 1; } /****************************************************************************** * 7. enum */prost-build-0.11.9/src/fixtures/smoke_test/smoke_test.proto000064400000000000000000000004111046102023000222330ustar 00000000000000syntax = "proto2"; package smoke_test; message SmokeRequest { } message SmokeResponse { } // Just a smoke test service. service SmokeService { // A detached comment block. // Blow some smoke. rpc BlowSmoke(SmokeRequest) returns (SmokeResponse); } prost-build-0.11.9/src/ident.rs000064400000000000000000000155421046102023000144270ustar 00000000000000//! Utility functions for working with identifiers. 
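//!
//! A quick sketch of the conversions performed here (this module is
//! crate-private, so the calls below are illustrative only; they mirror the
//! unit tests at the bottom of this file):
//!
//! ```ignore
//! assert_eq!(to_snake("FooBar"), "foo_bar");
//! assert_eq!(to_snake("type"), "r#type"); // keywords become raw identifiers
//! assert_eq!(to_snake("self"), "self_"); // `self` cannot be raw, so it is suffixed
//! assert_eq!(to_upper_camel("foo_bar"), "FooBar");
//! assert_eq!(to_upper_camel("self"), "Self_"); // `Self` cannot be raw either
//! ```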
use heck::{ToSnakeCase, ToUpperCamelCase}; /// Converts a `camelCase` or `SCREAMING_SNAKE_CASE` identifier to a `lower_snake` case Rust field /// identifier. pub fn to_snake(s: &str) -> String { let mut ident = s.to_snake_case(); // Use a raw identifier if the identifier matches a Rust keyword: // https://doc.rust-lang.org/reference/keywords.html. match ident.as_str() { // 2015 strict keywords. | "as" | "break" | "const" | "continue" | "else" | "enum" | "false" | "fn" | "for" | "if" | "impl" | "in" | "let" | "loop" | "match" | "mod" | "move" | "mut" | "pub" | "ref" | "return" | "static" | "struct" | "trait" | "true" | "type" | "unsafe" | "use" | "where" | "while" // 2018 strict keywords. | "dyn" // 2015 reserved keywords. | "abstract" | "become" | "box" | "do" | "final" | "macro" | "override" | "priv" | "typeof" | "unsized" | "virtual" | "yield" // 2018 reserved keywords. | "async" | "await" | "try" => ident.insert_str(0, "r#"), // the following keywords are not supported as raw identifiers and are therefore suffixed with an underscore. "self" | "super" | "extern" | "crate" => ident += "_", _ => (), } ident } /// Converts a `snake_case` identifier to an `UpperCamel` case Rust type identifier. pub fn to_upper_camel(s: &str) -> String { let mut ident = s.to_upper_camel_case(); // Suffix an underscore for the `Self` Rust keyword as it is not allowed as raw identifier. if ident == "Self" { ident += "_"; } ident } #[cfg(test)] mod tests { #![allow(clippy::cognitive_complexity)] use super::*; #[test] fn test_to_snake() { assert_eq!("foo_bar", &to_snake("FooBar")); assert_eq!("foo_bar_baz", &to_snake("FooBarBAZ")); assert_eq!("foo_bar_baz", &to_snake("FooBarBAZ")); assert_eq!("xml_http_request", &to_snake("XMLHttpRequest")); assert_eq!("r#while", &to_snake("While")); assert_eq!("fuzz_buster", &to_snake("FUZZ_BUSTER")); assert_eq!("foo_bar_baz", &to_snake("foo_bar_baz")); assert_eq!("fuzz_buster", &to_snake("FUZZ_buster")); assert_eq!("fuzz", &to_snake("_FUZZ")); assert_eq!("fuzz", &to_snake("_fuzz")); assert_eq!("fuzz", &to_snake("_Fuzz")); assert_eq!("fuzz", &to_snake("FUZZ_")); assert_eq!("fuzz", &to_snake("fuzz_")); assert_eq!("fuzz", &to_snake("Fuzz_")); assert_eq!("fuz_z", &to_snake("FuzZ_")); // From test_messages_proto3.proto. 
assert_eq!("fieldname1", &to_snake("fieldname1")); assert_eq!("field_name2", &to_snake("field_name2")); assert_eq!("field_name3", &to_snake("_field_name3")); assert_eq!("field_name4", &to_snake("field__name4_")); assert_eq!("field0name5", &to_snake("field0name5")); assert_eq!("field_0_name6", &to_snake("field_0_name6")); assert_eq!("field_name7", &to_snake("fieldName7")); assert_eq!("field_name8", &to_snake("FieldName8")); assert_eq!("field_name9", &to_snake("field_Name9")); assert_eq!("field_name10", &to_snake("Field_Name10")); assert_eq!("field_name11", &to_snake("FIELD_NAME11")); assert_eq!("field_name12", &to_snake("FIELD_name12")); assert_eq!("field_name13", &to_snake("__field_name13")); assert_eq!("field_name14", &to_snake("__Field_name14")); assert_eq!("field_name15", &to_snake("field__name15")); assert_eq!("field_name16", &to_snake("field__Name16")); assert_eq!("field_name17", &to_snake("field_name17__")); assert_eq!("field_name18", &to_snake("Field_name18__")); } #[test] fn test_to_snake_raw_keyword() { assert_eq!("r#as", &to_snake("as")); assert_eq!("r#break", &to_snake("break")); assert_eq!("r#const", &to_snake("const")); assert_eq!("r#continue", &to_snake("continue")); assert_eq!("r#else", &to_snake("else")); assert_eq!("r#enum", &to_snake("enum")); assert_eq!("r#false", &to_snake("false")); assert_eq!("r#fn", &to_snake("fn")); assert_eq!("r#for", &to_snake("for")); assert_eq!("r#if", &to_snake("if")); assert_eq!("r#impl", &to_snake("impl")); assert_eq!("r#in", &to_snake("in")); assert_eq!("r#let", &to_snake("let")); assert_eq!("r#loop", &to_snake("loop")); assert_eq!("r#match", &to_snake("match")); assert_eq!("r#mod", &to_snake("mod")); assert_eq!("r#move", &to_snake("move")); assert_eq!("r#mut", &to_snake("mut")); assert_eq!("r#pub", &to_snake("pub")); assert_eq!("r#ref", &to_snake("ref")); assert_eq!("r#return", &to_snake("return")); assert_eq!("r#static", &to_snake("static")); assert_eq!("r#struct", &to_snake("struct")); assert_eq!("r#trait", &to_snake("trait")); assert_eq!("r#true", &to_snake("true")); assert_eq!("r#type", &to_snake("type")); assert_eq!("r#unsafe", &to_snake("unsafe")); assert_eq!("r#use", &to_snake("use")); assert_eq!("r#where", &to_snake("where")); assert_eq!("r#while", &to_snake("while")); assert_eq!("r#dyn", &to_snake("dyn")); assert_eq!("r#abstract", &to_snake("abstract")); assert_eq!("r#become", &to_snake("become")); assert_eq!("r#box", &to_snake("box")); assert_eq!("r#do", &to_snake("do")); assert_eq!("r#final", &to_snake("final")); assert_eq!("r#macro", &to_snake("macro")); assert_eq!("r#override", &to_snake("override")); assert_eq!("r#priv", &to_snake("priv")); assert_eq!("r#typeof", &to_snake("typeof")); assert_eq!("r#unsized", &to_snake("unsized")); assert_eq!("r#virtual", &to_snake("virtual")); assert_eq!("r#yield", &to_snake("yield")); assert_eq!("r#async", &to_snake("async")); assert_eq!("r#await", &to_snake("await")); assert_eq!("r#try", &to_snake("try")); } #[test] fn test_to_snake_non_raw_keyword() { assert_eq!("self_", &to_snake("self")); assert_eq!("super_", &to_snake("super")); assert_eq!("extern_", &to_snake("extern")); assert_eq!("crate_", &to_snake("crate")); } #[test] fn test_to_upper_camel() { assert_eq!("", &to_upper_camel("")); assert_eq!("F", &to_upper_camel("F")); assert_eq!("Foo", &to_upper_camel("FOO")); assert_eq!("FooBar", &to_upper_camel("FOO_BAR")); assert_eq!("FooBar", &to_upper_camel("_FOO_BAR")); assert_eq!("FooBar", &to_upper_camel("FOO_BAR_")); assert_eq!("FooBar", &to_upper_camel("_FOO_BAR_")); 
assert_eq!("FuzzBuster", &to_upper_camel("fuzzBuster")); assert_eq!("FuzzBuster", &to_upper_camel("FuzzBuster")); assert_eq!("Self_", &to_upper_camel("self")); } } prost-build-0.11.9/src/lib.rs000064400000000000000000001740261046102023000140750ustar 00000000000000#![doc(html_root_url = "https://docs.rs/prost-build/0.11.9")] #![allow(clippy::option_as_ref_deref, clippy::format_push_string)] //! `prost-build` compiles `.proto` files into Rust. //! //! `prost-build` is designed to be used for build-time code generation as part of a Cargo //! build-script. //! //! ## Example //! //! Let's create a small crate, `snazzy`, that defines a collection of //! snazzy new items in a protobuf file. //! //! ```bash //! $ cargo new snazzy && cd snazzy //! ``` //! //! First, add `prost-build`, `prost` and its public dependencies to `Cargo.toml` //! (see [crates.io](https://crates.io/crates/prost) for the current versions): //! //! ```toml //! [dependencies] //! bytes = //! prost = //! //! [build-dependencies] //! prost-build = { version = } //! ``` //! //! Next, add `src/items.proto` to the project: //! //! ```proto //! syntax = "proto3"; //! //! package snazzy.items; //! //! // A snazzy new shirt! //! message Shirt { //! enum Size { //! SMALL = 0; //! MEDIUM = 1; //! LARGE = 2; //! } //! //! string color = 1; //! Size size = 2; //! } //! ``` //! //! To generate Rust code from `items.proto`, we use `prost-build` in the crate's //! `build.rs` build-script: //! //! ```rust,no_run //! use std::io::Result; //! fn main() -> Result<()> { //! prost_build::compile_protos(&["src/items.proto"], &["src/"])?; //! Ok(()) //! } //! ``` //! //! And finally, in `lib.rs`, include the generated code: //! //! ```rust,ignore //! // Include the `items` module, which is generated from items.proto. //! // It is important to maintain the same structure as in the proto. //! pub mod snazzy { //! pub mod items { //! include!(concat!(env!("OUT_DIR"), "/snazzy.items.rs")); //! } //! } //! //! use snazzy::items; //! //! pub fn create_large_shirt(color: String) -> items::Shirt { //! let mut shirt = items::Shirt::default(); //! shirt.color = color; //! shirt.set_size(items::shirt::Size::Large); //! shirt //! } //! ``` //! //! That's it! Run `cargo doc` to see documentation for the generated code. The full //! example project can be found on [GitHub](https://github.com/danburkert/snazzy). //! //! ### Cleaning up Markdown in code docs //! //! If you are using protobuf files from third parties, where the author of the protobuf //! is not treating comments as Markdown, or is, but has codeblocks in their docs, //! then you may need to clean up the documentation in order that `cargo test --doc` //! will not fail spuriously, and that `cargo doc` doesn't attempt to render the //! codeblocks as Rust code. //! //! To do this, in your `Cargo.toml`, add `features = ["cleanup-markdown"]` to the inclusion //! of the `prost-build` crate and when your code is generated, the code docs will automatically //! be cleaned up a bit. //! //! ## Sourcing `protoc` //! //! `prost-build` depends on the Protocol Buffers compiler, `protoc`, to parse `.proto` files into //! a representation that can be transformed into Rust. If set, `prost-build` uses the `PROTOC` //! for locating `protoc`. For example, on a macOS system where Protobuf is installed //! with Homebrew, set the environment variables to: //! //! ```bash //! PROTOC=/usr/local/bin/protoc //! ``` //! //! and in a typical Linux installation: //! //! ```bash //! PROTOC=/usr/bin/protoc //! ``` //! //! 
If no `PROTOC` environment variable is set then `prost-build` will search the //! current path for `protoc` or `protoc.exe`. If `prost-build` can not find `protoc` //! via these methods the `compile_protos` method will fail. //! //! ### Compiling `protoc` from source //! //! To compile `protoc` from source you can use the `protobuf-src` crate and //! set the correct environment variables. //! ```no_run,ignore, rust //! std::env::set_var("PROTOC", protobuf_src::protoc()); //! //! // Now compile your proto files via prost-build //! ``` //! //! [`protobuf-src`]: https://docs.rs/protobuf-src use std::collections::HashMap; use std::default; use std::env; use std::ffi::{OsStr, OsString}; use std::fmt; use std::fs; use std::io::{Error, ErrorKind, Result, Write}; use std::ops::RangeToInclusive; use std::path::{Path, PathBuf}; use std::process::Command; use log::debug; use log::trace; use prost::Message; use prost_types::{FileDescriptorProto, FileDescriptorSet}; pub use crate::ast::{Comments, Method, Service}; use crate::code_generator::CodeGenerator; use crate::extern_paths::ExternPaths; use crate::ident::to_snake; use crate::message_graph::MessageGraph; use crate::path::PathMap; mod ast; mod code_generator; mod extern_paths; mod ident; mod message_graph; mod path; /// A service generator takes a service descriptor and generates Rust code. /// /// `ServiceGenerator` can be used to generate application-specific interfaces /// or implementations for Protobuf service definitions. /// /// Service generators are registered with a code generator using the /// `Config::service_generator` method. /// /// A viable scenario is that an RPC framework provides a service generator. It generates a trait /// describing methods of the service and some glue code to call the methods of the trait, defining /// details like how errors are handled or if it is asynchronous. Then the user provides an /// implementation of the generated trait in the application code and plugs it into the framework. /// /// Such framework isn't part of Prost at present. pub trait ServiceGenerator { /// Generates a Rust interface or implementation for a service, writing the /// result to `buf`. fn generate(&mut self, service: Service, buf: &mut String); /// Finalizes the generation process. /// /// In case there's something that needs to be output at the end of the generation process, it /// goes here. Similar to [`generate`](#method.generate), the output should be appended to /// `buf`. /// /// An example can be a module or other thing that needs to appear just once, not for each /// service generated. /// /// This still can be called multiple times in a lifetime of the service generator, because it /// is called once per `.proto` file. /// /// The default implementation is empty and does nothing. fn finalize(&mut self, _buf: &mut String) {} /// Finalizes the generation process for an entire protobuf package. /// /// This differs from [`finalize`](#method.finalize) by where (and how often) it is called /// during the service generator life cycle. This method is called once per protobuf package, /// making it ideal for grouping services within a single package spread across multiple /// `.proto` files. /// /// The default implementation is empty and does nothing. fn finalize_package(&mut self, _package: &str, _buf: &mut String) {} } /// The map collection type to output for Protobuf `map` fields. #[non_exhaustive] #[derive(Clone, Copy, Debug, PartialEq)] enum MapType { /// The [`std::collections::HashMap`] type. 
HashMap, /// The [`std::collections::BTreeMap`] type. BTreeMap, } impl Default for MapType { fn default() -> MapType { MapType::HashMap } } /// The bytes collection type to output for Protobuf `bytes` fields. #[non_exhaustive] #[derive(Clone, Copy, Debug, PartialEq)] enum BytesType { /// The [`alloc::collections::Vec::`] type. Vec, /// The [`bytes::Bytes`] type. Bytes, } impl Default for BytesType { fn default() -> BytesType { BytesType::Vec } } /// Configuration options for Protobuf code generation. /// /// This configuration builder can be used to set non-default code generation options. pub struct Config { file_descriptor_set_path: Option, service_generator: Option>, map_type: PathMap, bytes_type: PathMap, type_attributes: PathMap, message_attributes: PathMap, enum_attributes: PathMap, field_attributes: PathMap, boxed: PathMap<()>, prost_types: bool, strip_enum_prefix: bool, out_dir: Option, extern_paths: Vec<(String, String)>, default_package_filename: String, protoc_args: Vec, disable_comments: PathMap<()>, skip_protoc_run: bool, include_file: Option, prost_path: Option, fmt: bool, } impl Config { /// Creates a new code generator configuration with default options. pub fn new() -> Config { Config::default() } /// Configure the code generator to generate Rust [`BTreeMap`][1] fields for Protobuf /// [`map`][2] type fields. /// /// # Arguments /// /// **`paths`** - paths to specific fields, messages, or packages which should use a Rust /// `BTreeMap` for Protobuf `map` fields. Paths are specified in terms of the Protobuf type /// name (not the generated Rust type name). Paths with a leading `.` are treated as fully /// qualified names. Paths without a leading `.` are treated as relative, and are suffix /// matched on the fully qualified field name. If a Protobuf map field matches any of the /// paths, a Rust `BTreeMap` field is generated instead of the default [`HashMap`][3]. /// /// The matching is done on the Protobuf names, before converting to Rust-friendly casing /// standards. /// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Match a specific field in a message type. /// config.btree_map(&[".my_messages.MyMessageType.my_map_field"]); /// /// // Match all map fields in a message type. /// config.btree_map(&[".my_messages.MyMessageType"]); /// /// // Match all map fields in a package. /// config.btree_map(&[".my_messages"]); /// /// // Match all map fields. Specially useful in `no_std` contexts. /// config.btree_map(&["."]); /// /// // Match all map fields in a nested message. /// config.btree_map(&[".my_messages.MyMessageType.MyNestedMessageType"]); /// /// // Match all fields named 'my_map_field'. /// config.btree_map(&["my_map_field"]); /// /// // Match all fields named 'my_map_field' in messages named 'MyMessageType', regardless of /// // package or nesting. /// config.btree_map(&["MyMessageType.my_map_field"]); /// /// // Match all fields named 'my_map_field', and all fields in the 'foo.bar' package. 
/// config.btree_map(&["my_map_field", ".foo.bar"]); /// ``` /// /// [1]: https://doc.rust-lang.org/std/collections/struct.BTreeMap.html /// [2]: https://developers.google.com/protocol-buffers/docs/proto3#maps /// [3]: https://doc.rust-lang.org/std/collections/struct.HashMap.html pub fn btree_map(&mut self, paths: I) -> &mut Self where I: IntoIterator, S: AsRef, { self.map_type.clear(); for matcher in paths { self.map_type .insert(matcher.as_ref().to_string(), MapType::BTreeMap); } self } /// Configure the code generator to generate Rust [`bytes::Bytes`][1] fields for Protobuf /// [`bytes`][2] type fields. /// /// # Arguments /// /// **`paths`** - paths to specific fields, messages, or packages which should use a Rust /// `Bytes` for Protobuf `bytes` fields. Paths are specified in terms of the Protobuf type /// name (not the generated Rust type name). Paths with a leading `.` are treated as fully /// qualified names. Paths without a leading `.` are treated as relative, and are suffix /// matched on the fully qualified field name. If a Protobuf map field matches any of the /// paths, a Rust `Bytes` field is generated instead of the default [`Vec`][3]. /// /// The matching is done on the Protobuf names, before converting to Rust-friendly casing /// standards. /// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Match a specific field in a message type. /// config.bytes(&[".my_messages.MyMessageType.my_bytes_field"]); /// /// // Match all bytes fields in a message type. /// config.bytes(&[".my_messages.MyMessageType"]); /// /// // Match all bytes fields in a package. /// config.bytes(&[".my_messages"]); /// /// // Match all bytes fields. Specially useful in `no_std` contexts. /// config.bytes(&["."]); /// /// // Match all bytes fields in a nested message. /// config.bytes(&[".my_messages.MyMessageType.MyNestedMessageType"]); /// /// // Match all fields named 'my_bytes_field'. /// config.bytes(&["my_bytes_field"]); /// /// // Match all fields named 'my_bytes_field' in messages named 'MyMessageType', regardless of /// // package or nesting. /// config.bytes(&["MyMessageType.my_bytes_field"]); /// /// // Match all fields named 'my_bytes_field', and all fields in the 'foo.bar' package. /// config.bytes(&["my_bytes_field", ".foo.bar"]); /// ``` /// /// [1]: https://docs.rs/bytes/latest/bytes/struct.Bytes.html /// [2]: https://developers.google.com/protocol-buffers/docs/proto3#scalar /// [3]: https://doc.rust-lang.org/std/vec/struct.Vec.html pub fn bytes(&mut self, paths: I) -> &mut Self where I: IntoIterator, S: AsRef, { self.bytes_type.clear(); for matcher in paths { self.bytes_type .insert(matcher.as_ref().to_string(), BytesType::Bytes); } self } /// Add additional attribute to matched fields. /// /// # Arguments /// /// **`path`** - a path matching any number of fields. These fields get the attribute. /// For details about matching fields see [`btree_map`](#method.btree_map). /// /// **`attribute`** - an arbitrary string that'll be placed before each matched field. The /// expected usage are additional attributes, usually in concert with whole-type /// attributes set with [`type_attribute`](method.type_attribute), but it is not /// checked and anything can be put there. /// /// Note that the calls to this method are cumulative ‒ if multiple paths from multiple calls /// match the same field, the field gets all the corresponding attributes. 
/// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Prost renames fields named `in` to `in_`. But if serialized through serde, /// // they should as `in`. /// config.field_attribute("in", "#[serde(rename = \"in\")]"); /// ``` pub fn field_attribute(&mut self, path: P, attribute: A) -> &mut Self where P: AsRef, A: AsRef, { self.field_attributes .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); self } /// Add additional attribute to matched messages, enums and one-ofs. /// /// # Arguments /// /// **`paths`** - a path matching any number of types. It works the same way as in /// [`btree_map`](#method.btree_map), just with the field name omitted. /// /// **`attribute`** - an arbitrary string to be placed before each matched type. The /// expected usage are additional attributes, but anything is allowed. /// /// The calls to this method are cumulative. They don't overwrite previous calls and if a /// type is matched by multiple calls of the method, all relevant attributes are added to /// it. /// /// For things like serde it might be needed to combine with [field /// attributes](#method.field_attribute). /// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`. /// config.type_attribute(".", "#[derive(Eq)]"); /// // Some messages want to be serializable with serde as well. /// config.type_attribute("my_messages.MyMessageType", /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); /// config.type_attribute("my_messages.MyMessageType.MyNestedMessageType", /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); /// ``` /// /// # Oneof fields /// /// The `oneof` fields don't have a type name of their own inside Protobuf. Therefore, the /// field name can be used both with `type_attribute` and `field_attribute` ‒ the first is /// placed before the `enum` type definition, the other before the field inside corresponding /// message `struct`. /// /// In other words, to place an attribute on the `enum` implementing the `oneof`, the match /// would look like `my_messages.MyMessageType.oneofname`. pub fn type_attribute(&mut self, path: P, attribute: A) -> &mut Self where P: AsRef, A: AsRef, { self.type_attributes .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); self } /// Add additional attribute to matched messages. /// /// # Arguments /// /// **`paths`** - a path matching any number of types. It works the same way as in /// [`btree_map`](#method.btree_map), just with the field name omitted. /// /// **`attribute`** - an arbitrary string to be placed before each matched type. The /// expected usage are additional attributes, but anything is allowed. /// /// The calls to this method are cumulative. They don't overwrite previous calls and if a /// type is matched by multiple calls of the method, all relevant attributes are added to /// it. /// /// For things like serde it might be needed to combine with [field /// attributes](#method.field_attribute). /// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`. /// config.message_attribute(".", "#[derive(Eq)]"); /// // Some messages want to be serializable with serde as well. 
/// config.message_attribute("my_messages.MyMessageType", /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); /// config.message_attribute("my_messages.MyMessageType.MyNestedMessageType", /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); /// ``` pub fn message_attribute(&mut self, path: P, attribute: A) -> &mut Self where P: AsRef, A: AsRef, { self.message_attributes .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); self } /// Add additional attribute to matched enums and one-ofs. /// /// # Arguments /// /// **`paths`** - a path matching any number of types. It works the same way as in /// [`btree_map`](#method.btree_map), just with the field name omitted. /// /// **`attribute`** - an arbitrary string to be placed before each matched type. The /// expected usage are additional attributes, but anything is allowed. /// /// The calls to this method are cumulative. They don't overwrite previous calls and if a /// type is matched by multiple calls of the method, all relevant attributes are added to /// it. /// /// For things like serde it might be needed to combine with [field /// attributes](#method.field_attribute). /// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`. /// config.enum_attribute(".", "#[derive(Eq)]"); /// // Some messages want to be serializable with serde as well. /// config.enum_attribute("my_messages.MyEnumType", /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); /// config.enum_attribute("my_messages.MyMessageType.MyNestedEnumType", /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); /// ``` /// /// # Oneof fields /// /// The `oneof` fields don't have a type name of their own inside Protobuf. Therefore, the /// field name can be used both with `enum_attribute` and `field_attribute` ‒ the first is /// placed before the `enum` type definition, the other before the field inside corresponding /// message `struct`. /// /// In other words, to place an attribute on the `enum` implementing the `oneof`, the match /// would look like `my_messages.MyNestedMessageType.oneofname`. pub fn enum_attribute(&mut self, path: P, attribute: A) -> &mut Self where P: AsRef, A: AsRef, { self.enum_attributes .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); self } /// Wrap matched fields in a `Box`. /// /// # Arguments /// /// **`path`** - a path matching any number of fields. These fields get the attribute. /// For details about matching fields see [`btree_map`](#method.btree_map). /// /// # Examples /// /// ```rust /// # let mut config = prost_build::Config::new(); /// config.boxed(".my_messages.MyMessageType.my_field"); /// ``` pub fn boxed
<P>
(&mut self, path: P) -> &mut Self where P: AsRef, { self.boxed.insert(path.as_ref().to_string(), ()); self } /// Configures the code generator to use the provided service generator. pub fn service_generator(&mut self, service_generator: Box) -> &mut Self { self.service_generator = Some(service_generator); self } /// Configures the code generator to not use the `prost_types` crate for Protobuf well-known /// types, and instead generate Protobuf well-known types from their `.proto` definitions. pub fn compile_well_known_types(&mut self) -> &mut Self { self.prost_types = false; self } /// Configures the code generator to omit documentation comments on generated Protobuf types. /// /// # Example /// /// Occasionally `.proto` files contain code blocks which are not valid Rust. To avoid doctest /// failures, annotate the invalid code blocks with an [`ignore` or `no_run` attribute][1], or /// disable doctests for the crate with a [Cargo.toml entry][2]. If neither of these options /// are possible, then omit comments on generated code during doctest builds: /// /// ```rust,no_run /// # fn main() -> std::io::Result<()> { /// let mut config = prost_build::Config::new(); /// config.disable_comments(&["."]); /// config.compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?; /// # Ok(()) /// # } /// ``` /// /// As with other options which take a set of paths, comments can be disabled on a per-package /// or per-symbol basis. /// /// [1]: https://doc.rust-lang.org/rustdoc/documentation-tests.html#attributes /// [2]: https://doc.rust-lang.org/cargo/reference/cargo-targets.html#configuring-a-target pub fn disable_comments(&mut self, paths: I) -> &mut Self where I: IntoIterator, S: AsRef, { self.disable_comments.clear(); for matcher in paths { self.disable_comments .insert(matcher.as_ref().to_string(), ()); } self } /// Declare an externally provided Protobuf package or type. /// /// `extern_path` allows `prost` types in external crates to be referenced in generated code. /// /// When `prost` compiles a `.proto` which includes an import of another `.proto`, it will /// automatically recursively compile the imported file as well. `extern_path` can be used /// to instead substitute types from an external crate. 
/// /// # Example /// /// As an example, consider a crate, `uuid`, with a `prost`-generated `Uuid` type: /// /// ```proto /// // uuid.proto /// /// syntax = "proto3"; /// package uuid; /// /// message Uuid { /// string uuid_str = 1; /// } /// ``` /// /// The `uuid` crate implements some traits for `Uuid`, and publicly exports it: /// /// ```rust,ignore /// // lib.rs in the uuid crate /// /// include!(concat!(env!("OUT_DIR"), "/uuid.rs")); /// /// pub trait DoSomething { /// fn do_it(&self); /// } /// /// impl DoSomething for Uuid { /// fn do_it(&self) { /// println!("Done"); /// } /// } /// ``` /// /// A separate crate, `my_application`, uses `prost` to generate message types which reference /// `Uuid`: /// /// ```proto /// // my_application.proto /// /// syntax = "proto3"; /// package my_application; /// /// import "uuid.proto"; /// /// message MyMessage { /// uuid.Uuid message_id = 1; /// string some_payload = 2; /// } /// ``` /// /// Additionally, `my_application` depends on the trait impls provided by the `uuid` crate: /// /// ```rust,ignore /// // `main.rs` of `my_application` /// /// use uuid::{DoSomething, Uuid}; /// /// include!(concat!(env!("OUT_DIR"), "/my_application.rs")); /// /// pub fn process_message(msg: MyMessage) { /// if let Some(uuid) = msg.message_id { /// uuid.do_it(); /// } /// } /// ``` /// /// Without configuring `uuid` as an external path in `my_application`'s `build.rs`, `prost` /// would compile a completely separate version of the `Uuid` type, and `process_message` would /// fail to compile. However, if `my_application` configures `uuid` as an extern path with a /// call to `.extern_path(".uuid", "::uuid")`, `prost` will use the external type instead of /// compiling a new version of `Uuid`. Note that the configuration could also be specified as /// `.extern_path(".uuid.Uuid", "::uuid::Uuid")` if only the `Uuid` type were externally /// provided, and not the whole `uuid` package. /// /// # Usage /// /// `extern_path` takes a fully-qualified Protobuf path, and the corresponding Rust path that /// it will be substituted with in generated code. The Protobuf path can refer to a package or /// a type, and the Rust path should correspondingly refer to a Rust module or type. /// /// ```rust /// # let mut config = prost_build::Config::new(); /// // Declare the `uuid` Protobuf package and all nested packages and types as externally /// // provided by the `uuid` crate. /// config.extern_path(".uuid", "::uuid"); /// /// // Declare the `foo.bar.baz` Protobuf package and all nested packages and types as /// // externally provided by the `foo_bar_baz` crate. /// config.extern_path(".foo.bar.baz", "::foo_bar_baz"); /// /// // Declare the `uuid.Uuid` Protobuf type (and all nested types) as externally provided /// // by the `uuid` crate's `Uuid` type. /// config.extern_path(".uuid.Uuid", "::uuid::Uuid"); /// ``` pub fn extern_path(&mut self, proto_path: P1, rust_path: P2) -> &mut Self where P1: Into, P2: Into, { self.extern_paths .push((proto_path.into(), rust_path.into())); self } /// When set, the `FileDescriptorSet` generated by `protoc` is written to the provided /// filesystem path. /// /// This option can be used in conjunction with the [`include_bytes!`] macro and the types in /// the `prost-types` crate for implementing reflection capabilities, among other things. 
/// /// ## Example /// /// In `build.rs`: /// /// ```rust, no_run /// # use std::env; /// # use std::path::PathBuf; /// # let mut config = prost_build::Config::new(); /// config.file_descriptor_set_path( /// PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set")) /// .join("file_descriptor_set.bin")); /// ``` /// /// In `lib.rs`: /// /// ```rust,ignore /// let file_descriptor_set_bytes = include_bytes!(concat!(env!("OUT_DIR"), "/file_descriptor_set.bin")); /// let file_descriptor_set = prost_types::FileDescriptorSet::decode(&file_descriptor_set_bytes[..]).unwrap(); /// ``` pub fn file_descriptor_set_path
<P>
(&mut self, path: P) -> &mut Self where P: Into<PathBuf>, { self.file_descriptor_set_path = Some(path.into()); self } /// In combination with `file_descriptor_set_path`, this can be used to provide a file /// descriptor set as an input file, rather than having prost-build generate the file by calling /// protoc. /// /// In `build.rs`: /// /// ```rust /// # let mut config = prost_build::Config::new(); /// config.file_descriptor_set_path("path/from/build/system") /// .skip_protoc_run() /// .compile_protos(&["src/items.proto"], &["src/"]); /// ``` /// pub fn skip_protoc_run(&mut self) -> &mut Self { self.skip_protoc_run = true; self } /// Configures the code generator to not strip the enum name from variant names. /// /// Protobuf enum definitions commonly include the enum name as a prefix of every variant name. /// This style is non-idiomatic in Rust, so by default `prost` strips the enum name prefix from /// variants which include it. Configuring this option prevents `prost` from stripping the /// prefix. pub fn retain_enum_prefix(&mut self) -> &mut Self { self.strip_enum_prefix = false; self } /// Configures the output directory where generated Rust files will be written. /// /// If unset, defaults to the `OUT_DIR` environment variable. `OUT_DIR` is set by Cargo when /// executing build scripts, so `out_dir` typically does not need to be configured. pub fn out_dir
<P>
(&mut self, path: P) -> &mut Self where P: Into<PathBuf>, { self.out_dir = Some(path.into()); self } /// Configures what filename protobufs with no package definition are written to. /// The filename will be appended with the `.rs` extension. pub fn default_package_filename<S>(&mut self, filename: S) -> &mut Self where S: Into<String>, { self.default_package_filename = filename.into(); self } /// Configures the path that's used for deriving `Message` for generated messages. /// This is mainly useful for generating crates that wish to re-export prost. /// Defaults to `::prost::Message` if not specified. pub fn prost_path<S>(&mut self, path: S) -> &mut Self where S: Into<String>, { self.prost_path = Some(path.into()); self } /// Add an argument to the `protoc` protobuf compilation invocation. /// /// # Example `build.rs` /// /// ```rust,no_run /// # use std::io::Result; /// fn main() -> Result<()> { /// let mut prost_build = prost_build::Config::new(); /// // Enable a protoc experimental feature. /// prost_build.protoc_arg("--experimental_allow_proto3_optional"); /// prost_build.compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?; /// Ok(()) /// } /// ``` pub fn protoc_arg<S>(&mut self, arg: S) -> &mut Self where S: AsRef<OsStr>, { self.protoc_args.push(arg.as_ref().to_owned()); self } /// Configures the optional module filename for easy inclusion of all generated Rust files /// /// If set, generates a file (inside the `OUT_DIR` or `out_dir()` as appropriate) which contains /// a set of `pub mod XXX` statements combining to load all Rust files generated. This can allow /// for a shortcut where multiple related proto files have been compiled together resulting in /// a semi-complex set of includes. /// /// Turning a need for: /// /// ```rust,no_run,ignore /// pub mod Foo { /// pub mod Bar { /// include!(concat!(env!("OUT_DIR"), "/foo.bar.rs")); /// } /// pub mod Baz { /// include!(concat!(env!("OUT_DIR"), "/foo.baz.rs")); /// } /// } /// ``` /// /// Into the simpler: /// /// ```rust,no_run,ignore /// include!(concat!(env!("OUT_DIR"), "/_includes.rs")); /// ``` pub fn include_file
<P>
(&mut self, path: P) -> &mut Self where P: Into, { self.include_file = Some(path.into()); self } /// Configures the code generator to format the output code via `prettyplease`. /// /// By default, this is enabled but if the `format` feature is not enabled this does /// nothing. pub fn format(&mut self, enabled: bool) -> &mut Self { self.fmt = enabled; self } /// Compile a [`FileDescriptorSet`] into Rust files during a Cargo build with /// additional code generator configuration options. /// /// This method is like `compile_protos` function except it does not invoke `protoc` /// and instead requires the user to supply a [`FileDescriptorSet`]. /// /// # Example `build.rs` /// /// ```rust,no_run /// # fn fds() -> FileDescriptorSet { todo!() } /// fn main() -> std::io::Result<()> { /// let file_descriptor_set = fds(); /// /// prost_build::Config::new() /// .compile_fds(file_descriptor_set)?; /// } /// ``` pub fn compile_fds(&mut self, fds: FileDescriptorSet) -> Result<()> { let mut target_is_env = false; let target: PathBuf = self.out_dir.clone().map(Ok).unwrap_or_else(|| { env::var_os("OUT_DIR") .ok_or_else(|| { Error::new(ErrorKind::Other, "OUT_DIR environment variable is not set") }) .map(|val| { target_is_env = true; Into::into(val) }) })?; let requests = fds .file .into_iter() .map(|descriptor| { ( Module::from_protobuf_package_name(descriptor.package()), descriptor, ) }) .collect::>(); let file_names = requests .iter() .map(|req| { ( req.0.clone(), req.0.to_file_name_or(&self.default_package_filename), ) }) .collect::>(); let modules = self.generate(requests)?; for (module, content) in &modules { let file_name = file_names .get(module) .expect("every module should have a filename"); let output_path = target.join(file_name); let previous_content = fs::read(&output_path); if previous_content .map(|previous_content| previous_content == content.as_bytes()) .unwrap_or(false) { trace!("unchanged: {:?}", file_name); } else { trace!("writing: {:?}", file_name); fs::write(output_path, content)?; } } if let Some(ref include_file) = self.include_file { trace!("Writing include file: {:?}", target.join(include_file)); let mut file = fs::File::create(target.join(include_file))?; self.write_includes( modules.keys().collect(), &mut file, 0, if target_is_env { None } else { Some(&target) }, )?; file.flush()?; } Ok(()) } /// Compile `.proto` files into Rust files during a Cargo build with additional code generator /// configuration options. /// /// This method is like the `prost_build::compile_protos` function, with the added ability to /// specify non-default code generation options. See that function for more information about /// the arguments and generated outputs. /// /// The `protos` and `includes` arguments are ignored if `skip_protoc_run` is specified. /// /// # Example `build.rs` /// /// ```rust,no_run /// # use std::io::Result; /// fn main() -> Result<()> { /// let mut prost_build = prost_build::Config::new(); /// prost_build.btree_map(&["."]); /// prost_build.compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?; /// Ok(()) /// } /// ``` pub fn compile_protos( &mut self, protos: &[impl AsRef], includes: &[impl AsRef], ) -> Result<()> { // TODO: This should probably emit 'rerun-if-changed=PATH' directives for cargo, however // according to [1] if any are output then those paths replace the default crate root, // which is undesirable. Figure out how to do it in an additive way; perhaps gcc-rs has // this figured out. 
// [1]: http://doc.crates.io/build-script.html#outputs-of-the-build-script let tmp; let file_descriptor_set_path = if let Some(path) = &self.file_descriptor_set_path { path.clone() } else { if self.skip_protoc_run { return Err(Error::new( ErrorKind::Other, "file_descriptor_set_path is required with skip_protoc_run", )); } tmp = tempfile::Builder::new().prefix("prost-build").tempdir()?; tmp.path().join("prost-descriptor-set") }; if !self.skip_protoc_run { let protoc = protoc_from_env(); let mut cmd = Command::new(protoc.clone()); cmd.arg("--include_imports") .arg("--include_source_info") .arg("-o") .arg(&file_descriptor_set_path); for include in includes { if include.as_ref().exists() { cmd.arg("-I").arg(include.as_ref()); } else { debug!( "ignoring {} since it does not exist.", include.as_ref().display() ) } } // Set the protoc include after the user includes in case the user wants to // override one of the built-in .protos. if let Some(protoc_include) = protoc_include_from_env() { cmd.arg("-I").arg(protoc_include); } for arg in &self.protoc_args { cmd.arg(arg); } for proto in protos { cmd.arg(proto.as_ref()); } debug!("Running: {:?}", cmd); let output = cmd.output().map_err(|error| { Error::new( error.kind(), format!("failed to invoke protoc (hint: https://docs.rs/prost-build/#sourcing-protoc): (path: {:?}): {}", &protoc, error), ) })?; if !output.status.success() { return Err(Error::new( ErrorKind::Other, format!("protoc failed: {}", String::from_utf8_lossy(&output.stderr)), )); } } let buf = fs::read(&file_descriptor_set_path).map_err(|e| { Error::new( e.kind(), format!( "unable to open file_descriptor_set_path: {:?}, OS: {}", &file_descriptor_set_path, e ), ) })?; let file_descriptor_set = FileDescriptorSet::decode(&*buf).map_err(|error| { Error::new( ErrorKind::InvalidInput, format!("invalid FileDescriptorSet: {}", error), ) })?; self.compile_fds(file_descriptor_set) } fn write_includes( &self, mut entries: Vec<&Module>, outfile: &mut fs::File, depth: usize, basepath: Option<&PathBuf>, ) -> Result { let mut written = 0; entries.sort(); while !entries.is_empty() { let modident = entries[0].part(depth); let matching: Vec<&Module> = entries .iter() .filter(|&v| v.part(depth) == modident) .copied() .collect(); { // Will NLL sort this mess out? let _temp = entries .drain(..) .filter(|&v| v.part(depth) != modident) .collect(); entries = _temp; } self.write_line(outfile, depth, &format!("pub mod {} {{", modident))?; let subwritten = self.write_includes( matching .iter() .filter(|v| v.len() > depth + 1) .copied() .collect(), outfile, depth + 1, basepath, )?; written += subwritten; if subwritten != matching.len() { let modname = matching[0].to_partial_file_name(..=depth); if basepath.is_some() { self.write_line( outfile, depth + 1, &format!("include!(\"{}.rs\");", modname), )?; } else { self.write_line( outfile, depth + 1, &format!("include!(concat!(env!(\"OUT_DIR\"), \"/{}.rs\"));", modname), )?; } written += 1; } self.write_line(outfile, depth, "}")?; } Ok(written) } fn write_line(&self, outfile: &mut fs::File, depth: usize, line: &str) -> Result<()> { outfile.write_all(format!("{}{}\n", (" ").to_owned().repeat(depth), line).as_bytes()) } /// Processes a set of modules and file descriptors, returning a map of modules to generated /// code contents. /// /// This is generally used when control over the output should not be managed by Prost, /// such as in a flow for a `protoc` code generating plugin. When compiling as part of a /// `build.rs` file, instead use [`compile_protos()`]. 
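///
/// # Example
///
/// A minimal sketch of plugin-style usage, assuming a `FileDescriptorSet` has
/// already been obtained elsewhere (`obtain_fds` below is a hypothetical helper):
///
/// ```rust,ignore
/// use prost_build::{Config, Module};
///
/// let fds: prost_types::FileDescriptorSet = obtain_fds();
/// let requests = fds
///     .file
///     .into_iter()
///     .map(|fd| (Module::from_protobuf_package_name(fd.package()), fd))
///     .collect::<Vec<_>>();
///
/// let mut config = Config::new();
/// let generated = config.generate(requests)?;
/// for (module, code) in &generated {
///     // Hand `code` to whatever output mechanism the plugin flow uses.
///     println!("// {}\n{}", module, code);
/// }
/// ```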
pub fn generate( &mut self, requests: Vec<(Module, FileDescriptorProto)>, ) -> Result> { let mut modules = HashMap::new(); let mut packages = HashMap::new(); let message_graph = MessageGraph::new(requests.iter().map(|x| &x.1)) .map_err(|error| Error::new(ErrorKind::InvalidInput, error))?; let extern_paths = ExternPaths::new(&self.extern_paths, self.prost_types) .map_err(|error| Error::new(ErrorKind::InvalidInput, error))?; for (request_module, request_fd) in requests { // Only record packages that have services if !request_fd.service.is_empty() { packages.insert(request_module.clone(), request_fd.package().to_string()); } let buf = modules .entry(request_module.clone()) .or_insert_with(String::new); CodeGenerator::generate(self, &message_graph, &extern_paths, request_fd, buf); if buf.is_empty() { // Did not generate any code, remove from list to avoid inclusion in include file or output file list modules.remove(&request_module); } } if let Some(ref mut service_generator) = self.service_generator { for (module, package) in packages { let buf = modules.get_mut(&module).unwrap(); service_generator.finalize_package(&package, buf); } } if self.fmt { self.fmt_modules(&mut modules); } Ok(modules) } #[cfg(feature = "format")] fn fmt_modules(&mut self, modules: &mut HashMap) { for buf in modules.values_mut() { let file = syn::parse_file(buf).unwrap(); let formatted = prettyplease::unparse(&file); *buf = formatted; } } #[cfg(not(feature = "format"))] fn fmt_modules(&mut self, _: &mut HashMap) {} } impl default::Default for Config { fn default() -> Config { Config { file_descriptor_set_path: None, service_generator: None, map_type: PathMap::default(), bytes_type: PathMap::default(), type_attributes: PathMap::default(), message_attributes: PathMap::default(), enum_attributes: PathMap::default(), field_attributes: PathMap::default(), boxed: PathMap::default(), prost_types: true, strip_enum_prefix: true, out_dir: None, extern_paths: Vec::new(), default_package_filename: "_".to_string(), protoc_args: Vec::new(), disable_comments: PathMap::default(), skip_protoc_run: false, include_file: None, prost_path: None, fmt: true, } } } impl fmt::Debug for Config { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("Config") .field("file_descriptor_set_path", &self.file_descriptor_set_path) .field("service_generator", &self.service_generator.is_some()) .field("map_type", &self.map_type) .field("bytes_type", &self.bytes_type) .field("type_attributes", &self.type_attributes) .field("field_attributes", &self.field_attributes) .field("prost_types", &self.prost_types) .field("strip_enum_prefix", &self.strip_enum_prefix) .field("out_dir", &self.out_dir) .field("extern_paths", &self.extern_paths) .field("default_package_filename", &self.default_package_filename) .field("protoc_args", &self.protoc_args) .field("disable_comments", &self.disable_comments) .field("prost_path", &self.prost_path) .finish() } } /// A Rust module path for a Protobuf package. #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Module { components: Vec, } impl Module { /// Construct a module path from an iterator of parts. pub fn from_parts(parts: I) -> Self where I: IntoIterator, I::Item: Into, { Self { components: parts.into_iter().map(|s| s.into()).collect(), } } /// Construct a module path from a Protobuf package name. /// /// Constituent parts are automatically converted to snake case in order to follow /// Rust module naming conventions. 
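///
/// # Example
///
/// A small illustration of the conversion (the package name is made up):
///
/// ```rust
/// use prost_build::Module;
///
/// let module = Module::from_protobuf_package_name("helloworld.MySubPackage");
/// assert_eq!(module.to_string(), "helloworld::my_sub_package");
/// ```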
pub fn from_protobuf_package_name(name: &str) -> Self { Self { components: name .split('.') .filter(|s| !s.is_empty()) .map(to_snake) .collect(), } } /// An iterator over the parts of the path. pub fn parts(&self) -> impl Iterator { self.components.iter().map(|s| s.as_str()) } /// Format the module path into a filename for generated Rust code. /// /// If the module path is empty, `default` is used to provide the root of the filename. pub fn to_file_name_or(&self, default: &str) -> String { let mut root = if self.components.is_empty() { default.to_owned() } else { self.components.join(".") }; root.push_str(".rs"); root } /// The number of parts in the module's path. pub fn len(&self) -> usize { self.components.len() } /// Whether the module's path contains any components. pub fn is_empty(&self) -> bool { self.components.is_empty() } fn to_partial_file_name(&self, range: RangeToInclusive) -> String { self.components[range].join(".") } fn part(&self, idx: usize) -> &str { self.components[idx].as_str() } } impl fmt::Display for Module { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut parts = self.parts(); if let Some(first) = parts.next() { f.write_str(first)?; } for part in parts { f.write_str("::")?; f.write_str(part)?; } Ok(()) } } /// Compile `.proto` files into Rust files during a Cargo build. /// /// The generated `.rs` files are written to the Cargo `OUT_DIR` directory, suitable for use with /// the [include!][1] macro. See the [Cargo `build.rs` code generation][2] example for more info. /// /// This function should be called in a project's `build.rs`. /// /// # Arguments /// /// **`protos`** - Paths to `.proto` files to compile. Any transitively [imported][3] `.proto` /// files are automatically be included. /// /// **`includes`** - Paths to directories in which to search for imports. Directories are searched /// in order. The `.proto` files passed in **`protos`** must be found in one of the provided /// include directories. /// /// # Errors /// /// This function can fail for a number of reasons: /// /// - Failure to locate or download `protoc`. /// - Failure to parse the `.proto`s. /// - Failure to locate an imported `.proto`. /// - Failure to compile a `.proto` without a [package specifier][4]. /// /// It's expected that this function call be `unwrap`ed in a `build.rs`; there is typically no /// reason to gracefully recover from errors during a build. /// /// # Example `build.rs` /// /// ```rust,no_run /// # use std::io::Result; /// fn main() -> Result<()> { /// prost_build::compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?; /// Ok(()) /// } /// ``` /// /// [1]: https://doc.rust-lang.org/std/macro.include.html /// [2]: http://doc.crates.io/build-script.html#case-study-code-generation /// [3]: https://developers.google.com/protocol-buffers/docs/proto3#importing-definitions /// [4]: https://developers.google.com/protocol-buffers/docs/proto#packages pub fn compile_protos(protos: &[impl AsRef], includes: &[impl AsRef]) -> Result<()> { Config::new().compile_protos(protos, includes) } /// Compile a [`FileDescriptorSet`] into Rust files during a Cargo build. /// /// The generated `.rs` files are written to the Cargo `OUT_DIR` directory, suitable for use with /// the [include!][1] macro. See the [Cargo `build.rs` code generation][2] example for more info. /// /// This function should be called in a project's `build.rs`. 
/// /// This function can be combined with a crate like [`protox`] which outputs a /// [`FileDescriptorSet`] and is a pure Rust implementation of `protoc`. /// /// [`protox`]: https://github.com/andrewhickman/protox /// /// # Example /// ```rust,no_run /// # fn fds() -> FileDescriptorSet { todo!() } /// fn main() -> std::io::Result<()> { /// let file_descriptor_set = fds(); /// /// prost_build::compile_fds(file_descriptor_set)?; /// } /// ``` pub fn compile_fds(fds: FileDescriptorSet) -> Result<()> { Config::new().compile_fds(fds) } /// Returns the path to the `protoc` binary. pub fn protoc_from_env() -> PathBuf { let os_specific_hint = if cfg!(target_os = "macos") { "You could try running `brew install protobuf` or downloading it from https://github.com/protocolbuffers/protobuf/releases" } else if cfg!(target_os = "linux") { "If you're on debian, try `apt-get install protobuf-compiler` or download it from https://github.com/protocolbuffers/protobuf/releases" } else { "You can download it from https://github.com/protocolbuffers/protobuf/releases or from your package manager." }; let error_msg = "Could not find `protoc` installation and this build crate cannot proceed without this knowledge. If `protoc` is installed and this crate had trouble finding it, you can set the `PROTOC` environment variable with the specific path to your installed `protoc` binary."; let msg = format!( "{}{} For more information: https://docs.rs/prost-build/#sourcing-protoc ", error_msg, os_specific_hint ); env::var_os("PROTOC") .map(PathBuf::from) .or_else(|| which::which("protoc").ok()) .expect(&msg) } /// Returns the path to the Protobuf include directory. pub fn protoc_include_from_env() -> Option { let protoc_include: PathBuf = env::var_os("PROTOC_INCLUDE")?.into(); if !protoc_include.exists() { panic!( "PROTOC_INCLUDE environment variable points to non-existent directory ({:?})", protoc_include ); } if !protoc_include.is_dir() { panic!( "PROTOC_INCLUDE environment variable points to a non-directory file ({:?})", protoc_include ); } Some(protoc_include) } #[cfg(test)] mod tests { use std::cell::RefCell; use std::fs::File; use std::io::Read; use std::path::Path; use std::rc::Rc; use super::*; /// An example service generator that generates a trait with methods corresponding to the /// service methods. struct ServiceTraitGenerator; impl ServiceGenerator for ServiceTraitGenerator { fn generate(&mut self, service: Service, buf: &mut String) { // Generate a trait for the service. service.comments.append_with_indent(0, buf); buf.push_str(&format!("trait {} {{\n", &service.name)); // Generate the service methods. for method in service.methods { method.comments.append_with_indent(1, buf); buf.push_str(&format!( " fn {}(_: {}) -> {};\n", method.name, method.input_type, method.output_type )); } // Close out the trait. buf.push_str("}\n"); } fn finalize(&mut self, buf: &mut String) { // Needs to be present only once, no matter how many services there are buf.push_str("pub mod utils { }\n"); } } /// Implements `ServiceGenerator` and provides some state for assertions. 
#[cfg(test)]
mod tests {
    use std::cell::RefCell;
    use std::fs::File;
    use std::io::Read;
    use std::path::Path;
    use std::rc::Rc;

    use super::*;

    /// An example service generator that generates a trait with methods corresponding to the
    /// service methods.
    struct ServiceTraitGenerator;

    impl ServiceGenerator for ServiceTraitGenerator {
        fn generate(&mut self, service: Service, buf: &mut String) {
            // Generate a trait for the service.
            service.comments.append_with_indent(0, buf);
            buf.push_str(&format!("trait {} {{\n", &service.name));

            // Generate the service methods.
            for method in service.methods {
                method.comments.append_with_indent(1, buf);
                buf.push_str(&format!(
                    "    fn {}(_: {}) -> {};\n",
                    method.name, method.input_type, method.output_type
                ));
            }

            // Close out the trait.
            buf.push_str("}\n");
        }

        fn finalize(&mut self, buf: &mut String) {
            // Needs to be present only once, no matter how many services there are.
            buf.push_str("pub mod utils { }\n");
        }
    }

    /// Implements `ServiceGenerator` and provides some state for assertions.
    struct MockServiceGenerator {
        state: Rc<RefCell<MockState>>,
    }

    /// Holds state for `MockServiceGenerator`.
    #[derive(Default)]
    struct MockState {
        service_names: Vec<String>,
        package_names: Vec<String>,
        finalized: u32,
    }

    impl MockServiceGenerator {
        fn new(state: Rc<RefCell<MockState>>) -> Self {
            Self { state }
        }
    }

    impl ServiceGenerator for MockServiceGenerator {
        fn generate(&mut self, service: Service, _buf: &mut String) {
            let mut state = self.state.borrow_mut();
            state.service_names.push(service.name);
        }

        fn finalize(&mut self, _buf: &mut String) {
            let mut state = self.state.borrow_mut();
            state.finalized += 1;
        }

        fn finalize_package(&mut self, package: &str, _buf: &mut String) {
            let mut state = self.state.borrow_mut();
            state.package_names.push(package.to_string());
        }
    }

    #[test]
    fn smoke_test() {
        let _ = env_logger::try_init();
        Config::new()
            .service_generator(Box::new(ServiceTraitGenerator))
            .out_dir(std::env::temp_dir())
            .compile_protos(&["src/fixtures/smoke_test/smoke_test.proto"], &["src"])
            .unwrap();
    }

    #[test]
    fn finalize_package() {
        let _ = env_logger::try_init();

        let state = Rc::new(RefCell::new(MockState::default()));
        let gen = MockServiceGenerator::new(Rc::clone(&state));

        Config::new()
            .service_generator(Box::new(gen))
            .include_file("_protos.rs")
            .out_dir(std::env::temp_dir())
            .compile_protos(
                &[
                    "src/fixtures/helloworld/hello.proto",
                    "src/fixtures/helloworld/goodbye.proto",
                ],
                &["src/fixtures/helloworld"],
            )
            .unwrap();

        let state = state.borrow();
        assert_eq!(&state.service_names, &["Greeting", "Farewell"]);
        assert_eq!(&state.package_names, &["helloworld"]);
        assert_eq!(state.finalized, 3);
    }

    #[test]
    fn test_generate_message_attributes() {
        let _ = env_logger::try_init();

        let out_dir = std::env::temp_dir();

        Config::new()
            .out_dir(out_dir.clone())
            .message_attribute(".", "#[derive(derive_builder::Builder)]")
            .enum_attribute(".", "#[some_enum_attr(u8)]")
            .compile_protos(
                &["src/fixtures/helloworld/hello.proto"],
                &["src/fixtures/helloworld"],
            )
            .unwrap();

        let out_file = out_dir
            .join("helloworld.rs")
            .as_path()
            .display()
            .to_string();
        let expected_content = read_all_content("src/fixtures/helloworld/_expected_helloworld.rs")
            .replace("\r\n", "\n");
        let content = read_all_content(&out_file).replace("\r\n", "\n");
        assert_eq!(
            expected_content, content,
            "Unexpected content: \n{}",
            content
        );
    }
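    // Illustrative sketch (not part of the original test suite): exercises the
    // public `Module` path helpers defined above; the package name is arbitrary.
    #[test]
    fn module_path_round_trip() {
        let module = Module::from_protobuf_package_name("helloworld.Greeter");
        assert_eq!(module.to_string(), "helloworld::greeter");
        assert_eq!(module.to_file_name_or("_"), "helloworld.greeter.rs");
        assert_eq!(module.len(), 2);
    }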
    #[test]
    fn test_generate_no_empty_outputs() {
        let _ = env_logger::try_init();
        let state = Rc::new(RefCell::new(MockState::default()));
        let gen = MockServiceGenerator::new(Rc::clone(&state));
        let include_file = "_include.rs";
        let out_dir = std::env::temp_dir()
            .as_path()
            .join("test_generate_no_empty_outputs");
        let previously_empty_proto_path = out_dir.as_path().join(Path::new("google.protobuf.rs"));
        // For reproducibility, ensure we start with the out directory created and empty.
        let _ = fs::remove_dir_all(&out_dir);
        let _ = fs::create_dir(&out_dir);

        Config::new()
            .service_generator(Box::new(gen))
            .include_file(include_file)
            .out_dir(&out_dir)
            .compile_protos(
                &["src/fixtures/imports_empty/imports_empty.proto"],
                &["src/fixtures/imports_empty"],
            )
            .unwrap();

        // Prior to the PR introducing this test, the generated include file referenced
        // google.protobuf.rs, which was an empty file. Now that file should only exist if it has
        // content.
        if let Ok(mut f) = File::open(&previously_empty_proto_path) {
            // Since this file was generated, it should not be empty.
            let mut contents = String::new();
            f.read_to_string(&mut contents).unwrap();
            assert!(!contents.is_empty());
        } else {
            // The file wasn't generated, so the resulting include file should not reference it.
            let expected = read_all_content("src/fixtures/imports_empty/_expected_include.rs");
            let actual = read_all_content(
                out_dir
                    .as_path()
                    .join(Path::new(include_file))
                    .display()
                    .to_string()
                    .as_str(),
            );
            // Normalize Windows- and Linux-style EOLs.
            let expected = expected.replace("\r\n", "\n");
            let actual = actual.replace("\r\n", "\n");
            assert_eq!(expected, actual);
        }
    }

    #[test]
    fn test_generate_field_attributes() {
        let _ = env_logger::try_init();

        let out_dir = std::env::temp_dir();

        Config::new()
            .out_dir(out_dir.clone())
            .boxed("Container.data.foo")
            .boxed("Bar.qux")
            .compile_protos(
                &["src/fixtures/field_attributes/field_attributes.proto"],
                &["src/fixtures/field_attributes"],
            )
            .unwrap();

        let out_file = out_dir
            .join("field_attributes.rs")
            .as_path()
            .display()
            .to_string();

        let content = read_all_content(&out_file).replace("\r\n", "\n");

        #[cfg(feature = "format")]
        let expected_content = read_all_content(
            "src/fixtures/field_attributes/_expected_field_attributes_formatted.rs",
        )
        .replace("\r\n", "\n");
        #[cfg(not(feature = "format"))]
        let expected_content =
            read_all_content("src/fixtures/field_attributes/_expected_field_attributes.rs")
                .replace("\r\n", "\n");

        assert_eq!(
            expected_content, content,
            "Unexpected content: \n{}",
            content
        );
    }

    #[test]
    fn deterministic_include_file() {
        let _ = env_logger::try_init();

        for _ in 1..10 {
            let state = Rc::new(RefCell::new(MockState::default()));
            let gen = MockServiceGenerator::new(Rc::clone(&state));
            let include_file = "_include.rs";
            let tmp_dir = std::env::temp_dir();

            Config::new()
                .service_generator(Box::new(gen))
                .include_file(include_file)
                .out_dir(std::env::temp_dir())
                .compile_protos(
                    &[
                        "src/fixtures/alphabet/a.proto",
                        "src/fixtures/alphabet/b.proto",
                        "src/fixtures/alphabet/c.proto",
                        "src/fixtures/alphabet/d.proto",
                        "src/fixtures/alphabet/e.proto",
                        "src/fixtures/alphabet/f.proto",
                    ],
                    &["src/fixtures/alphabet"],
                )
                .unwrap();

            let expected = read_all_content("src/fixtures/alphabet/_expected_include.rs");
            let actual = read_all_content(
                tmp_dir
                    .as_path()
                    .join(Path::new(include_file))
                    .display()
                    .to_string()
                    .as_str(),
            );
            // Normalize Windows- and Linux-style EOLs.
            let expected = expected.replace("\r\n", "\n");
            let actual = actual.replace("\r\n", "\n");
            assert_eq!(expected, actual);
        }
    }

    fn read_all_content(filepath: &str) -> String {
        let mut f = File::open(filepath).unwrap();
        let mut content = String::new();
        f.read_to_string(&mut content).unwrap();
        content
    }
}
prost-build-0.11.9/src/message_graph.rs000064400000000000000000000062471046102023000161330ustar 00000000000000
use std::collections::HashMap;

use petgraph::algo::has_path_connecting;
use petgraph::graph::NodeIndex;
use petgraph::Graph;

use prost_types::{field_descriptor_proto, DescriptorProto, FileDescriptorProto};

/// `MessageGraph` builds a graph of messages whose edges correspond to nesting.
/// The goal is to recognize when message types are recursively nested, so
/// that fields can be boxed when necessary.
pub struct MessageGraph {
    index: HashMap<String, NodeIndex>,
    graph: Graph<String, ()>,
}

impl MessageGraph {
    pub fn new<'a>(
        files: impl Iterator<Item = &'a FileDescriptorProto>,
    ) -> Result<MessageGraph, String> {
        let mut msg_graph = MessageGraph {
            index: HashMap::new(),
            graph: Graph::new(),
        };

        for file in files {
            let package = format!(
                "{}{}",
                if file.package.is_some() { "." } else { "" },
                file.package.as_ref().map(String::as_str).unwrap_or("")
            );
            for msg in &file.message_type {
                msg_graph.add_message(&package, msg);
            }
        }

        Ok(msg_graph)
    }
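    // Worked example (added commentary): for a file declaring `package foo`
    // with message `Outer` containing nested message `Inner`, `new` records
    // the nodes ".foo.Outer" and ".foo.Outer.Inner"; `add_message` below
    // reuses the parent's fully-qualified name as the nested type's prefix.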
    fn get_or_insert_index(&mut self, msg_name: String) -> NodeIndex {
        let MessageGraph {
            ref mut index,
            ref mut graph,
        } = *self;
        assert_eq!(b'.', msg_name.as_bytes()[0]);
        *index
            .entry(msg_name.clone())
            .or_insert_with(|| graph.add_node(msg_name))
    }

    /// Adds the message to the graph, with an edge for each non-repeated field whose type is
    /// another message.
    ///
    /// The purpose of the message graph is to detect recursively (and co-recursively) nested
    /// messages. Because prost does not box message fields by default, recursively nested
    /// messages would not compile in Rust. To allow recursive messages, the message graph is used
    /// to detect recursion and automatically box the recursive field. Since repeated messages are
    /// already stored in a `Vec`, boxing them isn't necessary even if the reference is recursive.
    fn add_message(&mut self, package: &str, msg: &DescriptorProto) {
        let msg_name = format!("{}.{}", package, msg.name.as_ref().unwrap());
        let msg_index = self.get_or_insert_index(msg_name.clone());

        for field in &msg.field {
            if field.r#type() == field_descriptor_proto::Type::Message
                && field.label() != field_descriptor_proto::Label::Repeated
            {
                let field_index = self.get_or_insert_index(field.type_name.clone().unwrap());
                self.graph.add_edge(msg_index, field_index, ());
            }
        }

        for msg in &msg.nested_type {
            self.add_message(&msg_name, msg);
        }
    }

    /// Returns true if message type `inner` is nested in message type `outer`.
    pub fn is_nested(&self, outer: &str, inner: &str) -> bool {
        let outer = match self.index.get(outer) {
            Some(outer) => *outer,
            None => return false,
        };
        let inner = match self.index.get(inner) {
            Some(inner) => *inner,
            None => return false,
        };

        has_path_connecting(&self.graph, outer, inner, None)
    }
}
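#[cfg(test)]
mod tests {
    use super::*;
    use prost_types::FieldDescriptorProto;

    // Illustrative sketch (not part of the original module): a message with a
    // non-repeated field of its own type produces a self-edge, which
    // `is_nested` reports as recursion; that is what triggers boxing upstream.
    #[test]
    fn detects_direct_recursion() {
        let file = FileDescriptorProto {
            package: Some("pkg".to_string()),
            message_type: vec![DescriptorProto {
                name: Some("Node".to_string()),
                field: vec![FieldDescriptorProto {
                    name: Some("next".to_string()),
                    r#type: Some(field_descriptor_proto::Type::Message as i32),
                    type_name: Some(".pkg.Node".to_string()),
                    label: Some(field_descriptor_proto::Label::Optional as i32),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            ..Default::default()
        };

        let graph = MessageGraph::new(std::iter::once(&file)).unwrap();
        assert!(graph.is_nested(".pkg.Node", ".pkg.Node"));
    }
}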
prost-build-0.11.9/src/path.rs000064400000000000000000000203361046102023000142550ustar 00000000000000
//! Utilities for working with Protobuf paths.

use std::iter;

/// Maps a fully-qualified Protobuf path to a value using path matchers.
#[derive(Debug, Default)]
pub(crate) struct PathMap<T> {
    // Insertion order might actually matter (to avoid a warning about legacy-derive-helpers).
    // See: https://doc.rust-lang.org/rustc/lints/listing/warn-by-default.html#legacy-derive-helpers
    pub(crate) matchers: Vec<(String, T)>,
}

impl<T> PathMap<T> {
    /// Inserts a new matcher and associated value into the path map.
    pub(crate) fn insert(&mut self, matcher: String, value: T) {
        self.matchers.push((matcher, value));
    }

    /// Returns an iterator over all values matching the given fully-qualified path, whether by
    /// the path itself or by an associated suffix or prefix path.
    pub(crate) fn get(&self, fq_path: &str) -> Iter<'_, T> {
        Iter::new(self, fq_path.to_string())
    }

    /// Returns an iterator over all values matching the path `fq_path.field`, whether by the
    /// path itself or by an associated suffix or prefix path.
    pub(crate) fn get_field(&self, fq_path: &str, field: &str) -> Iter<'_, T> {
        Iter::new(self, format!("{}.{}", fq_path, field))
    }

    /// Returns the first value found matching the given path.
    ///
    /// If nothing matches the full path, suffix paths are tried, then prefix paths, then the
    /// global path.
    #[allow(unused)]
    pub(crate) fn get_first<'a>(&'a self, fq_path: &'_ str) -> Option<&'a T> {
        self.find_best_matching(fq_path)
    }

    /// Returns the first value found matching the path `fq_path.field`.
    ///
    /// If nothing matches the full path, suffix paths are tried, then prefix paths, then the
    /// global path.
    pub(crate) fn get_first_field<'a>(&'a self, fq_path: &'_ str, field: &'_ str) -> Option<&'a T> {
        self.find_best_matching(&format!("{}.{}", fq_path, field))
    }

    /// Removes all matchers from the path map.
    pub(crate) fn clear(&mut self) {
        self.matchers.clear();
    }

    /// Returns the first value found that best matches the path.
    ///
    /// See [`sub_path_iter`] for the order in which candidate paths are tried.
    fn find_best_matching(&self, full_path: &str) -> Option<&T> {
        sub_path_iter(full_path).find_map(|path| {
            self.matchers
                .iter()
                .find(|(p, _)| p == path)
                .map(|(_, v)| v)
        })
    }
}

/// An iterator over the values in a `PathMap` that match a given path.
pub(crate) struct Iter<'a, T> {
    iter: std::slice::Iter<'a, (String, T)>,
    path: String,
}

impl<'a, T> Iter<'a, T> {
    fn new(map: &'a PathMap<T>, path: String) -> Self {
        Self {
            iter: map.matchers.iter(),
            path,
        }
    }

    fn is_match(&self, path: &str) -> bool {
        sub_path_iter(self.path.as_str()).any(|p| p == path)
    }
}

impl<'a, T> std::iter::Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            match self.iter.next() {
                Some((p, v)) => {
                    if self.is_match(p) {
                        return Some(v);
                    }
                }
                None => return None,
            }
        }
    }
}

impl<'a, T> std::iter::FusedIterator for Iter<'a, T> {}

/// Given a fully-qualified path, returns a sequence of paths:
/// - the path itself
/// - the sequence of suffix paths
/// - the sequence of prefix paths
/// - the global path
///
/// Example: `sub_path_iter(".a.b.c")` -> `[".a.b.c", "a.b.c", "b.c", "c", ".a.b", ".a", "."]`
fn sub_path_iter(full_path: &str) -> impl Iterator<Item = &str> {
    // First, try matching the full path.
    iter::once(full_path)
        // Then, try matching path suffixes.
        .chain(suffixes(full_path))
        // Then, try matching path prefixes.
        .chain(prefixes(full_path))
        // Finally, match the global path.
        .chain(iter::once("."))
}

/// Given a fully-qualified path, returns a sequence of fully-qualified paths which match a prefix
/// of the input path, in decreasing path-length order.
///
/// Example: `prefixes(".a.b.c.d")` -> `[".a.b.c", ".a.b", ".a"]`
fn prefixes(fq_path: &str) -> impl Iterator<Item = &str> {
    std::iter::successors(Some(fq_path), |path| {
        #[allow(unknown_lints, clippy::manual_split_once)]
        path.rsplitn(2, '.').nth(1).filter(|path| !path.is_empty())
    })
    .skip(1)
}
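// Worked example (added commentary): for the path ".a.b.c", the match order
// produced by `sub_path_iter` is
//
//     ".a.b.c", "a.b.c", "b.c", "c", ".a.b", ".a", "."
//
// so an exact match beats any suffix, a suffix beats any prefix, and the
// global "." matcher is the fallback of last resort.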
/// Given a fully-qualified path, returns a sequence of paths which match a suffix of the input
/// path, in decreasing path-length order.
///
/// Example: `suffixes(".a.b.c.d")` -> `["a.b.c.d", "b.c.d", "c.d", "d"]`
fn suffixes(fq_path: &str) -> impl Iterator<Item = &str> {
    std::iter::successors(Some(fq_path), |path| {
        #[allow(unknown_lints, clippy::manual_split_once)]
        path.splitn(2, '.').nth(1).filter(|path| !path.is_empty())
    })
    .skip(1)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_prefixes() {
        assert_eq!(
            prefixes(".a.b.c.d").collect::<Vec<_>>(),
            vec![".a.b.c", ".a.b", ".a"],
        );
        assert_eq!(prefixes(".a").count(), 0);
        assert_eq!(prefixes(".").count(), 0);
    }

    #[test]
    fn test_suffixes() {
        assert_eq!(
            suffixes(".a.b.c.d").collect::<Vec<_>>(),
            vec!["a.b.c.d", "b.c.d", "c.d", "d"],
        );
        assert_eq!(suffixes(".a").collect::<Vec<_>>(), vec!["a"]);
        assert_eq!(suffixes(".").collect::<Vec<_>>(), Vec::<&str>::new());
    }

    #[test]
    fn test_get_matches_sub_path() {
        let mut path_map = PathMap::default();

        // full path
        path_map.insert(".a.b.c.d".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());

        // suffix
        path_map.clear();
        path_map.insert("c.d".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get("b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());

        // prefix
        path_map.clear();
        path_map.insert(".a.b".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());

        // global
        path_map.clear();
        path_map.insert(".".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get("b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());
    }

    #[test]
    fn test_get_best() {
        let mut path_map = PathMap::default();

        // worst is global
        path_map.insert(".".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&1), path_map.get_first("b.c.d"));
        assert_eq!(Some(&1), path_map.get_first_field(".a.b.c", "d"));

        // then prefix
        path_map.insert(".a.b".to_owned(), 2);
        assert_eq!(Some(&2), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&2), path_map.get_first_field(".a.b.c", "d"));

        // then suffix
        path_map.insert("c.d".to_owned(), 3);
        assert_eq!(Some(&3), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&3), path_map.get_first("b.c.d"));
        assert_eq!(Some(&3), path_map.get_first_field(".a.b.c", "d"));

        // best is full path
        path_map.insert(".a.b.c.d".to_owned(), 4);
        assert_eq!(Some(&4), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&4), path_map.get_first_field(".a.b.c", "d"));
    }

    #[test]
    fn test_get_keep_order() {
        let mut path_map = PathMap::default();
        path_map.insert(".".to_owned(), 1);
        path_map.insert(".a.b".to_owned(), 2);
        path_map.insert(".a.b.c.d".to_owned(), 3);

        let mut iter = path_map.get(".a.b.c.d");
        assert_eq!(Some(&1), iter.next());
        assert_eq!(Some(&2), iter.next());
        assert_eq!(Some(&3), iter.next());
        assert_eq!(None, iter.next());

        path_map.clear();

        path_map.insert(".a.b.c.d".to_owned(), 1);
        path_map.insert(".a.b".to_owned(), 2);
        path_map.insert(".".to_owned(), 3);

        let mut iter = path_map.get(".a.b.c.d");
        assert_eq!(Some(&1), iter.next());
        assert_eq!(Some(&2), iter.next());
        assert_eq!(Some(&3), iter.next());
        assert_eq!(None, iter.next());
    }
}
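#[cfg(test)]
mod suffix_field_sketch {
    use super::*;

    // Illustrative sketch (not part of the original test suite): a matcher
    // registered for a bare field name acts as a one-component suffix, so it
    // applies to a field of that name in every message.
    #[test]
    fn bare_field_name_matches_everywhere() {
        let mut path_map = PathMap::default();
        path_map.insert("id".to_owned(), 1);

        assert_eq!(Some(&1), path_map.get_first_field(".a.b", "id"));
        assert_eq!(Some(&1), path_map.get_first(".x.id"));
        assert_eq!(None, path_map.get_first(".x.other"));
    }
}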