semver-parser-0.10.0/.cargo_vcs_info.json0000644000000001121373312131400137330ustar { "git": { "sha1": "8c2092f67313976cc37ca27086cb7d491088f470" } } semver-parser-0.10.0/.gitignore000064400000000000000000000000321370432213400144730ustar 00000000000000target Cargo.lock *.bk semver-parser-0.10.0/.travis.yml000064400000000000000000000004271370432213400146240ustar 00000000000000language: rust cache: cargo matrix: include: - rust: stable script: - cargo test - rustup component add rustfmt-preview - cargo fmt --all -- --check - rust: beta script: - cargo test - rust: nightly script: - cargo test semver-parser-0.10.0/Cargo.lock0000644000000110521373312131400117130ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. [[package]] name = "block-buffer" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" dependencies = [ "block-padding", "byte-tools", "byteorder", "generic-array", ] [[package]] name = "block-padding" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" dependencies = [ "byte-tools", ] [[package]] name = "byte-tools" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "digest" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" dependencies = [ "generic-array", ] [[package]] name = "fake-simd" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" [[package]] name = "generic-array" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" dependencies = [ "typenum", ] [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "opaque-debug" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" [[package]] name = "pest" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" dependencies = [ "ucd-trie", ] [[package]] name = "pest_derive" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" dependencies = [ "pest", "pest_generator", ] [[package]] name = "pest_generator" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", "syn", ] [[package]] name = "pest_meta" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d" dependencies = [ "maplit", "pest", "sha-1", ] [[package]] name = "proc-macro2" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "beae6331a816b1f65d04c45b078fd8e6c93e8071771f41b8163255bbd8d7c8fa" dependencies = [ "unicode-xid", ] [[package]] name = "quote" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" dependencies = [ "proc-macro2", ] [[package]] name = "semver-parser" version = "0.10.0" dependencies = [ "pest", "pest_derive", ] [[package]] name = "sha-1" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" dependencies = [ "block-buffer", "digest", "fake-simd", "opaque-debug", ] [[package]] name = "syn" version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "936cae2873c940d92e697597c5eee105fb570cd5689c695806f672883653349b" dependencies = [ "proc-macro2", "quote", "unicode-xid", ] [[package]] name = "typenum" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" [[package]] name = "ucd-trie" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" [[package]] name = "unicode-xid" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" semver-parser-0.10.0/Cargo.toml0000644000000022031373312131400117340ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] edition = "2018" name = "semver-parser" version = "0.10.0" authors = ["Steve Klabnik "] description = "Parsing of the semver spec.\n" homepage = "https://github.com/steveklabnik/semver-parser" documentation = "https://docs.rs/semver-parser" readme = "README.md" keywords = ["parser", "semver", "version", "semantic"] categories = ["development-tools", "parsing"] license = "MIT/Apache-2.0" repository = "https://github.com/steveklabnik/semver-parser" [dependencies.pest] version = "2.0.0" [dependencies.pest_derive] version = "2.0.0" [badges.travis-ci] repository = "steveklabnik/semver-parser" semver-parser-0.10.0/Cargo.toml.orig000064400000000000000000000011741373312122600154030ustar 00000000000000[package] name = "semver-parser" version = "0.10.0" authors = ["Steve Klabnik "] license = "MIT/Apache-2.0" edition = "2018" repository = "https://github.com/steveklabnik/semver-parser" homepage = "https://github.com/steveklabnik/semver-parser" documentation = "https://docs.rs/semver-parser" description = """ Parsing of the semver spec. 
""" keywords = ["parser", "semver", "version", "semantic"] categories = ["development-tools", "parsing"] readme = "README.md" [badges] travis-ci = { repository = "steveklabnik/semver-parser" } [dependencies] pest = "2.0.0" pest_derive = "2.0.0" semver-parser-0.10.0/LICENSE-APACHE000064400000000000000000000254501370432213400144420ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. semver-parser-0.10.0/LICENSE-MIT000064400000000000000000000020721370432213400141450ustar 00000000000000Copyright (c) 2016 Steve Klabnik Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. semver-parser-0.10.0/README.md000064400000000000000000000001251370432213400137650ustar 00000000000000# `semver-parser` Parsing for the semver spec. We'll have better docs at 1.0. semver-parser-0.10.0/src/lexer.rs000064400000000000000000000211371370432217700147770ustar 00000000000000//! Lexer for semver ranges. //! //! Breaks a string of input into an iterator of tokens that can be used with a parser. //! //! This should be used with the [`parser`] module. //! //! [`parser`]: ../parser/index.html //! //! # Examples //! //! Example without errors: //! //! ```rust //! use semver_parser::lexer::{Lexer, Token}; //! //! let mut l = Lexer::new("foo 123 *"); //! //! assert_eq!(Some(Ok(Token::AlphaNumeric("foo"))), l.next()); //! assert_eq!(Some(Ok(Token::Whitespace(3, 4))), l.next()); //! assert_eq!(Some(Ok(Token::Numeric(123))), l.next()); //! assert_eq!(Some(Ok(Token::Whitespace(7, 8))), l.next()); //! assert_eq!(Some(Ok(Token::Star)), l.next()); //! assert_eq!(None, l.next()); //! ``` //! //! Example with error: //! //! ```rust //! use semver_parser::lexer::{Lexer, Token, Error}; //! //! let mut l = Lexer::new("foo / *"); //! //! assert_eq!(Some(Ok(Token::AlphaNumeric("foo"))), l.next()); //! assert_eq!(Some(Ok(Token::Whitespace(3, 4))), l.next()); //! assert_eq!(Some(Err(Error::UnexpectedChar('/'))), l.next()); //! ``` use self::Error::*; use self::Token::*; use std::str; macro_rules! scan_while { ($slf:expr, $start:expr, $first:pat $(| $rest:pat)*) => {{ let mut __end = $start; loop { if let Some((idx, c)) = $slf.one() { __end = idx; match c { $first $(| $rest)* => $slf.step(), _ => break, } continue; } else { __end = $slf.input.len(); } break; } __end }} } /// Semver tokens. 
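///
/// # Examples
///
/// A small sketch (using the `Lexer` from this module, in the same style as the
/// module-level examples above) of collecting and inspecting tokens; the input
/// `"1.x"` is just an illustrative value:
///
/// ```rust
/// use semver_parser::lexer::{Lexer, Token};
///
/// let tokens: Vec<_> = Lexer::new("1.x").map(Result::unwrap).collect();
///
/// assert_eq!(
///     tokens,
///     vec![Token::Numeric(1), Token::Dot, Token::AlphaNumeric("x")]
/// );
/// assert!(tokens[2].is_wildcard());
/// assert!(!tokens[0].is_wildcard());
/// ```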
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum Token<'input> { /// `=` Eq, /// `>` Gt, /// `<` Lt, /// `<=` LtEq, /// `>=` GtEq, /// '^` Caret, /// '~` Tilde, /// '*` Star, /// `.` Dot, /// `,` Comma, /// `-` Hyphen, /// `+` Plus, /// '||' Or, /// any number of whitespace (`\t\r\n `) and its span. Whitespace(usize, usize), /// Numeric component, like `0` or `42`. Numeric(u64), /// Alphanumeric component, like `alpha1` or `79deadbe`. AlphaNumeric(&'input str), } impl<'input> Token<'input> { /// Check if the current token is a whitespace token. pub fn is_whitespace(&self) -> bool { match *self { Whitespace(..) => true, _ => false, } } /// Check if the current token is a wildcard token. pub fn is_wildcard(&self) -> bool { match *self { Star | AlphaNumeric("X") | AlphaNumeric("x") => true, _ => false, } } } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum Error { /// Unexpected character. UnexpectedChar(char), } /// Lexer for semver tokens belonging to a range. #[derive(Debug)] pub struct Lexer<'input> { input: &'input str, chars: str::CharIndices<'input>, // lookahead c1: Option<(usize, char)>, c2: Option<(usize, char)>, } impl<'input> Lexer<'input> { /// Construct a new lexer for the given input. pub fn new(input: &str) -> Lexer { let mut chars = input.char_indices(); let c1 = chars.next(); let c2 = chars.next(); Lexer { input, chars, c1, c2, } } /// Shift all lookahead storage by one. fn step(&mut self) { self.c1 = self.c2; self.c2 = self.chars.next(); } fn step_n(&mut self, n: usize) { for _ in 0..n { self.step(); } } /// Access the one character, or set it if it is not set. fn one(&mut self) -> Option<(usize, char)> { self.c1 } /// Access two characters. fn two(&mut self) -> Option<(usize, char, char)> { self.c1 .and_then(|(start, c1)| self.c2.map(|(_, c2)| (start, c1, c2))) } /// Consume a component. /// /// A component can either be an alphanumeric or numeric. /// Does not permit leading zeroes if numeric. fn component(&mut self, start: usize) -> Result, Error> { let end = scan_while!(self, start, '0'..='9' | 'A'..='Z' | 'a'..='z'); let input = &self.input[start..end]; let mut it = input.chars(); let (a, b) = (it.next(), it.next()); // exactly zero if a == Some('0') && b.is_none() { return Ok(Numeric(0)); } if a != Some('0') { if let Ok(numeric) = input.parse::() { return Ok(Numeric(numeric)); } } Ok(AlphaNumeric(input)) } /// Consume whitespace. fn whitespace(&mut self, start: usize) -> Result, Error> { let end = scan_while!(self, start, ' ' | '\t' | '\n' | '\r'); Ok(Whitespace(start, end)) } } impl<'input> Iterator for Lexer<'input> { type Item = Result, Error>; fn next(&mut self) -> Option { #[allow(clippy::never_loop)] loop { // two subsequent char tokens. if let Some((_, a, b)) = self.two() { let two = match (a, b) { ('<', '=') => Some(LtEq), ('>', '=') => Some(GtEq), ('|', '|') => Some(Or), _ => None, }; if let Some(two) = two { self.step_n(2); return Some(Ok(two)); } } // single char and start of numeric tokens. if let Some((start, c)) = self.one() { let tok = match c { ' ' | '\t' | '\n' | '\r' => { self.step(); return Some(self.whitespace(start)); } '=' => Eq, '>' => Gt, '<' => Lt, '^' => Caret, '~' => Tilde, '*' => Star, '.' 
=> Dot, ',' => Comma, '-' => Hyphen, '+' => Plus, '0'..='9' | 'a'..='z' | 'A'..='Z' => { self.step(); return Some(self.component(start)); } c => return Some(Err(UnexpectedChar(c))), }; self.step(); return Some(Ok(tok)); }; return None; } } } #[cfg(test)] mod tests { use super::*; fn lex(input: &str) -> Vec { Lexer::new(input).map(Result::unwrap).collect::>() } #[test] pub fn simple_tokens() { assert_eq!( lex("=><<=>=^~*.,-+||"), vec![Eq, Gt, Lt, LtEq, GtEq, Caret, Tilde, Star, Dot, Comma, Hyphen, Plus, Or,] ); } #[test] pub fn whitespace() { assert_eq!( lex(" foo \t\n\rbar"), vec![ Whitespace(0, 2), AlphaNumeric("foo"), Whitespace(5, 9), AlphaNumeric("bar"), ] ); } #[test] pub fn components() { assert_eq!(lex("42"), vec![Numeric(42)]); assert_eq!(lex("0"), vec![Numeric(0)]); assert_eq!(lex("01"), vec![AlphaNumeric("01")]); assert_eq!(lex("01"), vec![AlphaNumeric("01")]); assert_eq!(lex("5885644aa"), vec![AlphaNumeric("5885644aa")]); assert_eq!(lex("beta2"), vec![AlphaNumeric("beta2")]); assert_eq!(lex("beta.2"), vec![AlphaNumeric("beta"), Dot, Numeric(2)]); } #[test] pub fn is_wildcard() { assert_eq!(Star.is_wildcard(), true); assert_eq!(AlphaNumeric("x").is_wildcard(), true); assert_eq!(AlphaNumeric("X").is_wildcard(), true); assert_eq!(AlphaNumeric("other").is_wildcard(), false); } #[test] pub fn empty() { assert_eq!(lex(""), vec![]); } #[test] pub fn numeric_all_numbers() { let expected: Vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9] .into_iter() .map(Numeric) .collect::>(); let actual: Vec<_> = lex("0 1 2 3 4 5 6 7 8 9") .into_iter() .filter(|t| !t.is_whitespace()) .collect(); assert_eq!(actual, expected); } } semver-parser-0.10.0/src/lib.rs000064400000000000000000000006151370432541300144200ustar 00000000000000use pest_derive::Parser; #[derive(Parser)] #[grammar = "semver.pest"] struct SemverParser; mod range_set; pub use crate::range_set::Compat; pub use crate::range_set::RangeSet; mod range; pub use crate::range::Comparator; pub use crate::range::Identifier; pub use crate::range::Op; pub use crate::range::Range; // from old lib: pub mod lexer; pub mod parser; // pub mod range; pub mod version; semver-parser-0.10.0/src/main.rs000064400000000000000000000006411373312100500145660ustar 00000000000000use semver_parser::Compat; use semver_parser::RangeSet; use std::error::Error; fn main() -> Result<(), Box> { // default operation let range_set: RangeSet = "1.2.3".parse()?; println!("Found range set: {:?}", range_set); // npm compatibility let range_set = RangeSet::parse("1.2.3", Compat::Npm)?; println!("Found range set (node): {:?}", range_set); Ok(()) } semver-parser-0.10.0/src/parser.rs000064400000000000000000000166321370432217700151600ustar 00000000000000// this is only for parsing versions now use std::fmt; use std::mem; use self::Error::*; use crate::lexer::{self, Lexer, Token}; use crate::version::{Identifier, Version}; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum Error<'input> { /// Needed more tokens for parsing, but none are available. UnexpectedEnd, /// Unexpected token. UnexpectedToken(Token<'input>), /// An error occurred in the lexer. Lexer(lexer::Error), /// More input available. MoreInput(Vec>), /// Encountered empty predicate in a set of predicates. EmptyPredicate, /// Encountered an empty range. 
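    /// (Range parsing now lives in the `range` and `range_set` modules of this
    /// crate; as noted at the top of this file, this parser only handles versions.)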
EmptyRange, } impl<'input> From for Error<'input> { fn from(value: lexer::Error) -> Self { Error::Lexer(value) } } impl<'input> fmt::Display for Error<'input> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { use self::Error::*; match *self { UnexpectedEnd => write!(fmt, "expected more input"), UnexpectedToken(ref token) => write!(fmt, "encountered unexpected token: {:?}", token), Lexer(ref error) => write!(fmt, "lexer error: {:?}", error), MoreInput(ref tokens) => write!(fmt, "expected end of input, but got: {:?}", tokens), EmptyPredicate => write!(fmt, "encountered empty predicate"), EmptyRange => write!(fmt, "encountered empty range"), } } } /// impl for backwards compatibility. impl<'input> From> for String { fn from(value: Error<'input>) -> Self { value.to_string() } } /// A recursive-descent parser for parsing version requirements. pub struct Parser<'input> { /// Source of token. lexer: Lexer<'input>, /// Lookaehead. c1: Option>, } impl<'input> Parser<'input> { /// Construct a new parser for the given input. pub fn new(input: &'input str) -> Result, Error<'input>> { let mut lexer = Lexer::new(input); let c1 = if let Some(c1) = lexer.next() { Some(c1?) } else { None }; Ok(Parser { lexer, c1 }) } /// Pop one token. #[inline(always)] fn pop(&mut self) -> Result, Error<'input>> { let c1 = if let Some(c1) = self.lexer.next() { Some(c1?) } else { None }; mem::replace(&mut self.c1, c1).ok_or_else(|| UnexpectedEnd) } /// Peek one token. #[inline(always)] fn peek(&mut self) -> Option<&Token<'input>> { self.c1.as_ref() } /// Skip whitespace if present. fn skip_whitespace(&mut self) -> Result<(), Error<'input>> { match self.peek() { Some(&Token::Whitespace(_, _)) => self.pop().map(|_| ()), _ => Ok(()), } } /// Parse a single component. /// /// Returns `None` if the component is a wildcard. pub fn component(&mut self) -> Result, Error<'input>> { match self.pop()? { Token::Numeric(number) => Ok(Some(number)), ref t if t.is_wildcard() => Ok(None), tok => Err(UnexpectedToken(tok)), } } /// Parse a single numeric. pub fn numeric(&mut self) -> Result> { match self.pop()? { Token::Numeric(number) => Ok(number), tok => Err(UnexpectedToken(tok)), } } /// Optionally parse a dot, then a component. /// /// The second component of the tuple indicates if a wildcard has been encountered, and is /// always `false` if the first component is `Some`. /// /// If a dot is not encountered, `(None, false)` is returned. /// /// If a wildcard is encountered, `(None, true)` is returned. pub fn dot_component(&mut self) -> Result<(Option, bool), Error<'input>> { match self.peek() { Some(&Token::Dot) => {} _ => return Ok((None, false)), } // pop the peeked dot. self.pop()?; self.component().map(|n| (n, n.is_none())) } /// Parse a dot, then a numeric. pub fn dot_numeric(&mut self) -> Result> { match self.pop()? { Token::Dot => {} tok => return Err(UnexpectedToken(tok)), } self.numeric() } /// Parse an string identifier. /// /// Like, `foo`, or `bar`, or `beta-1`. pub fn identifier(&mut self) -> Result> { let identifier = match self.pop()? { Token::AlphaNumeric(identifier) => { // TODO: Borrow? Identifier::AlphaNumeric(identifier.to_string()) } Token::Numeric(n) => Identifier::Numeric(n), tok => return Err(UnexpectedToken(tok)), }; if let Some(&Token::Hyphen) = self.peek() { // pop the peeked hyphen self.pop()?; // concat with any following identifiers Ok(identifier .concat("-") .concat(&self.identifier()?.to_string())) } else { Ok(identifier) } } /// Parse all pre-release identifiers, separated by dots. 
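    /// A leading hyphen is consumed first; if no hyphen is present, an empty list is returned.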
/// /// Like, `abcdef.1234`. fn pre(&mut self) -> Result, Error<'input>> { match self.peek() { Some(&Token::Hyphen) => {} _ => return Ok(vec![]), } // pop the peeked hyphen. self.pop()?; self.parts() } /// Parse a dot-separated set of identifiers. fn parts(&mut self) -> Result, Error<'input>> { let mut parts = Vec::new(); parts.push(self.identifier()?); while let Some(&Token::Dot) = self.peek() { self.pop()?; parts.push(self.identifier()?); } Ok(parts) } /// Parse optional build metadata. /// /// Like, `` (empty), or `+abcdef`. fn plus_build_metadata(&mut self) -> Result, Error<'input>> { match self.peek() { Some(&Token::Plus) => {} _ => return Ok(vec![]), } // pop the plus. self.pop()?; self.parts() } /// Parse a version. /// /// Like, `1.0.0` or `3.0.0-beta.1`. pub fn version(&mut self) -> Result> { self.skip_whitespace()?; let major = self.numeric()?; let minor = self.dot_numeric()?; let patch = self.dot_numeric()?; let pre = self.pre()?; let build = self.plus_build_metadata()?; self.skip_whitespace()?; Ok(Version { major, minor, patch, pre, build, }) } /// Check if we have reached the end of input. pub fn is_eof(&mut self) -> bool { self.c1.is_none() } /// Get the rest of the tokens in the parser. /// /// Useful for debugging. pub fn tail(&mut self) -> Result>, Error<'input>> { let mut out = Vec::new(); if let Some(t) = self.c1.take() { out.push(t); } while let Some(t) = self.lexer.next() { out.push(t?); } Ok(out) } } semver-parser-0.10.0/src/range.rs000064400000000000000000001112231373312100500147350ustar 00000000000000use crate::*; #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct Range { pub comparator_set: Vec, pub compat: range_set::Compat, } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct Comparator { pub op: Op, pub major: u64, pub minor: u64, pub patch: u64, pub pre: Vec, } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Op { Lt, Lte, Gt, Gte, Eq, } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Identifier { Numeric(u64), AlphaNumeric(String), } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct Partial { major: Option, minor: Option, patch: Option, pre: Vec, kind: PartialKind, } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum PartialKind { XRangeOnly, MajorOnly, MajorMinor, MajorMinorPatch, } impl Partial { pub fn new() -> Self { Self { major: None, minor: None, patch: None, pre: Vec::new(), kind: PartialKind::XRangeOnly, } } pub fn as_comparator(&self, op: Op) -> Comparator { Comparator { op, major: self.major.unwrap_or(0), minor: self.minor.unwrap_or(0), patch: self.patch.unwrap_or(0), pre: self.pre.clone(), } } pub fn inc_major(&mut self) -> &mut Self { self.major = Some(self.major.unwrap_or(0) + 1); self } pub fn inc_minor(&mut self) -> &mut Self { self.minor = Some(self.minor.unwrap_or(0) + 1); self } pub fn inc_patch(&mut self) -> &mut Self { self.patch = Some(self.patch.unwrap_or(0) + 1); self } pub fn zero_missing(&mut self) -> &mut Self { self.major = Some(self.major.unwrap_or(0)); self.minor = Some(self.minor.unwrap_or(0)); self.patch = Some(self.patch.unwrap_or(0)); self } pub fn zero_minor(&mut self) -> &mut Self { self.minor = Some(0); self } pub fn zero_patch(&mut self) -> &mut Self { self.patch = Some(0); self } pub fn no_pre(&mut self) -> &mut Self { self.pre = Vec::new(); self } } pub fn from_pair_iterator( parsed_range: pest::iterators::Pair<'_, Rule>, compat: range_set::Compat, ) -> Result { // First of all, do we have 
the correct iterator? if parsed_range.as_rule() != Rule::range { return Err(String::from("Error parsing range")); } let mut comparator_set = Vec::new(); // Now we need to parse each comparator set out of the range for record in parsed_range.into_inner() { match record.as_rule() { Rule::hyphen => { let mut hyphen_set = simple::from_hyphen_range(record)?; comparator_set.append(&mut hyphen_set); } Rule::simple => { let mut comparators = simple::from_pair_iterator(record, compat)?; comparator_set.append(&mut comparators); } Rule::empty => { comparator_set.push(Partial::new().zero_missing().as_comparator(Op::Gte)); } _ => unreachable!(), } } Ok(Range { comparator_set, compat, }) } pub mod simple { use super::*; pub fn from_pair_iterator( parsed_simple: pest::iterators::Pair<'_, Rule>, compat: range_set::Compat, ) -> Result, String> { // First of all, do we have the correct iterator? if parsed_simple.as_rule() != Rule::simple { return Err(String::from("Error parsing comparator set")); } let mut comparators = Vec::new(); // Now we need to parse each comparator set out of the range for record in parsed_simple.into_inner() { match record.as_rule() { Rule::partial => { let components: Vec<_> = record.into_inner().collect(); let mut partial = parse_partial(components); match partial.kind { PartialKind::XRangeOnly => { // '*', 'x', 'X' --> ">=0.0.0" comparators.push(partial.zero_missing().as_comparator(Op::Gte)); } PartialKind::MajorOnly => { // "1", "1.*", or "1.*.*" --> ">=1.0.0 <2.0.0" // "1.*.3" == "1.*" comparators.push(partial.clone().zero_missing().as_comparator(Op::Gte)); comparators .push(partial.inc_major().zero_missing().as_comparator(Op::Lt)); } PartialKind::MajorMinor => { // "1.2" or "1.2.*" --> ">=1.2.0 <1.3.0" comparators.push(partial.clone().zero_patch().as_comparator(Op::Gte)); comparators .push(partial.inc_minor().zero_patch().as_comparator(Op::Lt)); } PartialKind::MajorMinorPatch => { match compat { range_set::Compat::Npm => { // for node, "1.2.3" is "=1.2.3" comparators.push(partial.as_comparator(Op::Eq)); } range_set::Compat::Cargo => { // for cargo, "1.2.3" is parsed as "^1.2.3" handle_caret_range(partial, &mut comparators); } } } } } Rule::primitive => { let mut components: Vec<_> = record.into_inner().collect(); let op_component = components.remove(0); let op = match op_component.as_str() { "=" => Op::Eq, "<" => Op::Lt, "<=" => Op::Lte, ">" => Op::Gt, ">=" => Op::Gte, _ => unreachable!(), }; let partial_component = components.remove(0); let components: Vec<_> = partial_component.into_inner().collect(); let mut partial = parse_partial(components); // equal is different because it can be a range with 2 comparators if op == Op::Eq { match partial.kind { PartialKind::XRangeOnly => { // '=*' --> ">=0.0.0" comparators.push(partial.zero_missing().as_comparator(Op::Gte)); } PartialKind::MajorOnly => { // "=1", "=1.*", or "=1.*.*" --> ">=1.0.0 <2.0.0" comparators .push(partial.clone().zero_missing().as_comparator(Op::Gte)); comparators .push(partial.inc_major().zero_missing().as_comparator(Op::Lt)); } PartialKind::MajorMinor => { // "=1.2" or "=1.2.*" --> ">=1.2.0 <1.3.0" comparators .push(partial.clone().zero_patch().as_comparator(Op::Gte)); comparators .push(partial.inc_minor().zero_patch().as_comparator(Op::Lt)); } PartialKind::MajorMinorPatch => { comparators.push(partial.as_comparator(Op::Eq)); } } } else { match partial.kind { PartialKind::XRangeOnly => { match op { Op::Eq => comparators .push(partial.zero_missing().as_comparator(Op::Gte)), Op::Lt => comparators 
.push(partial.zero_missing().as_comparator(Op::Lt)), Op::Lte => comparators .push(partial.zero_missing().as_comparator(Op::Gte)), Op::Gt => comparators .push(partial.zero_missing().as_comparator(Op::Lt)), Op::Gte => comparators .push(partial.zero_missing().as_comparator(Op::Gte)), } } PartialKind::MajorOnly => { // ">1", "=1", etc. // ">1.*.3" == ">1.*" match op { Op::Lte => comparators.push( partial .inc_major() .zero_minor() .zero_patch() .as_comparator(Op::Lt), ), _ => comparators.push(partial.zero_missing().as_comparator(op)), } } PartialKind::MajorMinor => { // ">1.2", "<1.2.*", etc. match op { Op::Lte => comparators.push( partial.inc_minor().zero_patch().as_comparator(Op::Lt), ), _ => comparators.push(partial.zero_patch().as_comparator(op)), } } PartialKind::MajorMinorPatch => { comparators.push(partial.as_comparator(op)); } } } } Rule::caret => { let mut components: Vec<_> = record.into_inner().collect(); let partial_component = components.remove(0); let components: Vec<_> = partial_component.into_inner().collect(); let partial = parse_partial(components); handle_caret_range(partial, &mut comparators); } Rule::tilde => { let mut components: Vec<_> = record.into_inner().collect(); let partial_component = components.remove(0); let components: Vec<_> = partial_component.into_inner().collect(); let mut partial = parse_partial(components); comparators.push(partial.clone().zero_missing().as_comparator(Op::Gte)); match partial.kind { PartialKind::XRangeOnly => { // "~*" --> ">=0.0.0" // which has already been added, so nothing to do here } PartialKind::MajorOnly => { // "~0" --> ">=0.0.0 <1.0.0" comparators.push( partial .inc_major() .zero_missing() .no_pre() .as_comparator(Op::Lt), ); } PartialKind::MajorMinor | PartialKind::MajorMinorPatch => { // "~1.2" --> ">=1.2.0 <1.3.0" // "~1.2.3" --> ">=1.2.3 <1.3.0" comparators.push( partial .inc_minor() .zero_patch() .no_pre() .as_comparator(Op::Lt), ); } } } _ => unreachable!(), } } Ok(comparators) } fn handle_caret_range(mut partial: Partial, comparators: &mut Vec) { // major version 0 is a special case for caret if partial.major == Some(0) { match partial.kind { PartialKind::XRangeOnly => unreachable!(), PartialKind::MajorOnly => { // "^0", "^0.*" --> ">=0.0.0 <1.0.0" comparators.push(partial.clone().zero_missing().as_comparator(Op::Gte)); comparators.push( partial .inc_major() .zero_missing() .no_pre() .as_comparator(Op::Lt), ); } PartialKind::MajorMinor => { // "^0.2", "^0.2.*" --> ">=0.2.0 <0.3.0" comparators.push(partial.clone().zero_missing().as_comparator(Op::Gte)); comparators.push( partial .inc_minor() .zero_patch() .no_pre() .as_comparator(Op::Lt), ); } PartialKind::MajorMinorPatch => { if partial.minor == Some(0) { // "^0.0.1" --> ">=0.0.1 <0.0.2" comparators.push(partial.as_comparator(Op::Gte)); comparators.push(partial.inc_patch().no_pre().as_comparator(Op::Lt)); } else { // "^0.2.3" --> ">=0.2.3 <0.3.0" comparators.push(partial.as_comparator(Op::Gte)); comparators.push( partial .inc_minor() .zero_patch() .no_pre() .as_comparator(Op::Lt), ); } } } } else { match partial.kind { PartialKind::XRangeOnly => { // "^*" --> ">=0.0.0" comparators.push(partial.zero_missing().as_comparator(Op::Gte)); } _ => { // "^1", "^1.*" --> ">=1.0.0 <2.0.0" // "^1.2", "^1.2.*" --> ">=1.2.0 <2.0.0" // "^1.2.3" --> ">=1.2.3 <2.0.0" comparators.push(partial.clone().zero_missing().as_comparator(Op::Gte)); comparators.push( partial .inc_major() .zero_minor() .zero_patch() .no_pre() .as_comparator(Op::Lt), ); } } } } pub fn from_hyphen_range( parsed_simple: 
pest::iterators::Pair<'_, Rule>, ) -> Result, String> { // First of all, do we have the correct iterator? if parsed_simple.as_rule() != Rule::hyphen { return Err(String::from("Error parsing comparator set")); } let mut comparators = Vec::new(); // At this point, we have 2 partial records let mut records = parsed_simple.into_inner(); let components1: Vec<_> = records.next().unwrap().into_inner().collect(); let mut partial1 = parse_partial(components1); match partial1.kind { PartialKind::XRangeOnly => { // don't need to include this - the range will be limited by the 2nd part of hyphen // range } _ => comparators.push(partial1.zero_missing().as_comparator(Op::Gte)), } let components2: Vec<_> = records.next().unwrap().into_inner().collect(); let mut partial2 = parse_partial(components2); match partial2.kind { PartialKind::XRangeOnly => { // only include this if the first part of the hyphen range was also '*' if partial1.kind == PartialKind::XRangeOnly { comparators.push(partial2.zero_missing().as_comparator(Op::Gte)); } } PartialKind::MajorOnly => { // "1.2.3 - 2" --> ">=1.2.3 <3.0.0" comparators.push( partial2 .inc_major() .zero_minor() .zero_patch() .as_comparator(Op::Lt), ); } PartialKind::MajorMinor => { // "1.2.3 - 2.3.x" --> ">=1.2.3 <2.4.0" comparators.push(partial2.inc_minor().zero_patch().as_comparator(Op::Lt)); } PartialKind::MajorMinorPatch => { // "1.2.3 - 2.3.4" --> ">=1.2.3 <=2.3.4" comparators.push(partial2.as_comparator(Op::Lte)); } } Ok(comparators) } fn parse_partial(mut components: Vec>) -> Partial { let mut partial = Partial::new(); // there will be at least one component let one = components.remove(0); match one.as_rule() { Rule::xr => { let inner = one.into_inner().next().unwrap(); match inner.as_rule() { Rule::xr_op => { // for "*", ">=*", etc. 
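                        // A wildcard in the major position makes the whole partial an
                        // x-range: major/minor/patch stay `None`, the kind is marked as
                        // `XRangeOnly`, and we return early so later components are ignored.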
partial.major = None; partial.kind = PartialKind::XRangeOnly; // end the pattern here return partial; } Rule::nr => { partial.major = Some(inner.as_str().parse::().unwrap()); } _ => unreachable!(), } } _ => unreachable!(), } if components.is_empty() { // only the major has been given partial.kind = PartialKind::MajorOnly; return partial; } else { let two = components.remove(0); match two.as_rule() { Rule::xr => { let inner = two.into_inner().next().unwrap(); match inner.as_rule() { Rule::xr_op => { partial.minor = None; // only the major has been given, minor is xrange (ignore anything after) partial.kind = PartialKind::MajorOnly; return partial; } Rule::nr => { partial.minor = Some(inner.as_str().parse::().unwrap()); } _ => unreachable!(), } } _ => unreachable!(), } } if components.is_empty() { // only major and minor have been given partial.kind = PartialKind::MajorMinor; return partial; } else { let three = components.remove(0); match three.as_rule() { Rule::xr => { let inner = three.into_inner().next().unwrap(); match inner.as_rule() { Rule::xr_op => { partial.patch = None; // only major and minor have been given, patch is xrange partial.kind = PartialKind::MajorMinor; return partial; } Rule::nr => { partial.patch = Some(inner.as_str().parse::().unwrap()); } _ => unreachable!(), } } _ => unreachable!(), } } // at this point we at least have all three fields partial.kind = PartialKind::MajorMinorPatch; if !components.is_empty() { // there's only going to be one, let's move it out let pre = components.remove(0); // now we want to look at the inner bit, so that we don't have the leading - let mut pre: Vec<_> = pre.into_inner().collect(); let pre = pre.remove(0); let pre = pre.as_str(); // now we have all of the stuff in pre, so we split by . to get each bit for bit in pre.split('.') { let identifier = match bit.parse::() { Ok(num) => Identifier::Numeric(num), Err(_) => Identifier::AlphaNumeric(bit.to_string()), }; partial.pre.push(identifier); } } partial } } #[cfg(test)] mod tests { use super::*; use pest::Parser; fn parse_range(input: &str) -> pest::iterators::Pair<'_, Rule> { match SemverParser::parse(Rule::range, input) { Ok(mut parsed) => match parsed.next() { Some(parsed) => parsed, None => panic!("Could not parse {}", input), }, Err(e) => panic!("Parse error:\n{}", e), } } // macros to handle the test boilerplate macro_rules! range_tests { ( $( $name:ident: $value:expr, )* ) => { $( #[test] fn $name() { let (input, expected_range) = $value; let parsed_range = parse_range(input); let range = from_pair_iterator(parsed_range, range_set::Compat::Cargo).expect("parsing failed"); // get the expected length from the input range let num_comparators = range.comparator_set.len(); let expected_comparators = expected_range.comparator_set.len(); assert_eq!(expected_comparators, num_comparators, "expected number of comparators: {}, got: {}", expected_comparators, num_comparators); assert_eq!(range, expected_range); } )* }; } macro_rules! 
range_tests_nodecompat { ( $( $name:ident: $value:expr, )* ) => { $( #[test] fn $name() { let (input, expected_range) = $value; let parsed_range = parse_range(input); let range = from_pair_iterator(parsed_range, range_set::Compat::Npm).expect("parsing failed"); // get the expected length from the input range let num_comparators = range.comparator_set.len(); let expected_comparators = expected_range.comparator_set.len(); assert_eq!(expected_comparators, num_comparators, "expected number of comparators: {}, got: {}", expected_comparators, num_comparators); assert_eq!(range, expected_range); } )* }; } macro_rules! comp_sets { ( $( [$op:expr, $major:expr, $minor:expr, $patch:expr] ),* ) => { Range { comparator_set: vec![ $( Comparator { op: $op, major: $major, minor: $minor, patch: $patch, pre: pre!(None), }, )* ], compat: range_set::Compat::Cargo } }; // if you specify pre for one item, you have to do it for all of them ( $( [$op:expr, $major:expr, $minor:expr, $patch:expr, $pre:expr] ),* ) => { Range { comparator_set: vec![ $( Comparator { op: $op, major: $major, minor: $minor, patch: $patch, pre: $pre, }, )* ], compat: range_set::Compat::Cargo } }; } // for node compatibility macro_rules! comp_sets_node { ( $( [$op:expr, $major:expr, $minor:expr, $patch:expr] ),* ) => { Range { comparator_set: vec![ $( Comparator { op: $op, major: $major, minor: $minor, patch: $patch, pre: pre!(None), }, )* ], compat: range_set::Compat::Npm } }; } macro_rules! id_num { ( $num:expr ) => { Identifier::Numeric($num) }; } macro_rules! id_alpha { ( $alpha:expr ) => { Identifier::AlphaNumeric(String::from($alpha)) }; } macro_rules! pre { ( None ) => { Vec::new() }; ( $( $e:expr ),* ) => { vec![ $( $e, )* ] }; } macro_rules! op { ( "=" ) => { Op::Eq }; ( "<" ) => { Op::Lt }; ( "<=" ) => { Op::Lte }; ( ">" ) => { Op::Gt }; ( ">=" ) => { Op::Gte }; } // tests range_tests! 
{ major: ("1", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), major_minor: ("1.2", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), major_minor_patch: ("1.2.3", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 2, 0, 0] )), major_0_minor_patch: ("0.2.3", comp_sets!( [op!(">="), 0, 2, 3], [op!("<"), 0, 3, 0] )), major_0_minor_0_patch: ("0.0.1", comp_sets!( [op!(">="), 0, 0, 1], [op!("<"), 0, 0, 2] )), eq_major: ("=1", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), eq_major_minor: ("=1.2", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), eq_major_minor_patch: ("=1.2.3", comp_sets!( [op!("="), 1, 2, 3] )), eq_all: ("=*", comp_sets!( [op!(">="), 0, 0, 0] )), eq_major_star: ("=1.*", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), eq_major_minor_star: ("=1.2.*", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), lt_major: ("<1", comp_sets!( [op!("<"), 1, 0, 0] )), lt_major_minor: ("<1.2", comp_sets!( [op!("<"), 1, 2, 0] )), lt_major_minor_patch: ("<1.2.3", comp_sets!( [op!("<"), 1, 2, 3] )), lt_all: ("<*", comp_sets!( [op!("<"), 0, 0, 0] )), lt_major_star: ("<1.*", comp_sets!( [op!("<"), 1, 0, 0] )), lt_major_minor_star: ("<1.2.*", comp_sets!( [op!("<"), 1, 2, 0] )), lte_major: ("<=1", comp_sets!( [op!("<"), 2, 0, 0] )), lte_major_minor: ("<=1.2", comp_sets!( [op!("<"), 1, 3, 0] )), lte_major_minor_patch: ("<=1.2.3", comp_sets!( [op!("<="), 1, 2, 3] )), lte_all: ("<=*", comp_sets!( [op!(">="), 0, 0, 0] )), lte_major_star: ("<=1.*", comp_sets!( [op!("<"), 2, 0, 0] )), lte_major_minor_star: ("<=1.2.*", comp_sets!( [op!("<"), 1, 3, 0] )), gt_major: (">1", comp_sets!( [op!(">"), 1, 0, 0] )), gt_major_minor: (">1.2", comp_sets!( [op!(">"), 1, 2, 0] )), gt_major_minor_patch: (">1.2.3", comp_sets!( [op!(">"), 1, 2, 3] )), gt_all: (">*", comp_sets!( [op!("<"), 0, 0, 0] )), gt_major_star: (">1.*", comp_sets!( [op!(">"), 1, 0, 0] )), gt_major_minor_star: (">1.2.*", comp_sets!( [op!(">"), 1, 2, 0] )), gte_major: (">=1", comp_sets!( [op!(">="), 1, 0, 0] )), gte_major_minor: (">=1.2", comp_sets!( [op!(">="), 1, 2, 0] )), gte_major_minor_patch: (">=1.2.3", comp_sets!( [op!(">="), 1, 2, 3] )), gte_all: (">=*", comp_sets!( [op!(">="), 0, 0, 0] )), gte_major_star: (">=1.*", comp_sets!( [op!(">="), 1, 0, 0] )), gte_major_minor_star: (">=1.2.*", comp_sets!( [op!(">="), 1, 2, 0] )), tilde_major: ("~1", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), tilde_major_0: ("~0", comp_sets!( [op!(">="), 0, 0, 0], [op!("<"), 1, 0, 0] )), tilde_major_xrange: ("~1.x", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), tilde_major_2: ("~>1", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), tilde_major_minor: ("~1.2", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), tilde_major_minor_xrange: ("~1.2.x", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), tilde_major_minor_2: ("~>1.2", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), tilde_major_minor_patch: ("~1.2.3", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 1, 3, 0] )), tilde_major_minor_patch_pre: ("~1.2.3-beta", comp_sets!( [op!(">="), 1, 2, 3, pre!(id_alpha!("beta"))], [op!("<"), 1, 3, 0, pre!()] )), tilde_major_minor_patch_2: ("~>1.2.3", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 1, 3, 0] )), tilde_major_0_minor_patch: ("~0.2.3", comp_sets!( [op!(">="), 0, 2, 3], [op!("<"), 0, 3, 0] )), tilde_all: ("~*", comp_sets!( [op!(">="), 0, 0, 0] )), caret_major: ("^1", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), caret_major_xrange: ("^1.x", comp_sets!( [op!(">="), 
1, 0, 0], [op!("<"), 2, 0, 0] )), caret_major_minor: ("^1.2", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 2, 0, 0] )), caret_major_minor_xrange: ("^1.2.x", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 2, 0, 0] )), caret_major_minor_patch: ("^1.2.3", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 2, 0, 0] )), caret_major_minor_patch_pre: ("^1.2.3-beta.4", comp_sets!( [op!(">="), 1, 2, 3, pre!(id_alpha!("beta"), id_num!(4))], [op!("<"), 2, 0, 0, pre!()] )), caret_major_0: ("^0", comp_sets!( [op!(">="), 0, 0, 0], [op!("<"), 1, 0, 0] )), caret_major_0_xrange: ("^0.x", comp_sets!( [op!(">="), 0, 0, 0], [op!("<"), 1, 0, 0] )), caret_major_0_minor_0: ("^0.0", comp_sets!( [op!(">="), 0, 0, 0], [op!("<"), 0, 1, 0] )), caret_major_0_minor_0_xrange: ("^0.0.x", comp_sets!( [op!(">="), 0, 0, 0], [op!("<"), 0, 1, 0] )), caret_major_0_minor: ("^0.1", comp_sets!( [op!(">="), 0, 1, 0], [op!("<"), 0, 2, 0] )), caret_major_0_minor_xrange: ("^0.1.x", comp_sets!( [op!(">="), 0, 1, 0], [op!("<"), 0, 2, 0] )), caret_major_0_minor_patch: ("^0.1.2", comp_sets!( [op!(">="), 0, 1, 2], [op!("<"), 0, 2, 0] )), caret_major_0_minor_0_patch: ("^0.0.1", comp_sets!( [op!(">="), 0, 0, 1], [op!("<"), 0, 0, 2] )), caret_major_0_minor_0_pre: ("^0.0.1-beta", comp_sets!( [op!(">="), 0, 0, 1, pre!(id_alpha!("beta"))], [op!("<"), 0, 0, 2, pre!()] )), caret_all: ("^*", comp_sets!( [op!(">="), 0, 0, 0] )), two_comparators_1: (">1.2.3 <4.5.6", comp_sets!( [op!(">"), 1, 2, 3], [op!("<"), 4, 5, 6] )), two_comparators_2: ("^1.2 ^1", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 2, 0, 0], [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), comparator_with_pre: ("=1.2.3-rc.1", comp_sets!( [op!("="), 1, 2, 3, pre!(id_alpha!("rc"), id_num!(1))] )), hyphen_major: ("1 - 4", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 5, 0, 0] )), hyphen_major_x: ("1.* - 4.*", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 5, 0, 0] )), hyphen_major_minor_x: ("1.2.x - 4.5.x", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 4, 6, 0] )), hyphen_major_minor_patch: ("1.2.3 - 4.5.6", comp_sets!( [op!(">="), 1, 2, 3], [op!("<="), 4, 5, 6] )), hyphen_with_pre: ("1.2.3-rc1 - 4.5.6", comp_sets!( [op!(">="), 1, 2, 3, pre!(id_alpha!("rc1"))], [op!("<="), 4, 5, 6, pre!()] )), hyphen_xrange_minor_only1: ("1.*.3 - 3.4.5", comp_sets!( [op!(">="), 1, 0, 0], [op!("<="), 3, 4, 5] )), hyphen_xrange_minor_only2: ("1.2.3 - 3.*.5", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 4, 0, 0] )), hyphen_all_to_something: ("* - 3.4.5", comp_sets!( [op!("<="), 3, 4, 5] )), hyphen_to_all: ("1.2.3 - *", comp_sets!( [op!(">="), 1, 2, 3] )), hyphen_all_to_all: ("* - *", comp_sets!( [op!(">="), 0, 0, 0] )), gte_space: (">= 1.2.3", comp_sets!( [op!(">="), 1, 2, 3] )), gte_tab: (">=\t1.2.3", comp_sets!( [op!(">="), 1, 2, 3] )), gte_two_spaces: (">= 1.2.3", comp_sets!( [op!(">="), 1, 2, 3] )), gt_space: ("> 1.2.3", comp_sets!( [op!(">"), 1, 2, 3] )), gt_two_spaces: ("> 1.2.3", comp_sets!( [op!(">"), 1, 2, 3] )), lte_space: ("<= 1.2.3", comp_sets!( [op!("<="), 1, 2, 3] )), lte_two_spaces: ("<= 1.2.3", comp_sets!( [op!("<="), 1, 2, 3] )), lt_space: ("< 1.2.3", comp_sets!( [op!("<"), 1, 2, 3] )), lt_two_spaces: ("< 1.2.3", comp_sets!( [op!("<"), 1, 2, 3] )), eq_space: ("= 1.2.3", comp_sets!( [op!("="), 1, 2, 3] )), eq_two_spaces: ("= 1.2.3", comp_sets!( [op!("="), 1, 2, 3] )), caret_space: ("^ 1.2.3", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 2, 0, 0] )), tilde_space: ("~ 1.2.3", comp_sets!( [op!(">="), 1, 2, 3], [op!("<"), 1, 3, 0] )), hyphen_spacing: ("1.2.3 - 4.5.6", comp_sets!( [op!(">="), 1, 2, 3], 
[op!("<="), 4, 5, 6] )), // digit options digits: ("=0.2.3", comp_sets!( [op!("="), 0, 2, 3] )), digits_2: ("=11.2.3", comp_sets!( [op!("="), 11, 2, 3] )), digits_3: ("=1.12.3", comp_sets!( [op!("="), 1, 12, 3] )), digits_4: ("=1.2.13", comp_sets!( [op!("="), 1, 2, 13] )), digits_5: ("=1.2.5678", comp_sets!( [op!("="), 1, 2, 5678] )), xrange_major_x: ("1.x", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), xrange_major_x_x: ("1.x.x", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), xrange_major_minor_x: ("1.2.x", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), xrange_major_xx: ("1.X", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), xrange_major_xx_xx: ("1.X.X", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), xrange_major_minor_xx: ("1.2.X", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), xrange_star: ("*", comp_sets!( [op!(">="), 0, 0, 0] )), xrange_x: ("x", comp_sets!( [op!(">="), 0, 0, 0] )), xrange_xx: ("X", comp_sets!( [op!(">="), 0, 0, 0] )), xrange_major_star: ("1.*", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), xrange_major_star_star: ("1.*.*", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), xrange_major_minor_star: ("1.2.*", comp_sets!( [op!(">="), 1, 2, 0], [op!("<"), 1, 3, 0] )), xrange_with_pre: ("1.*.*-beta", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), // this is handled as "1.*": xrange_minor_only: ("1.*.3", comp_sets!( [op!(">="), 1, 0, 0], [op!("<"), 2, 0, 0] )), // special cases gte_star: (">=*", comp_sets!( [op!(">="), 0, 0, 0] )), empty: ("", comp_sets!( [op!(">="), 0, 0, 0] )), } range_tests_nodecompat! { node_major_minor_patch: ("1.2.3", comp_sets_node!( [op!("="), 1, 2, 3] )), } } semver-parser-0.10.0/src/range_set.rs000064400000000000000000000122641373312100600156160ustar 00000000000000use crate::*; use pest::Parser; use std::str::FromStr; #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct RangeSet { pub ranges: Vec, pub compat: Compat, } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Compat { Cargo, // default Npm, } impl RangeSet { fn new() -> RangeSet { RangeSet { ranges: Vec::new(), compat: Compat::Cargo, // default } } pub fn parse(input: &str, compat: Compat) -> Result { let range_set = match SemverParser::parse(Rule::range_set, input) { Ok(mut parsed) => match parsed.next() { Some(parsed) => parsed, None => return Err(String::from("Could not parse a range set")), }, Err(e) => return Err(e.to_string()), }; from_pair_iterator(range_set, compat) } } impl FromStr for RangeSet { type Err = String; fn from_str(input: &str) -> Result { // default to cargo-compatible mode RangeSet::parse(input, Compat::Cargo) } } /// Converts an iterator of Pairs into a RangeSet fn from_pair_iterator( parsed_range_set: pest::iterators::Pair<'_, Rule>, compat: Compat, ) -> Result { // First of all, do we have the correct iterator? if parsed_range_set.as_rule() != Rule::range_set { return Err(String::from("Error parsing range set")); } // Next, we make a new, empty range let mut range_set = RangeSet::new(); range_set.compat = compat; // Now we need to parse each range out of the set for record in parsed_range_set.into_inner() { match record.as_rule() { // if we have a range... Rule::range => { // ... 
let's parse it and push it onto our list of ranges range_set .ranges .push(range::from_pair_iterator(record, compat)?); } // we don't need to do anything with the logical ors between ranges Rule::logical_or => (), // don't need to do anything with end-of-input Rule::EOI => (), // those are the only rules we can have, according to the grammar _ => unreachable!(), } } // and that's it! Ok(range_set) } #[cfg(test)] mod tests { use super::*; macro_rules! range_set_test { ( $name:ident: $input:expr, $($x:tt)* ) => { #[test] fn $name() { let expected_sets = vec![$($x)*]; let range_set: RangeSet = $input.parse().expect("parse failed"); assert_eq!(range_set.ranges.len(), expected_sets.len()); for it in range_set.ranges.iter().zip(expected_sets.iter()) { let (ai, bi ) = it; assert_eq!(ai.comparator_set.len(), *bi); } } }; } macro_rules! range_set_nodecompat { ( $name:ident: $input:expr, $($x:tt)* ) => { #[test] fn $name() { let expected_sets = vec![$($x)*]; let range_set = RangeSet::parse($input, Compat::Npm).expect("parse failed"); assert_eq!(range_set.ranges.len(), expected_sets.len()); for it in range_set.ranges.iter().zip(expected_sets.iter()) { let (ai, bi ) = it; assert_eq!(ai.comparator_set.len(), *bi); } } }; } macro_rules! should_error { ( $( $name:ident: $value:expr, )* ) => { $( #[test] fn $name() { assert!($value.parse::().is_err()); } )* }; } range_set_test!( one_range: "=1.2.3", 1 ); range_set_test!( one_range_cargo: "1.2.3", 2 ); // this parses as "^1.2.3" range_set_test!( one_range_with_space: " =1.2.3 ", 1 ); range_set_test!( two_ranges: ">1.2.3 || =4.5.6", 1, 1 ); range_set_test!( two_ranges_with_space: " >1.2.3 || =4.5.6 ", 1, 1 ); range_set_test!( two_ranges_with_two_comparators: ">1.2.3 <2.3.4 || >4.5.6 <5.6.7", 2, 2 ); range_set_test!( caret_range: "^1.2.3", 2 ); range_set_test!( two_empty_ranges: "||", 1, 1 ); range_set_test!( two_xranges: "1.2.* || 2.*", 2, 2 ); range_set_test!( see_issue_88: "=1.2.3+meta", 1 ); range_set_nodecompat!( node_one_range: "1.2.3", 1 ); // this parses as "=1.2.3" should_error! { err_only_gt: ">", err_only_lt: "<", err_only_lte: "<=", err_only_gte: ">=", err_only_eq: "=", err_only_tilde: "~", err_only_caret: "^", err_leading_0_major: "01.2.3", err_leading_0_minor: "1.02.3", err_leading_0_patch: "1.2.03", err_hyphen_with_gt: "1.2.3 - >3.4.5", err_hyphen_no_2nd_version: "1.2.3 - ", err_no_pre_hyphen: "~1.2.3beta", } } semver-parser-0.10.0/src/semver.pest000064400000000000000000000015571373312100600155020ustar 00000000000000range_set = { SOI ~ space* ~ range ~ (logical_or ~ range)* ~ space* ~ EOI } logical_or = { space* ~ "||" ~ space* } range = { hyphen | simple ~ ( ","? ~ space+ ~ simple )* | empty } empty = { "" } hyphen = { partial ~ space+ ~ "-" ~ space+ ~ partial } simple = { primitive | partial | tilde | caret } primitive = { primitive_op ~ space* ~ partial } primitive_op = { ("<=" | ">=" | ">" | "<" | "=") } partial = { xr ~ ("." ~ xr ~ ("." ~ xr ~ qualifier?)?)? } xr = { xr_op | nr } xr_op = { "x" | "X" | "*" } nr = { "0" | ('1' .. '9') ~ (('0' .. '9'))* } tilde = { ( "~>" | "~" ) ~ space* ~ partial } caret = { "^" ~ space* ~ partial } qualifier = { (("-" | "+") ~ parts) } parts = { part ~ ("." ~ part)* } part = { nr | ("-" | '0' .. '9' | 'A' .. 'Z' | 'a' .. 'z')+ } space = _{ " " | "\t" } semver-parser-0.10.0/src/version.rs000064400000000000000000000337421370432217700153520ustar 00000000000000//! Version data and functions. //! //! This module contains [`Version`] struct, [`parse`] function for building //! 
[`Version`] struct from string and some helper data structures and functions. //! //! # Examples //! //! Parsing `Version` from string and checking its fields: //! //! ``` //! use semver_parser::version; //! //! # fn try_main() -> Result<(), String> { //! let version = version::parse("1.2.3-alpha1")?; //! //! assert_eq!(version.major, 1); //! assert_eq!(version.minor, 2); //! assert_eq!(version.patch, 3); //! //! let expected_pre = vec![ //! version::Identifier::AlphaNumeric(String::from("alpha1")), //! ]; //! //! assert_eq!(expected_pre, version.pre); //! # Ok(()) //! # } //! # //! # try_main().unwrap(); //! ``` //! [`Version`]: ./struct.Version.html //! [`parse`]: ./fn.parse.html use crate::parser::{self, Parser}; use std::fmt; /// Structure representing version data. /// /// `Version` struct has some public fields representing version data, like the major, minor and patch /// version numbers, and vectors of pre-release and build identifiers. /// /// # Examples /// /// Parsing `Version` from string and checking its fields: /// /// ``` /// use semver_parser::version; /// /// # fn try_main() -> Result<(), String> { /// let version = version::parse("0.1.2-alpha1")?; /// assert_eq!(version.major, 0); /// assert_eq!(version.minor, 1); /// assert_eq!(version.patch, 2); /// let expected_pre = vec![version::Identifier::AlphaNumeric(String::from("alpha1"))]; /// assert_eq!(expected_pre, version.pre); /// # Ok(()) /// # } /// # /// # try_main().unwrap(); /// ``` #[derive(Clone, PartialOrd, Ord, Hash, Debug, PartialEq, Eq)] pub struct Version { /// Major version as number (`0` in `"0.1.2"`). pub major: u64, /// Minor version as number (`1` in `"0.1.2"`). pub minor: u64, /// Patch version as number (`2` in `"0.1.2"`). pub patch: u64, /// Pre-release metadata as a vector of `Identifier` (`"alpha1"` in `"0.1.2-alpha1"` /// or `7` (numeric) in `"0.1.2-7"`, `"pre"` and `0` (numeric) in `"0.1.2-pre.0"`). pub pre: Vec<Identifier>, /// Build metadata as a vector of `Identifier` (`"build1"` in `"0.1.2+build1"` /// or `7` (numeric) in `"0.1.2+7"`, `"build"` and `0` (numeric) in `"0.1.2+build.0"`). pub build: Vec<Identifier>, } /// Helper enum for holding data of alphanumeric or numeric suffix identifiers. /// /// This enum is used to hold suffix parts of `pre` and `build` fields of /// [`Version`] struct. These suffixes may be either numeric or alphanumeric. /// /// # Examples /// /// Parsing [`Version`] with pre-release part composed of two `Identifier`s: /// /// ``` /// use semver_parser::version; /// /// # fn try_main() -> Result<(), String> { /// let version = version::parse("0.1.2-alpha1.0")?; /// /// let expected_pre = vec![ /// version::Identifier::AlphaNumeric(String::from("alpha1")), /// version::Identifier::Numeric(0), /// ]; /// /// assert_eq!(expected_pre, version.pre); /// # Ok(()) /// # } /// # /// # try_main().unwrap(); /// ``` /// [`Version`]: ./struct.Version.html #[derive(Clone, PartialOrd, Ord, Hash, Debug, PartialEq, Eq)] pub enum Identifier { /// An identifier that's solely numbers. Numeric(u64), /// An identifier with letters and numbers. AlphaNumeric(String), } impl Identifier { pub fn concat(self, add_str: &str) -> Identifier { match self { Identifier::Numeric(n) => Identifier::AlphaNumeric(format!("{}{}", n, add_str)), Identifier::AlphaNumeric(s) => Identifier::AlphaNumeric(format!("{}{}", s, add_str)), } } } /// Function for parsing a version string to [`Version`]. /// /// Returns the parsed [`Version`] on success, or an error describing why parsing failed.
/// /// # Examples /// /// Parsing [`Version`] from string and checking its fields: /// /// ``` /// use semver_parser::version; /// /// # fn try_main() -> Result<(), String> { /// let version = version::parse("0.1.2-alpha1")?; /// assert_eq!(version.major, 0); /// assert_eq!(version.minor, 1); /// assert_eq!(version.patch, 2); /// let expected_pre = vec![version::Identifier::AlphaNumeric(String::from("alpha1"))]; /// assert_eq!(expected_pre, version.pre); /// # Ok(()) /// # } /// # /// # try_main().unwrap(); /// ``` /// [`Version`]: ./struct.Version.html pub fn parse(input: &str) -> Result { let mut parser = Parser::new(input)?; let version = parser.version()?; if !parser.is_eof() { return Err(parser::Error::MoreInput(parser.tail()?)); } Ok(version) } impl fmt::Display for Version { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}.{}.{}", self.major, self.minor, self.patch).expect("write failed"); if !self.pre.is_empty() { let strs: Vec<_> = self.pre.iter().map(ToString::to_string).collect(); write!(f, "-{}", strs.join(".")).expect("write failed"); } if !self.build.is_empty() { let strs: Vec<_> = self.build.iter().map(ToString::to_string).collect(); write!(f, "+{}", strs.join(".")).expect("write failed"); } Ok(()) } } impl fmt::Display for Identifier { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Identifier::Numeric(ref id) => id.fmt(f), Identifier::AlphaNumeric(ref id) => id.fmt(f), } } } #[cfg(test)] mod tests { use super::*; use crate::version; #[test] fn parse_empty() { let version = ""; let parsed = version::parse(version); assert!( parsed.is_err(), "empty string incorrectly considered a valid parse" ); } #[test] fn parse_blank() { let version = " "; let parsed = version::parse(version); assert!( parsed.is_err(), "blank string incorrectly considered a valid parse" ); } #[test] fn parse_no_minor_patch() { let version = "1"; let parsed = version::parse(version); assert!( parsed.is_err(), format!("'{}' incorrectly considered a valid parse", version) ); } #[test] fn parse_no_patch() { let version = "1.2"; let parsed = version::parse(version); assert!( parsed.is_err(), format!("'{}' incorrectly considered a valid parse", version) ); } #[test] fn parse_empty_pre() { let version = "1.2.3-"; let parsed = version::parse(version); assert!( parsed.is_err(), format!("'{}' incorrectly considered a valid parse", version) ); } #[test] fn parse_letters() { let version = "a.b.c"; let parsed = version::parse(version); assert!( parsed.is_err(), format!("'{}' incorrectly considered a valid parse", version) ); } #[test] fn parse_with_letters() { let version = "1.2.3 a.b.c"; let parsed = version::parse(version); assert!( parsed.is_err(), format!("'{}' incorrectly considered a valid parse", version) ); } #[test] fn parse_basic_version() { let version = "1.2.3"; let parsed = version::parse(version).unwrap(); assert_eq!(1, parsed.major); assert_eq!(2, parsed.minor); assert_eq!(3, parsed.patch); } #[test] fn parse_trims_input() { let version = " 1.2.3 "; let parsed = version::parse(version).unwrap(); assert_eq!(1, parsed.major); assert_eq!(2, parsed.minor); assert_eq!(3, parsed.patch); } #[test] fn parse_no_major_leading_zeroes() { let version = "01.0.0"; let parsed = version::parse(version); assert!( parsed.is_err(), "01 incorrectly considered a valid major version" ); } #[test] fn parse_no_minor_leading_zeroes() { let version = "0.01.0"; let parsed = version::parse(version); assert!( parsed.is_err(), "01 incorrectly considered a valid minor version" ); } #[test] fn 
parse_no_patch_leading_zeroes() { let version = "0.0.01"; let parsed = version::parse(version); assert!( parsed.is_err(), "01 incorrectly considered a valid patch version" ); } #[test] fn parse_no_major_overflow() { let version = "98765432109876543210.0.0"; let parsed = version::parse(version); assert!( parsed.is_err(), "98765432109876543210 incorrectly considered a valid major version" ); } #[test] fn parse_no_minor_overflow() { let version = "0.98765432109876543210.0"; let parsed = version::parse(version); assert!( parsed.is_err(), "98765432109876543210 incorrectly considered a valid minor version" ); } #[test] fn parse_no_patch_overflow() { let version = "0.0.98765432109876543210"; let parsed = version::parse(version); assert!( parsed.is_err(), "98765432109876543210 incorrectly considered a valid patch version" ); } #[test] fn parse_basic_prerelease() { let version = "1.2.3-pre"; let parsed = version::parse(version).unwrap(); let expected_pre = vec![Identifier::AlphaNumeric(String::from("pre"))]; assert_eq!(expected_pre, parsed.pre); } #[test] fn parse_prerelease_alphanumeric() { let version = "1.2.3-alpha1"; let parsed = version::parse(version).unwrap(); let expected_pre = vec![Identifier::AlphaNumeric(String::from("alpha1"))]; assert_eq!(expected_pre, parsed.pre); } #[test] fn parse_prerelease_zero() { let version = "1.2.3-pre.0"; let parsed = version::parse(version).unwrap(); let expected_pre = vec![ Identifier::AlphaNumeric(String::from("pre")), Identifier::Numeric(0), ]; assert_eq!(expected_pre, parsed.pre); } #[test] fn parse_basic_build() { let version = "1.2.3+build"; let parsed = version::parse(version).unwrap(); let expected_build = vec![Identifier::AlphaNumeric(String::from("build"))]; assert_eq!(expected_build, parsed.build); } #[test] fn parse_build_alphanumeric() { let version = "1.2.3+build5"; let parsed = version::parse(version).unwrap(); let expected_build = vec![Identifier::AlphaNumeric(String::from("build5"))]; assert_eq!(expected_build, parsed.build); } #[test] fn parse_pre_and_build() { let version = "1.2.3-alpha1+build5"; let parsed = version::parse(version).unwrap(); let expected_pre = vec![Identifier::AlphaNumeric(String::from("alpha1"))]; assert_eq!(expected_pre, parsed.pre); let expected_build = vec![Identifier::AlphaNumeric(String::from("build5"))]; assert_eq!(expected_build, parsed.build); } #[test] fn parse_complex_metadata_01() { let version = "1.2.3-1.alpha1.9+build5.7.3aedf "; let parsed = version::parse(version).unwrap(); let expected_pre = vec![ Identifier::Numeric(1), Identifier::AlphaNumeric(String::from("alpha1")), Identifier::Numeric(9), ]; assert_eq!(expected_pre, parsed.pre); let expected_build = vec![ Identifier::AlphaNumeric(String::from("build5")), Identifier::Numeric(7), Identifier::AlphaNumeric(String::from("3aedf")), ]; assert_eq!(expected_build, parsed.build); } #[test] fn parse_complex_metadata_02() { let version = "0.4.0-beta.1+0851523"; let parsed = version::parse(version).unwrap(); let expected_pre = vec![ Identifier::AlphaNumeric(String::from("beta")), Identifier::Numeric(1), ]; assert_eq!(expected_pre, parsed.pre); let expected_build = vec![Identifier::AlphaNumeric(String::from("0851523"))]; assert_eq!(expected_build, parsed.build); } #[test] fn parse_metadata_overflow() { let version = "0.4.0-beta.1+98765432109876543210"; let parsed = version::parse(version).unwrap(); let expected_pre = vec![ Identifier::AlphaNumeric(String::from("beta")), Identifier::Numeric(1), ]; assert_eq!(expected_pre, parsed.pre); let expected_build = 
vec![Identifier::AlphaNumeric(String::from( "98765432109876543210", ))]; assert_eq!(expected_build, parsed.build); } #[test] fn parse_regression_01() { let version = "0.0.0-WIP"; let parsed = version::parse(version).unwrap(); assert_eq!(0, parsed.major); assert_eq!(0, parsed.minor); assert_eq!(0, parsed.patch); let expected_pre = vec![Identifier::AlphaNumeric(String::from("WIP"))]; assert_eq!(expected_pre, parsed.pre); } #[test] fn parse_regression_02() { // this is used by really old versions of npm, and is valid according to semver.org let version = "1.2.3-beta-1"; let parsed = version::parse(version).unwrap(); assert_eq!(1, parsed.major); assert_eq!(2, parsed.minor); assert_eq!(3, parsed.patch); let expected_pre = vec![Identifier::AlphaNumeric(String::from("beta-1"))]; assert_eq!(expected_pre, parsed.pre); } }
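// A small, self-contained sketch of how `parse` and the `Display` impl above relate:
// formatting a parsed version is expected to reproduce the original string. This module
// is illustrative only and is not part of the upstream crate; it assumes nothing beyond
// the `parse` function and the `Display` implementation defined in this file.
#[cfg(test)]
mod display_roundtrip_sketch {
    use super::parse;

    #[test]
    fn display_round_trips_parsed_version() {
        // Pre-release identifiers ("alpha1", 7) and build identifiers ("build5", "3aedf")
        // are joined back with '.' by the Display impl, so the formatted value matches
        // the parsed input exactly.
        let input = "1.2.3-alpha1.7+build5.3aedf";
        let parsed = parse(input).expect("expected a valid semver string");
        assert_eq!(input, parsed.to_string());
    }
}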