basic-toml-0.1.10/.cargo_vcs_info.json0000644000000001360000000000100131450ustar { "git": { "sha1": "c54c9bd3ade1a609f5da6e31cf6ee72d6c7552fb" }, "path_in_vcs": "" }basic-toml-0.1.10/.github/workflows/ci.yml000064400000000000000000000050401046102023000164470ustar 00000000000000name: CI on: push: pull_request: workflow_dispatch: schedule: [cron: "40 1 * * *"] permissions: contents: read env: RUSTFLAGS: -Dwarnings jobs: pre_ci: uses: dtolnay/.github/.github/workflows/pre_ci.yml@master test: name: Rust ${{matrix.rust}} needs: pre_ci if: needs.pre_ci.outputs.continue runs-on: ubuntu-latest strategy: fail-fast: false matrix: rust: [nightly, beta, stable, 1.56.0] timeout-minutes: 45 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: toolchain: ${{matrix.rust}} - name: Enable type layout randomization run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV if: matrix.rust == 'nightly' - run: cargo test - uses: actions/upload-artifact@v4 if: matrix.rust == 'nightly' && always() with: name: Cargo.lock path: Cargo.lock continue-on-error: true minimal: name: Minimal versions needs: pre_ci if: needs.pre_ci.outputs.continue runs-on: ubuntu-latest timeout-minutes: 45 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly - run: cargo generate-lockfile -Z minimal-versions - run: cargo check --locked doc: name: Documentation needs: pre_ci if: needs.pre_ci.outputs.continue runs-on: ubuntu-latest timeout-minutes: 45 env: RUSTDOCFLAGS: -Dwarnings steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly - uses: dtolnay/install@cargo-docs-rs - run: cargo docs-rs clippy: name: Clippy runs-on: ubuntu-latest if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@clippy - run: cargo clippy --tests -- -Dclippy::all -Dclippy::pedantic miri: name: Miri needs: pre_ci if: needs.pre_ci.outputs.continue runs-on: ubuntu-latest timeout-minutes: 45 steps: 
- uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@miri - run: cargo miri setup - run: cargo miri test env: MIRIFLAGS: -Zmiri-strict-provenance outdated: name: Outdated runs-on: ubuntu-latest if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - uses: dtolnay/install@cargo-outdated - run: cargo outdated --workspace --exit-code 1 basic-toml-0.1.10/.gitignore000064400000000000000000000000251046102023000137220ustar 00000000000000/target/ /Cargo.lock basic-toml-0.1.10/.taplo.toml000064400000000000000000000000771046102023000140330ustar 00000000000000exclude = ["tests/**"] [formatting] array_auto_expand = false basic-toml-0.1.10/Cargo.lock0000644000000051340000000000100111230ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "basic-toml" version = "0.1.10" dependencies = [ "semver", "serde", "serde_derive", "serde_json", ] [[package]] name = "itoa" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "proc-macro2" version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] [[package]] name = "ryu" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "semver" version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" dependencies = [ "serde", ] [[package]] name = "serde" version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "syn" version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "unicode-ident" version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" basic-toml-0.1.10/Cargo.toml0000644000000045340000000000100111510ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. 
[package] edition = "2021" name = "basic-toml" version = "0.1.10" authors = [ "Alex Crichton ", "David Tolnay ", ] build = false autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Minimal TOML library with few dependencies" documentation = "https://docs.rs/basic-toml" readme = "README.md" keywords = [ "toml", "serde", ] categories = [ "config", "encoding", "parser-implementations", ] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/basic-toml" [package.metadata.docs.rs] rustdoc-args = [ "--generate-link-to-definition", "--extern-html-root-url=core=https://doc.rust-lang.org", "--extern-html-root-url=alloc=https://doc.rust-lang.org", "--extern-html-root-url=std=https://doc.rust-lang.org", ] targets = ["x86_64-unknown-linux-gnu"] [lib] name = "basic_toml" path = "src/lib.rs" [[example]] name = "decode" path = "examples/decode.rs" [[test]] name = "datetime" path = "tests/datetime.rs" [[test]] name = "de-errors" path = "tests/de-errors.rs" [[test]] name = "display-tricky" path = "tests/display-tricky.rs" [[test]] name = "enum_external_deserialize" path = "tests/enum_external_deserialize.rs" [[test]] name = "float" path = "tests/float.rs" [[test]] name = "formatting" path = "tests/formatting.rs" [[test]] name = "invalid" path = "tests/invalid.rs" [[test]] name = "invalid-misc" path = "tests/invalid-misc.rs" [[test]] name = "parser" path = "tests/parser.rs" [[test]] name = "tokens" path = "tests/tokens.rs" [[test]] name = "valid" path = "tests/valid.rs" [dependencies.serde] version = "1.0.194" [dev-dependencies.semver] version = "1.0.17" features = ["serde"] [dev-dependencies.serde] version = "1.0.194" features = ["derive"] [dev-dependencies.serde_derive] version = "1.0.194" [dev-dependencies.serde_json] version = "1.0.99" basic-toml-0.1.10/Cargo.toml.orig000064400000000000000000000016641046102023000146330ustar 00000000000000[package] name = "basic-toml" version = "0.1.10" authors = ["Alex 
Crichton ", "David Tolnay "] categories = ["config", "encoding", "parser-implementations"] description = "Minimal TOML library with few dependencies" documentation = "https://docs.rs/basic-toml" edition = "2021" keywords = ["toml", "serde"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/basic-toml" [dependencies] serde = "1.0.194" [dev-dependencies] semver = { version = "1.0.17", features = ["serde"] } serde = { version = "1.0.194", features = ["derive"] } serde_derive = "1.0.194" serde_json = "1.0.99" [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] rustdoc-args = [ "--generate-link-to-definition", "--extern-html-root-url=core=https://doc.rust-lang.org", "--extern-html-root-url=alloc=https://doc.rust-lang.org", "--extern-html-root-url=std=https://doc.rust-lang.org", ] basic-toml-0.1.10/LICENSE-APACHE000064400000000000000000000227731046102023000136740ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS basic-toml-0.1.10/LICENSE-MIT000064400000000000000000000017771046102023000134050ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. basic-toml-0.1.10/README.md000064400000000000000000000027131046102023000132170ustar 00000000000000# basic-toml [github](https://github.com/dtolnay/basic-toml) [crates.io](https://crates.io/crates/basic-toml) [docs.rs](https://docs.rs/basic-toml) [build status](https://github.com/dtolnay/basic-toml/actions?query=branch%3Amaster) A library for parsing and producing data in [TOML] format using [Serde]. This crate is a stripped down fork of version 0.5 of the `toml` crate (from before the `toml_edit` rewrite). [TOML]: https://toml.io [Serde]: https://serde.rs ```toml [dependencies] basic-toml = "0.1" ```
#### License Licensed under either of Apache License, Version 2.0 or MIT license at your option.
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this crate by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. basic-toml-0.1.10/examples/decode.rs000064400000000000000000000023261046102023000153470ustar 00000000000000//! An example showing off the usage of `Deserialize` to automatically decode //! TOML into a Rust `struct` #![deny(warnings)] #![allow(dead_code)] use serde_derive::Deserialize; /// This is what we're going to decode into. Each field is optional, meaning /// that it doesn't have to be present in TOML. #[derive(Debug, Deserialize)] struct Config { global_string: Option, global_integer: Option, server: Option, peers: Option>, } /// Sub-structs are decoded from tables, so this will decode from the `[server]` /// table. /// /// Again, each field is optional, meaning they don't have to be present. #[derive(Debug, Deserialize)] struct ServerConfig { ip: Option, port: Option, } #[derive(Debug, Deserialize)] struct PeerConfig { ip: Option, port: Option, } fn main() { let toml_str = r#" global_string = "test" global_integer = 5 [server] ip = "127.0.0.1" port = 80 [[peers]] ip = "127.0.0.1" port = 8080 [[peers]] ip = "127.0.0.1" "#; let decoded: Config = basic_toml::from_str(toml_str).unwrap(); println!("{:#?}", decoded); } basic-toml-0.1.10/src/de.rs000064400000000000000000001536531046102023000134770ustar 00000000000000use crate::tokens::{Error as TokenError, Span, Token, Tokenizer}; use serde::de; use serde::de::IntoDeserializer; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; use std::error; use std::f64; use std::fmt::{self, Display}; use std::iter; use std::str; use std::vec; type TablePair<'a> = ((Span, Cow<'a, str>), Value<'a>); /// Deserializes a byte slice into a type. /// /// This function will attempt to interpret `bytes` as UTF-8 data and then /// deserialize `T` from the TOML document provided. 
pub fn from_slice<'de, T>(bytes: &'de [u8]) -> Result where T: de::Deserialize<'de>, { match str::from_utf8(bytes) { Ok(s) => from_str(s), Err(e) => Err(crate::Error::from(*Error::custom(None, e.to_string()))), } } /// Deserializes a string into a type. /// /// This function will attempt to interpret `s` as a TOML document and /// deserialize `T` from the document. pub fn from_str<'de, T>(s: &'de str) -> Result where T: de::Deserialize<'de>, { let mut d = Deserializer::new(s); T::deserialize(&mut d).map_err(|e| crate::Error::from(*e)) } #[derive(Debug)] pub(crate) struct Error { kind: ErrorKind, line: Option, col: usize, at: Option, message: String, key: Vec, } /// Errors that can occur when deserializing a type. #[derive(Debug)] enum ErrorKind { /// EOF was reached when looking for a value. UnexpectedEof, /// An invalid character not allowed in a string was found. InvalidCharInString(char), /// An invalid character was found as an escape. InvalidEscape(char), /// An invalid character was found in a hex escape. InvalidHexEscape(char), /// An invalid escape value was specified in a hex escape in a string. /// /// Valid values are in the plane of unicode codepoints. InvalidEscapeValue(u32), /// A newline in a string was encountered when one was not allowed. NewlineInString, /// An unexpected character was encountered, typically when looking for a /// value. Unexpected(char), /// An unterminated string was found where EOF was found before the ending /// EOF mark. UnterminatedString, /// A newline was found in a table key. NewlineInTableKey, /// A number failed to parse. NumberInvalid, /// Wanted one sort of token, but found another. Wanted { /// Expected token type. expected: &'static str, /// Actually found token type. found: &'static str, }, /// A duplicate table definition was found. DuplicateTable(String), /// Duplicate key in table. DuplicateKey(String), /// A previously defined table was redefined as an array. 
RedefineAsArray, /// Multiline strings are not allowed for key. MultilineStringKey, /// A custom error which could be generated when deserializing a particular /// type. Custom, /// A tuple with a certain number of elements was expected but something /// else was found. ExpectedTuple(usize), /// Expected table keys to be in increasing tuple index order, but something /// else was found. ExpectedTupleIndex { /// Expected index. expected: usize, /// Key that was specified. found: String, }, /// An empty table was expected but entries were found. ExpectedEmptyTable, /// Dotted key attempted to extend something that is not a table. DottedKeyInvalidType, /// An unexpected key was encountered. /// /// Used when deserializing a struct with a limited set of fields. UnexpectedKeys { /// The unexpected keys. keys: Vec, /// Keys that may be specified. available: &'static [&'static str], }, /// Unquoted string was found when quoted one was expected. UnquotedString, } struct Deserializer<'a> { input: &'a str, tokens: Tokenizer<'a>, } impl<'de> de::Deserializer<'de> for &mut Deserializer<'de> { type Error = Box; fn deserialize_any(self, visitor: V) -> Result> where V: de::Visitor<'de>, { let mut tables = self.tables()?; let table_indices = build_table_indices(&tables); let table_pindices = build_table_pindices(&tables); let res = visitor.visit_map(MapVisitor { values: Vec::new().into_iter().peekable(), next_value: None, depth: 0, cur: 0, cur_parent: 0, max: tables.len(), table_indices: &table_indices, table_pindices: &table_pindices, tables: &mut tables, array: false, de: self, keys: HashSet::new(), }); res.map_err(|mut err| { // Errors originating from this library (toml), have an offset // attached to them already. Other errors, like those originating // from serde (like "missing field") or from a custom deserializer, // do not have offsets on them. Here, we do a best guess at their // location, by attributing them to the "current table" (the last // item in `tables`). 
err.fix_offset(|| tables.last().map(|table| table.at)); err.fix_linecol(|at| self.to_linecol(at)); err }) } serde::forward_to_deserialize_any! { bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq bytes byte_buf map unit newtype_struct ignored_any unit_struct tuple_struct tuple option identifier struct enum } } // Builds a datastructure that allows for efficient sublinear lookups. The // returned HashMap contains a mapping from table header (like [a.b.c]) to list // of tables with that precise name. The tables are being identified by their // index in the passed slice. We use a list as the implementation uses this data // structure for arrays as well as tables, so if any top level [[name]] array // contains multiple entries, there are multiple entries in the list. The lookup // is performed in the `SeqAccess` implementation of `MapVisitor`. The lists are // ordered, which we exploit in the search code by using bisection. fn build_table_indices<'de>(tables: &[Table<'de>]) -> HashMap>, Vec> { let mut res = HashMap::new(); for (i, table) in tables.iter().enumerate() { let header = table.header.iter().map(|v| v.1.clone()).collect::>(); res.entry(header).or_insert_with(Vec::new).push(i); } res } // Builds a datastructure that allows for efficient sublinear lookups. The // returned HashMap contains a mapping from table header (like [a.b.c]) to list // of tables whose name at least starts with the specified name. So searching // for [a.b] would give both [a.b.c.d] as well as [a.b.e]. The tables are being // identified by their index in the passed slice. // // A list is used for two reasons: First, the implementation also stores arrays // in the same data structure and any top level array of size 2 or greater // creates multiple entries in the list with the same shared name. Second, there // can be multiple tables sharing the same prefix. // // The lookup is performed in the `MapAccess` implementation of `MapVisitor`. 
// The lists are ordered, which we exploit in the search code by using // bisection. fn build_table_pindices<'de>(tables: &[Table<'de>]) -> HashMap>, Vec> { let mut res = HashMap::new(); for (i, table) in tables.iter().enumerate() { let header = table.header.iter().map(|v| v.1.clone()).collect::>(); for len in 0..=header.len() { res.entry(header[..len].to_owned()) .or_insert_with(Vec::new) .push(i); } } res } fn headers_equal(hdr_a: &[(Span, Cow)], hdr_b: &[(Span, Cow)]) -> bool { if hdr_a.len() != hdr_b.len() { return false; } hdr_a.iter().zip(hdr_b.iter()).all(|(h1, h2)| h1.1 == h2.1) } struct Table<'a> { at: usize, header: Vec<(Span, Cow<'a, str>)>, values: Option>>, array: bool, } struct MapVisitor<'de, 'b> { values: iter::Peekable>>, next_value: Option>, depth: usize, cur: usize, cur_parent: usize, max: usize, table_indices: &'b HashMap>, Vec>, table_pindices: &'b HashMap>, Vec>, tables: &'b mut [Table<'de>], array: bool, de: &'b mut Deserializer<'de>, keys: HashSet>, } impl<'de, 'b> de::MapAccess<'de> for MapVisitor<'de, 'b> { type Error = Box; fn next_key_seed(&mut self, seed: K) -> Result, Box> where K: de::DeserializeSeed<'de>, { if self.cur_parent == self.max || self.cur == self.max { return Ok(None); } loop { assert!(self.next_value.is_none()); if let Some(((span, key), value)) = self.values.next() { if !self.keys.insert(key.clone()) { return Err(Error::from_kind( Some(span.start), ErrorKind::DuplicateKey(key.into_owned()), )); } let ret = seed.deserialize(StrDeserializer::new(key.clone()))?; self.next_value = Some(((span, key), value)); return Ok(Some(ret)); } let next_table = { let prefix_stripped = self.tables[self.cur_parent].header[..self.depth] .iter() .map(|v| v.1.clone()) .collect::>(); self.table_pindices .get(&prefix_stripped) .and_then(|entries| { let start = entries.binary_search(&self.cur).unwrap_or_else(|v| v); if start == entries.len() || entries[start] < self.cur { return None; } entries[start..] 
.iter() .filter_map(|i| if *i < self.max { Some(*i) } else { None }) .map(|i| (i, &self.tables[i])) .find(|(_, table)| table.values.is_some()) .map(|p| p.0) }) }; let pos = match next_table { Some(pos) => pos, None => return Ok(None), }; self.cur = pos; // Test to see if we're duplicating our parent's table, and if so // then this is an error in the toml format if self.cur_parent != pos { if headers_equal( &self.tables[self.cur_parent].header, &self.tables[pos].header, ) { let at = self.tables[pos].at; let name = self.tables[pos] .header .iter() .map(|k| k.1.clone()) .collect::>() .join("."); return Err(self.de.error(at, ErrorKind::DuplicateTable(name))); } // If we're here we know we should share the same prefix, and if // the longer table was defined first then we want to narrow // down our parent's length if possible to ensure that we catch // duplicate tables defined afterwards. let parent_len = self.tables[self.cur_parent].header.len(); let cur_len = self.tables[pos].header.len(); if cur_len < parent_len { self.cur_parent = pos; } } let table = &mut self.tables[pos]; // If we're not yet at the appropriate depth for this table then we // just next the next portion of its header and then continue // decoding. 
if self.depth != table.header.len() { let (span, key) = &table.header[self.depth]; if !self.keys.insert(key.clone()) { return Err(Error::from_kind( Some(span.start), ErrorKind::DuplicateKey(key.clone().into_owned()), )); } let key = seed.deserialize(StrDeserializer::new(key.clone()))?; return Ok(Some(key)); } // Rule out cases like: // // [[foo.bar]] // [[foo]] if table.array { let kind = ErrorKind::RedefineAsArray; return Err(self.de.error(table.at, kind)); } self.values = table .values .take() .expect("Unable to read table values") .into_iter() .peekable(); } } fn next_value_seed(&mut self, seed: V) -> Result> where V: de::DeserializeSeed<'de>, { if let Some((k, v)) = self.next_value.take() { match seed.deserialize(ValueDeserializer::new(v)) { Ok(v) => return Ok(v), Err(mut e) => { e.add_key_context(&k.1); return Err(e); } } } let array = self.tables[self.cur].array && self.depth == self.tables[self.cur].header.len() - 1; self.cur += 1; let res = seed.deserialize(MapVisitor { values: Vec::new().into_iter().peekable(), next_value: None, depth: self.depth + if array { 0 } else { 1 }, cur_parent: self.cur - 1, cur: 0, max: self.max, array, table_indices: self.table_indices, table_pindices: self.table_pindices, tables: &mut *self.tables, de: &mut *self.de, keys: HashSet::new(), }); res.map_err(|mut e| { e.add_key_context(&self.tables[self.cur - 1].header[self.depth].1); e }) } } impl<'de, 'b> de::SeqAccess<'de> for MapVisitor<'de, 'b> { type Error = Box; fn next_element_seed(&mut self, seed: K) -> Result, Box> where K: de::DeserializeSeed<'de>, { assert!(self.next_value.is_none()); assert!(self.values.next().is_none()); if self.cur_parent == self.max { return Ok(None); } let header_stripped = self.tables[self.cur_parent] .header .iter() .map(|v| v.1.clone()) .collect::>(); let start_idx = self.cur_parent + 1; let next = self .table_indices .get(&header_stripped) .and_then(|entries| { let start = entries.binary_search(&start_idx).unwrap_or_else(|v| v); if start == 
entries.len() || entries[start] < start_idx { return None; } entries[start..] .iter() .filter_map(|i| if *i < self.max { Some(*i) } else { None }) .map(|i| (i, &self.tables[i])) .find(|(_, table)| table.array) .map(|p| p.0) }) .unwrap_or(self.max); let ret = seed.deserialize(MapVisitor { values: self.tables[self.cur_parent] .values .take() .expect("Unable to read table values") .into_iter() .peekable(), next_value: None, depth: self.depth + 1, cur_parent: self.cur_parent, max: next, cur: 0, array: false, table_indices: self.table_indices, table_pindices: self.table_pindices, tables: self.tables, de: self.de, keys: HashSet::new(), })?; self.cur_parent = next; Ok(Some(ret)) } } impl<'de, 'b> de::Deserializer<'de> for MapVisitor<'de, 'b> { type Error = Box; fn deserialize_any(self, visitor: V) -> Result> where V: de::Visitor<'de>, { if self.array { visitor.visit_seq(self) } else { visitor.visit_map(self) } } // `None` is interpreted as a missing field so be sure to implement `Some` // as a present field. fn deserialize_option(self, visitor: V) -> Result> where V: de::Visitor<'de>, { visitor.visit_some(self) } fn deserialize_newtype_struct( self, _name: &'static str, visitor: V, ) -> Result> where V: de::Visitor<'de>, { visitor.visit_newtype_struct(self) } serde::forward_to_deserialize_any! 
{ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq bytes byte_buf map unit identifier ignored_any unit_struct tuple_struct tuple struct enum } } struct StrDeserializer<'a> { key: Cow<'a, str>, } impl<'a> StrDeserializer<'a> { fn new(key: Cow<'a, str>) -> StrDeserializer<'a> { StrDeserializer { key } } } impl<'a> de::IntoDeserializer<'a, Box> for StrDeserializer<'a> { type Deserializer = Self; fn into_deserializer(self) -> Self::Deserializer { self } } impl<'de> de::Deserializer<'de> for StrDeserializer<'de> { type Error = Box; fn deserialize_any(self, visitor: V) -> Result> where V: de::Visitor<'de>, { match self.key { Cow::Borrowed(s) => visitor.visit_borrowed_str(s), Cow::Owned(s) => visitor.visit_string(s), } } serde::forward_to_deserialize_any! { bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq bytes byte_buf map option unit newtype_struct ignored_any unit_struct tuple_struct tuple enum identifier struct } } struct ValueDeserializer<'a> { value: Value<'a>, validate_struct_keys: bool, } impl<'a> ValueDeserializer<'a> { fn new(value: Value<'a>) -> ValueDeserializer<'a> { ValueDeserializer { value, validate_struct_keys: false, } } fn with_struct_key_validation(mut self) -> Self { self.validate_struct_keys = true; self } } impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> { type Error = Box; fn deserialize_any(self, visitor: V) -> Result> where V: de::Visitor<'de>, { let start = self.value.start; let res = match self.value.e { E::Integer(i) => visitor.visit_i64(i), E::Boolean(b) => visitor.visit_bool(b), E::Float(f) => visitor.visit_f64(f), E::String(Cow::Borrowed(s)) => visitor.visit_borrowed_str(s), E::String(Cow::Owned(s)) => visitor.visit_string(s), E::Array(values) => { let mut s = de::value::SeqDeserializer::new(values.into_iter()); let ret = visitor.visit_seq(&mut s)?; s.end()?; Ok(ret) } E::InlineTable(values) | E::DottedTable(values) => { visitor.visit_map(InlineTableDeserializer { values: values.into_iter(), 
next_value: None, keys: HashSet::new(), }) } }; res.map_err(|mut err| { // Attribute the error to whatever value returned the error. err.fix_offset(|| Some(start)); err }) } fn deserialize_struct( self, _name: &'static str, fields: &'static [&'static str], visitor: V, ) -> Result> where V: de::Visitor<'de>, { if self.validate_struct_keys { match self.value.e { E::InlineTable(ref values) | E::DottedTable(ref values) => { let extra_fields = values .iter() .filter_map(|key_value| { let (ref key, ref _val) = *key_value; if fields.contains(&&*(key.1)) { None } else { Some(key.clone()) } }) .collect::>(); if !extra_fields.is_empty() { return Err(Error::from_kind( Some(self.value.start), ErrorKind::UnexpectedKeys { keys: extra_fields .iter() .map(|k| k.1.to_string()) .collect::>(), available: fields, }, )); } } _ => {} } } self.deserialize_any(visitor) } // `None` is interpreted as a missing field so be sure to implement `Some` // as a present field. fn deserialize_option(self, visitor: V) -> Result> where V: de::Visitor<'de>, { visitor.visit_some(self) } fn deserialize_enum( self, _name: &'static str, _variants: &'static [&'static str], visitor: V, ) -> Result> where V: de::Visitor<'de>, { match self.value.e { E::String(val) => visitor.visit_enum(val.into_deserializer()), e => Err(Error::from_kind( Some(self.value.start), ErrorKind::Wanted { expected: "string", found: e.type_name(), }, )), } } fn deserialize_newtype_struct( self, _name: &'static str, visitor: V, ) -> Result> where V: de::Visitor<'de>, { visitor.visit_newtype_struct(self) } serde::forward_to_deserialize_any! 
{ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq bytes byte_buf map unit identifier ignored_any unit_struct tuple_struct tuple } } impl<'de, 'b> de::IntoDeserializer<'de, Box> for MapVisitor<'de, 'b> { type Deserializer = MapVisitor<'de, 'b>; fn into_deserializer(self) -> Self::Deserializer { self } } impl<'de> de::IntoDeserializer<'de, Box> for &mut Deserializer<'de> { type Deserializer = Self; fn into_deserializer(self) -> Self::Deserializer { self } } impl<'de> de::IntoDeserializer<'de, Box> for Value<'de> { type Deserializer = ValueDeserializer<'de>; fn into_deserializer(self) -> Self::Deserializer { ValueDeserializer::new(self) } } struct InlineTableDeserializer<'de> { values: vec::IntoIter>, next_value: Option>, keys: HashSet>, } impl<'de> de::MapAccess<'de> for InlineTableDeserializer<'de> { type Error = Box; fn next_key_seed(&mut self, seed: K) -> Result, Box> where K: de::DeserializeSeed<'de>, { let ((span, key), value) = match self.values.next() { Some(pair) => pair, None => return Ok(None), }; self.next_value = Some(value); if !self.keys.insert(key.clone()) { return Err(Error::from_kind( Some(span.start), ErrorKind::DuplicateKey(key.into_owned()), )); } seed.deserialize(StrDeserializer::new(key)).map(Some) } fn next_value_seed(&mut self, seed: V) -> Result> where V: de::DeserializeSeed<'de>, { let value = self.next_value.take().expect("Unable to read table values"); seed.deserialize(ValueDeserializer::new(value)) } } impl<'de> de::EnumAccess<'de> for InlineTableDeserializer<'de> { type Error = Box; type Variant = TableEnumDeserializer<'de>; fn variant_seed(mut self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error> where V: de::DeserializeSeed<'de>, { let (key, value) = match self.values.next() { Some(pair) => pair, None => { return Err(Error::from_kind( None, // FIXME: How do we get an offset here? 
ErrorKind::Wanted { expected: "table with exactly 1 entry", found: "empty table", }, )); } }; seed.deserialize(StrDeserializer::new(key.1)) .map(|val| (val, TableEnumDeserializer { value })) } } /// Deserializes table values into enum variants. struct TableEnumDeserializer<'a> { value: Value<'a>, } impl<'de> de::VariantAccess<'de> for TableEnumDeserializer<'de> { type Error = Box; fn unit_variant(self) -> Result<(), Self::Error> { match self.value.e { E::InlineTable(values) | E::DottedTable(values) => { if values.is_empty() { Ok(()) } else { Err(Error::from_kind( Some(self.value.start), ErrorKind::ExpectedEmptyTable, )) } } e => Err(Error::from_kind( Some(self.value.start), ErrorKind::Wanted { expected: "table", found: e.type_name(), }, )), } } fn newtype_variant_seed(self, seed: T) -> Result where T: de::DeserializeSeed<'de>, { seed.deserialize(ValueDeserializer::new(self.value)) } fn tuple_variant(self, len: usize, visitor: V) -> Result where V: de::Visitor<'de>, { match self.value.e { E::InlineTable(values) | E::DottedTable(values) => { let tuple_values = values .into_iter() .enumerate() .map(|(index, (key, value))| match key.1.parse::() { Ok(key_index) if key_index == index => Ok(value), Ok(_) | Err(_) => Err(Error::from_kind( Some(key.0.start), ErrorKind::ExpectedTupleIndex { expected: index, found: key.1.to_string(), }, )), }) .collect::, _>>()?; if tuple_values.len() == len { de::Deserializer::deserialize_seq( ValueDeserializer::new(Value { e: E::Array(tuple_values), start: self.value.start, end: self.value.end, }), visitor, ) } else { Err(Error::from_kind( Some(self.value.start), ErrorKind::ExpectedTuple(len), )) } } e => Err(Error::from_kind( Some(self.value.start), ErrorKind::Wanted { expected: "table", found: e.type_name(), }, )), } } fn struct_variant( self, fields: &'static [&'static str], visitor: V, ) -> Result where V: de::Visitor<'de>, { de::Deserializer::deserialize_struct( ValueDeserializer::new(self.value).with_struct_key_validation(), "", // 
TODO: this should be the variant name fields, visitor, ) } } impl<'a> Deserializer<'a> { fn new(input: &'a str) -> Deserializer<'a> { Deserializer { tokens: Tokenizer::new(input), input, } } fn tables(&mut self) -> Result>, Box> { let mut tables = Vec::new(); let mut cur_table = Table { at: 0, header: Vec::new(), values: None, array: false, }; while let Some(line) = self.line()? { match line { Line::Table { at, mut header, array, } => { if !cur_table.header.is_empty() || cur_table.values.is_some() { tables.push(cur_table); } cur_table = Table { at, header: Vec::new(), values: Some(Vec::new()), array, }; loop { let part = header.next().map_err(|e| self.token_error(e)); match part? { Some(part) => cur_table.header.push(part), None => break, } } } Line::KeyValue(key, value) => { if cur_table.values.is_none() { cur_table.values = Some(Vec::new()); } self.add_dotted_key(key, value, cur_table.values.as_mut().unwrap())?; } } } if !cur_table.header.is_empty() || cur_table.values.is_some() { tables.push(cur_table); } Ok(tables) } fn line(&mut self) -> Result>, Box> { loop { self.eat_whitespace(); if self.eat_comment()? { continue; } if self.eat(Token::Newline)? { continue; } break; } match self.peek()? { Some((_, Token::LeftBracket)) => self.table_header().map(Some), Some(_) => self.key_value().map(Some), None => Ok(None), } } fn table_header(&mut self) -> Result, Box> { let start = self.tokens.current(); self.expect(Token::LeftBracket)?; let array = self.eat(Token::LeftBracket)?; let ret = Header::new(self.tokens.clone(), array); self.tokens.skip_to_newline(); Ok(Line::Table { at: start, header: ret, array, }) } fn key_value(&mut self) -> Result, Box> { let key = self.dotted_key()?; self.eat_whitespace(); self.expect(Token::Equals)?; self.eat_whitespace(); let value = self.value()?; self.eat_whitespace(); if !self.eat_comment()? 
{ self.eat_newline_or_eof()?; } Ok(Line::KeyValue(key, value)) } fn value(&mut self) -> Result, Box> { let at = self.tokens.current(); let value = match self.next()? { Some((Span { start, end }, Token::String { val, .. })) => Value { e: E::String(val), start, end, }, Some((Span { start, end }, Token::Keylike("true"))) => Value { e: E::Boolean(true), start, end, }, Some((Span { start, end }, Token::Keylike("false"))) => Value { e: E::Boolean(false), start, end, }, Some((span, Token::Keylike(key))) => self.parse_keylike(at, span, key)?, Some((span, Token::Plus)) => self.number_leading_plus(span)?, Some((Span { start, .. }, Token::LeftBrace)) => { self.inline_table().map(|(Span { end, .. }, table)| Value { e: E::InlineTable(table), start, end, })? } Some((Span { start, .. }, Token::LeftBracket)) => { self.array().map(|(Span { end, .. }, array)| Value { e: E::Array(array), start, end, })? } Some(token) => { return Err(self.error( at, ErrorKind::Wanted { expected: "a value", found: token.1.describe(), }, )); } None => return Err(self.eof()), }; Ok(value) } fn parse_keylike( &mut self, at: usize, span: Span, key: &'a str, ) -> Result, Box> { if key == "inf" || key == "nan" { return self.number(span, key); } let first_char = key.chars().next().expect("key should not be empty here"); match first_char { '-' | '0'..='9' => self.number(span, key), _ => Err(self.error(at, ErrorKind::UnquotedString)), } } fn number(&mut self, Span { start, end }: Span, s: &'a str) -> Result, Box> { let to_integer = |f| Value { e: E::Integer(f), start, end, }; if let Some(s) = s.strip_prefix("0x") { self.integer(s, 16).map(to_integer) } else if let Some(s) = s.strip_prefix("0o") { self.integer(s, 8).map(to_integer) } else if let Some(s) = s.strip_prefix("0b") { self.integer(s, 2).map(to_integer) } else if s.contains('e') || s.contains('E') { self.float(s, None).map(|f| Value { e: E::Float(f), start, end, }) } else if self.eat(Token::Period)? { let at = self.tokens.current(); match self.next()? 
{ Some((Span { start, end }, Token::Keylike(after))) => { self.float(s, Some(after)).map(|f| Value { e: E::Float(f), start, end, }) } _ => Err(self.error(at, ErrorKind::NumberInvalid)), } } else if s == "inf" { Ok(Value { e: E::Float(f64::INFINITY), start, end, }) } else if s == "-inf" { Ok(Value { e: E::Float(f64::NEG_INFINITY), start, end, }) } else if s == "nan" { Ok(Value { e: E::Float(f64::NAN.copysign(1.0)), start, end, }) } else if s == "-nan" { Ok(Value { e: E::Float(f64::NAN.copysign(-1.0)), start, end, }) } else { self.integer(s, 10).map(to_integer) } } fn number_leading_plus(&mut self, Span { start, .. }: Span) -> Result, Box> { let start_token = self.tokens.current(); match self.next()? { Some((Span { end, .. }, Token::Keylike(s))) => self.number(Span { start, end }, s), _ => Err(self.error(start_token, ErrorKind::NumberInvalid)), } } fn integer(&self, s: &'a str, radix: u32) -> Result> { let allow_sign = radix == 10; let allow_leading_zeros = radix != 10; let (prefix, suffix) = self.parse_integer(s, allow_sign, allow_leading_zeros, radix)?; let start = self.tokens.substr_offset(s); if !suffix.is_empty() { return Err(self.error(start, ErrorKind::NumberInvalid)); } i64::from_str_radix(prefix.replace('_', "").trim_start_matches('+'), radix) .map_err(|_e| self.error(start, ErrorKind::NumberInvalid)) } fn parse_integer( &self, s: &'a str, allow_sign: bool, allow_leading_zeros: bool, radix: u32, ) -> Result<(&'a str, &'a str), Box> { let start = self.tokens.substr_offset(s); let mut first = true; let mut first_zero = false; let mut underscore = false; let mut end = s.len(); for (i, c) in s.char_indices() { let at = i + start; if i == 0 && (c == '+' || c == '-') && allow_sign { continue; } if c == '0' && first { first_zero = true; } else if c.is_digit(radix) { if !first && first_zero && !allow_leading_zeros { return Err(self.error(at, ErrorKind::NumberInvalid)); } underscore = false; } else if c == '_' && first { return Err(self.error(at, 
ErrorKind::NumberInvalid)); } else if c == '_' && !underscore { underscore = true; } else { end = i; break; } first = false; } if first || underscore { return Err(self.error(start, ErrorKind::NumberInvalid)); } Ok((&s[..end], &s[end..])) } fn float(&mut self, s: &'a str, after_decimal: Option<&'a str>) -> Result> { let (integral, mut suffix) = self.parse_integer(s, true, false, 10)?; let start = self.tokens.substr_offset(integral); let mut fraction = None; if let Some(after) = after_decimal { if !suffix.is_empty() { return Err(self.error(start, ErrorKind::NumberInvalid)); } let (a, b) = self.parse_integer(after, false, true, 10)?; fraction = Some(a); suffix = b; } let mut exponent = None; if suffix.starts_with('e') || suffix.starts_with('E') { let (a, b) = if suffix.len() == 1 { self.eat(Token::Plus)?; match self.next()? { Some((_, Token::Keylike(s))) => self.parse_integer(s, false, true, 10)?, _ => return Err(self.error(start, ErrorKind::NumberInvalid)), } } else { self.parse_integer(&suffix[1..], true, true, 10)? }; if !b.is_empty() { return Err(self.error(start, ErrorKind::NumberInvalid)); } exponent = Some(a); } else if !suffix.is_empty() { return Err(self.error(start, ErrorKind::NumberInvalid)); } let mut number = integral .trim_start_matches('+') .chars() .filter(|c| *c != '_') .collect::(); if let Some(fraction) = fraction { number.push('.'); number.extend(fraction.chars().filter(|c| *c != '_')); } if let Some(exponent) = exponent { number.push('E'); number.extend(exponent.chars().filter(|c| *c != '_')); } number .parse() .map_err(|_e| self.error(start, ErrorKind::NumberInvalid)) .and_then(|n: f64| { if n.is_finite() { Ok(n) } else { Err(self.error(start, ErrorKind::NumberInvalid)) } }) } // TODO(#140): shouldn't buffer up this entire table in memory, it'd be // great to defer parsing everything until later. 
fn inline_table(&mut self) -> Result<(Span, Vec>), Box> { let mut ret = Vec::new(); self.eat_whitespace(); if let Some(span) = self.eat_spanned(Token::RightBrace)? { return Ok((span, ret)); } loop { let key = self.dotted_key()?; self.eat_whitespace(); self.expect(Token::Equals)?; self.eat_whitespace(); let value = self.value()?; self.add_dotted_key(key, value, &mut ret)?; self.eat_whitespace(); if let Some(span) = self.eat_spanned(Token::RightBrace)? { return Ok((span, ret)); } self.expect(Token::Comma)?; self.eat_whitespace(); } } // TODO(#140): shouldn't buffer up this entire array in memory, it'd be // great to defer parsing everything until later. fn array(&mut self) -> Result<(Span, Vec>), Box> { let mut ret = Vec::new(); let intermediate = |me: &mut Deserializer| -> Result<(), Box> { loop { me.eat_whitespace(); if !me.eat(Token::Newline)? && !me.eat_comment()? { break; } } Ok(()) }; loop { intermediate(self)?; if let Some(span) = self.eat_spanned(Token::RightBracket)? { return Ok((span, ret)); } let value = self.value()?; ret.push(value); intermediate(self)?; if !self.eat(Token::Comma)? { break; } } intermediate(self)?; let span = self.expect_spanned(Token::RightBracket)?; Ok((span, ret)) } fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Box> { self.tokens.table_key().map_err(|e| self.token_error(e)) } fn dotted_key(&mut self) -> Result)>, Box> { let mut result = Vec::new(); result.push(self.table_key()?); self.eat_whitespace(); while self.eat(Token::Period)? { self.eat_whitespace(); result.push(self.table_key()?); self.eat_whitespace(); } Ok(result) } /// Stores a value in the appropriate hierarchical structure positioned based on the dotted key. /// /// Given the following definition: `multi.part.key = "value"`, `multi` and `part` are /// intermediate parts which are mapped to the relevant fields in the deserialized type's data /// hierarchy. /// /// # Parameters /// /// * `key_parts`: Each segment of the dotted key, e.g. 
`part.one` maps to /// `vec![Cow::Borrowed("part"), Cow::Borrowed("one")].` /// * `value`: The parsed value. /// * `values`: The `Vec` to store the value in. fn add_dotted_key( &self, mut key_parts: Vec<(Span, Cow<'a, str>)>, value: Value<'a>, values: &mut Vec>, ) -> Result<(), Box> { let key = key_parts.remove(0); if key_parts.is_empty() { values.push((key, value)); return Ok(()); } match values.iter_mut().find(|&&mut (ref k, _)| *k.1 == key.1) { Some(&mut ( _, Value { e: E::DottedTable(ref mut v), .. }, )) => { return self.add_dotted_key(key_parts, value, v); } Some(&mut (_, Value { start, .. })) => { return Err(self.error(start, ErrorKind::DottedKeyInvalidType)); } None => {} } // The start/end value is somewhat misleading here. let table_values = Value { e: E::DottedTable(Vec::new()), start: value.start, end: value.end, }; values.push((key, table_values)); let last_i = values.len() - 1; if let ( _, Value { e: E::DottedTable(ref mut v), .. }, ) = values[last_i] { self.add_dotted_key(key_parts, value, v)?; } Ok(()) } fn eat_whitespace(&mut self) { self.tokens.eat_whitespace(); } fn eat_comment(&mut self) -> Result> { self.tokens.eat_comment().map_err(|e| self.token_error(e)) } fn eat_newline_or_eof(&mut self) -> Result<(), Box> { self.tokens .eat_newline_or_eof() .map_err(|e| self.token_error(e)) } fn eat(&mut self, expected: Token<'a>) -> Result> { self.tokens.eat(expected).map_err(|e| self.token_error(e)) } fn eat_spanned(&mut self, expected: Token<'a>) -> Result, Box> { self.tokens .eat_spanned(expected) .map_err(|e| self.token_error(e)) } fn expect(&mut self, expected: Token<'a>) -> Result<(), Box> { self.tokens .expect(expected) .map_err(|e| self.token_error(e)) } fn expect_spanned(&mut self, expected: Token<'a>) -> Result> { self.tokens .expect_spanned(expected) .map_err(|e| self.token_error(e)) } fn next(&mut self) -> Result)>, Box> { self.tokens.next().map_err(|e| self.token_error(e)) } fn peek(&mut self) -> Result)>, Box> { self.tokens.peek().map_err(|e| 
self.token_error(e)) } fn eof(&self) -> Box { self.error(self.input.len(), ErrorKind::UnexpectedEof) } fn token_error(&self, error: TokenError) -> Box { match error { TokenError::InvalidCharInString(at, ch) => { self.error(at, ErrorKind::InvalidCharInString(ch)) } TokenError::InvalidEscape(at, ch) => self.error(at, ErrorKind::InvalidEscape(ch)), TokenError::InvalidEscapeValue(at, v) => { self.error(at, ErrorKind::InvalidEscapeValue(v)) } TokenError::InvalidHexEscape(at, ch) => self.error(at, ErrorKind::InvalidHexEscape(ch)), TokenError::NewlineInString(at) => self.error(at, ErrorKind::NewlineInString), TokenError::Unexpected(at, ch) => self.error(at, ErrorKind::Unexpected(ch)), TokenError::UnterminatedString(at) => self.error(at, ErrorKind::UnterminatedString), TokenError::NewlineInTableKey(at) => self.error(at, ErrorKind::NewlineInTableKey), TokenError::Wanted { at, expected, found, } => self.error(at, ErrorKind::Wanted { expected, found }), TokenError::MultilineStringKey(at) => self.error(at, ErrorKind::MultilineStringKey), } } fn error(&self, at: usize, kind: ErrorKind) -> Box { let mut err = Error::from_kind(Some(at), kind); err.fix_linecol(|at| self.to_linecol(at)); err } /// Converts a byte offset from an error message to a (line, column) pair /// /// All indexes are 0-based. fn to_linecol(&self, offset: usize) -> (usize, usize) { let mut cur = 0; // Use split_terminator instead of lines so that if there is a `\r`, it // is included in the offset calculation. The `+1` values below account // for the `\n`. 
for (i, line) in self.input.split_terminator('\n').enumerate() { if cur + line.len() + 1 > offset { return (i, offset - cur); } cur += line.len() + 1; } (self.input.lines().count(), 0) } } impl Error { pub(crate) fn line_col(&self) -> Option<(usize, usize)> { self.line.map(|line| (line, self.col)) } fn from_kind(at: Option, kind: ErrorKind) -> Box { Box::new(Error { kind, line: None, col: 0, at, message: String::new(), key: Vec::new(), }) } fn custom(at: Option, s: String) -> Box { Box::new(Error { kind: ErrorKind::Custom, line: None, col: 0, at, message: s, key: Vec::new(), }) } pub(crate) fn add_key_context(&mut self, key: &str) { self.key.insert(0, key.to_string()); } fn fix_offset(&mut self, f: F) where F: FnOnce() -> Option, { // An existing offset is always better positioned than anything we might // want to add later. if self.at.is_none() { self.at = f(); } } fn fix_linecol(&mut self, f: F) where F: FnOnce(usize) -> (usize, usize), { if let Some(at) = self.at { let (line, col) = f(at); self.line = Some(line); self.col = col; } } } impl std::convert::From for std::io::Error { fn from(e: Error) -> Self { std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()) } } impl Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self.kind { ErrorKind::UnexpectedEof => "unexpected eof encountered".fmt(f)?, ErrorKind::InvalidCharInString(c) => write!( f, "invalid character in string: `{}`", c.escape_default().collect::() )?, ErrorKind::InvalidEscape(c) => write!( f, "invalid escape character in string: `{}`", c.escape_default().collect::() )?, ErrorKind::InvalidHexEscape(c) => write!( f, "invalid hex escape character in string: `{}`", c.escape_default().collect::() )?, ErrorKind::InvalidEscapeValue(c) => write!(f, "invalid escape value: `{}`", c)?, ErrorKind::NewlineInString => "newline in string found".fmt(f)?, ErrorKind::Unexpected(ch) => write!( f, "unexpected character found: `{}`", ch.escape_default().collect::() )?, 
ErrorKind::UnterminatedString => "unterminated string".fmt(f)?, ErrorKind::NewlineInTableKey => "found newline in table key".fmt(f)?, ErrorKind::Wanted { expected, found } => { write!(f, "expected {}, found {}", expected, found)?; } ErrorKind::NumberInvalid => "invalid number".fmt(f)?, ErrorKind::DuplicateTable(ref s) => { write!(f, "redefinition of table `{}`", s)?; } ErrorKind::DuplicateKey(ref s) => { write!(f, "duplicate key: `{}`", s)?; } ErrorKind::RedefineAsArray => "table redefined as array".fmt(f)?, ErrorKind::MultilineStringKey => "multiline strings are not allowed for key".fmt(f)?, ErrorKind::Custom => self.message.fmt(f)?, ErrorKind::ExpectedTuple(l) => write!(f, "expected table with length {}", l)?, ErrorKind::ExpectedTupleIndex { expected, ref found, } => write!(f, "expected table key `{}`, but was `{}`", expected, found)?, ErrorKind::ExpectedEmptyTable => "expected empty table".fmt(f)?, ErrorKind::DottedKeyInvalidType => { "dotted key attempted to extend non-table type".fmt(f)?; } ErrorKind::UnexpectedKeys { ref keys, available, } => write!( f, "unexpected keys in table: `{:?}`, available keys: `{:?}`", keys, available )?, ErrorKind::UnquotedString => write!( f, "invalid TOML value, did you mean to use a quoted string?" 
)?, } if !self.key.is_empty() { write!(f, " for key `")?; for (i, k) in self.key.iter().enumerate() { if i > 0 { write!(f, ".")?; } write!(f, "{}", k)?; } write!(f, "`")?; } if let Some(line) = self.line { write!(f, " at line {} column {}", line + 1, self.col + 1)?; } Ok(()) } } impl error::Error for Error {} impl de::Error for Box { fn custom(msg: T) -> Self { Error::custom(None, msg.to_string()) } } enum Line<'a> { Table { at: usize, header: Header<'a>, array: bool, }, KeyValue(Vec<(Span, Cow<'a, str>)>, Value<'a>), } struct Header<'a> { first: bool, array: bool, tokens: Tokenizer<'a>, } impl<'a> Header<'a> { fn new(tokens: Tokenizer<'a>, array: bool) -> Header<'a> { Header { first: true, array, tokens, } } fn next(&mut self) -> Result)>, TokenError> { self.tokens.eat_whitespace(); if self.first || self.tokens.eat(Token::Period)? { self.first = false; self.tokens.eat_whitespace(); self.tokens.table_key().map(Some) } else { self.tokens.expect(Token::RightBracket)?; if self.array { self.tokens.expect(Token::RightBracket)?; } self.tokens.eat_whitespace(); if !self.tokens.eat_comment()? { self.tokens.eat_newline_or_eof()?; } Ok(None) } } } #[derive(Debug)] struct Value<'a> { e: E<'a>, start: usize, end: usize, } #[derive(Debug)] enum E<'a> { Integer(i64), Float(f64), Boolean(bool), String(Cow<'a, str>), Array(Vec>), InlineTable(Vec>), DottedTable(Vec>), } impl<'a> E<'a> { fn type_name(&self) -> &'static str { match *self { E::String(..) => "string", E::Integer(..) => "integer", E::Float(..) => "float", E::Boolean(..) => "boolean", E::Array(..) => "array", E::InlineTable(..) => "inline table", E::DottedTable(..) => "dotted table", } } } basic-toml-0.1.10/src/error.rs000064400000000000000000000026321046102023000142260ustar 00000000000000use std::fmt::{self, Debug, Display}; /// Errors that can occur when serializing or deserializing TOML. 
pub struct Error(Box); pub(crate) enum ErrorInner { Ser(crate::ser::Error), De(crate::de::Error), } impl Error { /// Produces a (line, column) pair of the position of the error if /// available. /// /// All indexes are 0-based. pub fn line_col(&self) -> Option<(usize, usize)> { match &*self.0 { ErrorInner::Ser(_) => None, ErrorInner::De(error) => error.line_col(), } } } impl From for Error { fn from(error: crate::ser::Error) -> Self { Error(Box::new(ErrorInner::Ser(error))) } } impl From for Error { fn from(error: crate::de::Error) -> Self { Error(Box::new(ErrorInner::De(error))) } } impl Display for Error { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { match &*self.0 { ErrorInner::Ser(error) => Display::fmt(error, formatter), ErrorInner::De(error) => Display::fmt(error, formatter), } } } impl Debug for Error { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { match &*self.0 { ErrorInner::Ser(error) => Debug::fmt(error, formatter), ErrorInner::De(error) => Debug::fmt(error, formatter), } } } impl std::error::Error for Error {} basic-toml-0.1.10/src/lib.rs000064400000000000000000000110061046102023000136360ustar 00000000000000//! [![github]](https://github.com/dtolnay/basic-toml) [![crates-io]](https://crates.io/crates/basic-toml) [![docs-rs]](https://docs.rs/basic-toml) //! //! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github //! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust //! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs //! //!
//! //! A library for parsing and producing data in [TOML] format using [Serde]. //! //! TOML is designed to be "a config file format for humans": minimal and easy //! to read due to obvious semantics. //! //! ```toml //! [package] //! name = "basic-toml" #![doc = concat!("version = \"", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"), ".", env!("CARGO_PKG_VERSION_PATCH"), "\"")] //! authors = ["Alex Crichton "] //! //! [dependencies] //! serde = "1.0" //! ``` //! //! The TOML format is widely used throughout the Rust community for //! configuration, notably being used by [Cargo], Rust's package manager. //! //! [TOML]: https://toml.io //! [Serde]: https://serde.rs //! [Cargo]: https://crates.io //! //! # Deserialization //! //! ``` //! use semver::{Version, VersionReq}; //! use serde_derive::Deserialize; //! use std::collections::BTreeMap as Map; //! //! #[derive(Deserialize)] //! struct Manifest { //! package: Package, //! #[serde(default)] //! dependencies: Map, //! } //! //! #[derive(Deserialize)] //! struct Package { //! name: String, //! version: Version, //! #[serde(default)] //! authors: Vec, //! } //! //! fn main() { //! let manifest: Manifest = basic_toml::from_str(r#" //! [package] //! name = "basic-toml" #![doc = concat!(" version = \"", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"), ".", env!("CARGO_PKG_VERSION_PATCH"), "\"")] //! authors = ["Alex Crichton "] //! //! [dependencies] //! serde = "^1.0" //! "#).unwrap(); //! //! assert_eq!(manifest.package.name, "basic-toml"); #![doc = concat!(" assert_eq!(manifest.package.version, Version::new(", env!("CARGO_PKG_VERSION_MAJOR"), ", ", env!("CARGO_PKG_VERSION_MINOR"), ", ", env!("CARGO_PKG_VERSION_PATCH"), "));")] //! assert_eq!(manifest.package.authors, ["Alex Crichton "]); //! assert_eq!(manifest.dependencies["serde"].to_string(), "^1.0"); //! } //! ``` //! //! # Serialization //! //! ``` //! use semver::{Version, VersionReq}; //! 
use serde_derive::Serialize; //! use std::collections::BTreeMap as Map; //! //! #[derive(Serialize)] //! struct Manifest { //! package: Package, //! dependencies: Map, //! } //! //! #[derive(Serialize)] //! struct Package { //! name: String, //! version: Version, //! authors: Vec, //! } //! //! fn main() { //! let manifest = Manifest { //! package: Package { //! name: "basic-toml".to_owned(), #![doc = concat!(" version: Version::new(", env!("CARGO_PKG_VERSION_MAJOR"), ", ", env!("CARGO_PKG_VERSION_MINOR"), ", ", env!("CARGO_PKG_VERSION_PATCH"), "),")] //! authors: vec!["Alex Crichton ".to_owned()], //! }, //! dependencies: { //! let mut dependencies = Map::new(); //! dependencies.insert("serde".to_owned(), "^1.0".parse().unwrap()); //! dependencies //! }, //! }; //! //! let toml = basic_toml::to_string(&manifest).unwrap(); //! print!("{}", toml); //! } //! ``` //! //! # Spec compatibility //! //! TOML v0.5.0. //! //! TOML's date and time syntax are not supported. #![doc(html_root_url = "https://docs.rs/basic-toml/0.1.10")] #![deny(missing_docs)] #![allow( clippy::bool_to_int_with_if, clippy::elidable_lifetime_names, clippy::let_underscore_untyped, clippy::manual_let_else, clippy::manual_range_contains, clippy::match_like_matches_macro, clippy::missing_errors_doc, clippy::must_use_candidate, clippy::needless_doctest_main, clippy::needless_lifetimes, clippy::needless_pass_by_value, clippy::similar_names, clippy::type_complexity, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unwrap_or_default )] mod de; mod error; mod ser; mod tokens; pub use crate::de::{from_slice, from_str}; pub use crate::error::Error; pub use crate::ser::to_string; basic-toml-0.1.10/src/ser.rs000064400000000000000000000555461046102023000137020ustar 00000000000000use serde::ser::{self, Serialize}; use std::cell::Cell; use std::error; use std::fmt::{self, Display, Write}; /// Serialize the given data structure as a String of TOML. 
/// /// Serialization can fail if `T`'s implementation of `Serialize` decides to /// fail, if `T` contains a map with non-string keys, or if `T` attempts to /// serialize an unsupported datatype such as an enum, tuple, or tuple struct. pub fn to_string(value: &T) -> Result where T: ?Sized + Serialize, { let mut dst = String::with_capacity(128); value.serialize(&mut Serializer::new(&mut dst))?; Ok(dst) } #[derive(Debug)] pub(crate) enum Error { /// Indicates that a Rust type was requested to be serialized but it was not /// supported. /// /// Currently the TOML format does not support serializing types such as /// enums, tuples and tuple structs. UnsupportedType, /// The key of all TOML maps must be strings, but serialization was /// attempted where the key of a map was not a string. KeyNotString, /// All values in a TOML table must be emitted before further tables are /// emitted. If a value is emitted *after* a table then this error is /// generated. ValueAfterTable, /// None was attempted to be serialized, but it's not supported. UnsupportedNone, /// A custom error which could be generated when serializing a particular /// type. 
Custom(String), } struct Serializer<'a> { dst: &'a mut String, state: State<'a>, } #[derive(Debug, Copy, Clone)] enum ArrayState { Started, StartedAsATable, } #[derive(Debug, Clone)] enum State<'a> { Table { key: &'a str, parent: &'a State<'a>, first: &'a Cell, table_emitted: &'a Cell, }, Array { parent: &'a State<'a>, first: &'a Cell, type_: &'a Cell>, len: Option, }, End, } struct SerializeSeq<'a, 'b> { ser: &'b mut Serializer<'a>, first: Cell, type_: Cell>, len: Option, } struct SerializeTable<'a, 'b> { ser: &'b mut Serializer<'a>, key: String, first: Cell, table_emitted: Cell, } impl<'a> Serializer<'a> { fn new(dst: &'a mut String) -> Serializer<'a> { Serializer { dst, state: State::End, } } fn display(&mut self, t: T, type_: ArrayState) -> Result<(), Error> { self.emit_key(type_)?; write!(self.dst, "{}", t).map_err(ser::Error::custom)?; if let State::Table { .. } = self.state { self.dst.push('\n'); } Ok(()) } fn emit_key(&mut self, type_: ArrayState) -> Result<(), Error> { self.array_type(type_); let state = self.state.clone(); self.do_emit_key(&state) } // recursive implementation of `emit_key` above fn do_emit_key(&mut self, state: &State) -> Result<(), Error> { match *state { State::End => Ok(()), State::Array { parent, first, type_, len, } => { assert!(type_.get().is_some()); if first.get() { self.do_emit_key(parent)?; } self.emit_array(first, len); Ok(()) } State::Table { parent, first, table_emitted, key, } => { if table_emitted.get() { return Err(Error::ValueAfterTable); } if first.get() { self.emit_table_header(parent)?; first.set(false); } self.escape_key(key)?; self.dst.push_str(" = "); Ok(()) } } } fn emit_array(&mut self, first: &Cell, _len: Option) { if first.get() { self.dst.push('['); } else { self.dst.push_str(", "); } } fn array_type(&mut self, type_: ArrayState) { let prev = match self.state { State::Array { type_, .. 
} => type_, _ => return, }; if prev.get().is_none() { prev.set(Some(type_)); } } fn escape_key(&mut self, key: &str) -> Result<(), Error> { let ok = !key.is_empty() && key.chars().all(|c| match c { 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_' => true, _ => false, }); if ok { write!(self.dst, "{}", key).map_err(ser::Error::custom)?; } else { self.emit_str(key)?; } Ok(()) } fn emit_str(&mut self, value: &str) -> Result<(), Error> { self.dst.push('"'); for ch in value.chars() { match ch { '\u{8}' => self.dst.push_str("\\b"), '\u{9}' => self.dst.push_str("\\t"), '\u{a}' => self.dst.push_str("\\n"), '\u{c}' => self.dst.push_str("\\f"), '\u{d}' => self.dst.push_str("\\r"), '\u{22}' => self.dst.push_str("\\\""), '\u{5c}' => self.dst.push_str("\\\\"), c if c <= '\u{1f}' || c == '\u{7f}' => { write!(self.dst, "\\u{:04X}", ch as u32).map_err(ser::Error::custom)?; } ch => self.dst.push(ch), } } self.dst.push('"'); Ok(()) } fn emit_table_header(&mut self, state: &State) -> Result<(), Error> { let array_of_tables = match *state { State::End => return Ok(()), State::Array { .. } => true, State::Table { .. } => false, }; // Unlike [..]s, we can't omit [[..]] ancestors, so be sure to emit // table headers for them. let mut p = state; if let State::Array { first, parent, .. } = *state { if first.get() { p = parent; } } while let State::Table { first, parent, .. } = *p { p = parent; if !first.get() { break; } if let State::Array { parent: &State::Table { .. }, .. } = *parent { self.emit_table_header(parent)?; break; } } match *state { State::Table { first, .. } => { if !first.get() { // Newline if we are a table that is not the first table in // the document. self.dst.push('\n'); } } State::Array { parent, first, .. } => { if !first.get() { // Always newline if we are not the first item in the // table-array self.dst.push('\n'); } else if let State::Table { first, .. 
} = *parent { if !first.get() { // Newline if we are not the first item in the document self.dst.push('\n'); } } } State::End => {} } self.dst.push('['); if array_of_tables { self.dst.push('['); } self.emit_key_part(state)?; if array_of_tables { self.dst.push(']'); } self.dst.push_str("]\n"); Ok(()) } fn emit_key_part(&mut self, key: &State) -> Result { match *key { State::Array { parent, .. } => self.emit_key_part(parent), State::End => Ok(true), State::Table { key, parent, table_emitted, .. } => { table_emitted.set(true); let first = self.emit_key_part(parent)?; if !first { self.dst.push('.'); } self.escape_key(key)?; Ok(false) } } } } macro_rules! serialize_float { ($this:expr, $v:expr) => {{ $this.emit_key(ArrayState::Started)?; match ($v.is_sign_negative(), $v.is_nan(), $v == 0.0) { (_, true, _) => write!($this.dst, "nan"), (true, false, true) => write!($this.dst, "-0.0"), (false, false, true) => write!($this.dst, "0.0"), (_, false, false) => write!($this.dst, "{}", $v).and_then(|()| { if $v % 1.0 == 0.0 { write!($this.dst, ".0") } else { Ok(()) } }), } .map_err(ser::Error::custom)?; if let State::Table { .. 
} = $this.state { $this.dst.push_str("\n"); } return Ok(()); }}; } impl<'a, 'b> ser::Serializer for &'b mut Serializer<'a> { type Ok = (); type Error = Error; type SerializeSeq = SerializeSeq<'a, 'b>; type SerializeTuple = SerializeSeq<'a, 'b>; type SerializeTupleStruct = SerializeSeq<'a, 'b>; type SerializeTupleVariant = ser::Impossible<(), Error>; type SerializeMap = SerializeTable<'a, 'b>; type SerializeStruct = SerializeTable<'a, 'b>; type SerializeStructVariant = ser::Impossible<(), Error>; fn serialize_bool(self, v: bool) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_i8(self, v: i8) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_i16(self, v: i16) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_i32(self, v: i32) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_i64(self, v: i64) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_u8(self, v: u8) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_u16(self, v: u16) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_u32(self, v: u32) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_u64(self, v: u64) -> Result<(), Self::Error> { self.display(v, ArrayState::Started) } fn serialize_f32(self, v: f32) -> Result<(), Self::Error> { serialize_float!(self, v) } fn serialize_f64(self, v: f64) -> Result<(), Self::Error> { serialize_float!(self, v) } fn serialize_char(self, v: char) -> Result<(), Self::Error> { let mut buf = [0; 4]; self.serialize_str(v.encode_utf8(&mut buf)) } fn serialize_str(self, value: &str) -> Result<(), Self::Error> { self.emit_key(ArrayState::Started)?; self.emit_str(value)?; if let State::Table { .. 
} = self.state { self.dst.push('\n'); } Ok(()) } fn serialize_bytes(self, value: &[u8]) -> Result<(), Self::Error> { value.serialize(self) } fn serialize_none(self) -> Result<(), Self::Error> { Err(Error::UnsupportedNone) } fn serialize_some(self, value: &T) -> Result<(), Self::Error> where T: ?Sized + Serialize, { value.serialize(self) } fn serialize_unit(self) -> Result<(), Self::Error> { Err(Error::UnsupportedType) } fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> { Err(Error::UnsupportedType) } fn serialize_unit_variant( self, _name: &'static str, _variant_index: u32, variant: &'static str, ) -> Result<(), Self::Error> { self.serialize_str(variant) } fn serialize_newtype_struct(self, _name: &'static str, value: &T) -> Result<(), Self::Error> where T: ?Sized + Serialize, { value.serialize(self) } fn serialize_newtype_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, _value: &T, ) -> Result<(), Self::Error> where T: ?Sized + Serialize, { Err(Error::UnsupportedType) } fn serialize_seq(self, len: Option) -> Result { self.array_type(ArrayState::Started); Ok(SerializeSeq { ser: self, first: Cell::new(true), type_: Cell::new(None), len, }) } fn serialize_tuple(self, len: usize) -> Result { self.serialize_seq(Some(len)) } fn serialize_tuple_struct( self, _name: &'static str, len: usize, ) -> Result { self.serialize_seq(Some(len)) } fn serialize_tuple_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize, ) -> Result { Err(Error::UnsupportedType) } fn serialize_map(self, _len: Option) -> Result { self.array_type(ArrayState::StartedAsATable); Ok(SerializeTable { ser: self, key: String::new(), first: Cell::new(true), table_emitted: Cell::new(false), }) } fn serialize_struct( self, _name: &'static str, _len: usize, ) -> Result { self.array_type(ArrayState::StartedAsATable); Ok(SerializeTable { ser: self, key: String::new(), first: Cell::new(true), table_emitted: 
Cell::new(false), }) } fn serialize_struct_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize, ) -> Result { Err(Error::UnsupportedType) } } impl<'a, 'b> ser::SerializeSeq for SerializeSeq<'a, 'b> { type Ok = (); type Error = Error; fn serialize_element(&mut self, value: &T) -> Result<(), Error> where T: ?Sized + Serialize, { value.serialize(&mut Serializer { dst: &mut *self.ser.dst, state: State::Array { parent: &self.ser.state, first: &self.first, type_: &self.type_, len: self.len, }, })?; self.first.set(false); Ok(()) } fn end(self) -> Result<(), Error> { match self.type_.get() { Some(ArrayState::StartedAsATable) => return Ok(()), Some(ArrayState::Started) => self.ser.dst.push(']'), None => { assert!(self.first.get()); self.ser.emit_key(ArrayState::Started)?; self.ser.dst.push_str("[]"); } } if let State::Table { .. } = self.ser.state { self.ser.dst.push('\n'); } Ok(()) } } impl<'a, 'b> ser::SerializeTuple for SerializeSeq<'a, 'b> { type Ok = (); type Error = Error; fn serialize_element(&mut self, value: &T) -> Result<(), Error> where T: ?Sized + Serialize, { ser::SerializeSeq::serialize_element(self, value) } fn end(self) -> Result<(), Error> { ser::SerializeSeq::end(self) } } impl<'a, 'b> ser::SerializeTupleStruct for SerializeSeq<'a, 'b> { type Ok = (); type Error = Error; fn serialize_field(&mut self, value: &T) -> Result<(), Error> where T: ?Sized + Serialize, { ser::SerializeSeq::serialize_element(self, value) } fn end(self) -> Result<(), Error> { ser::SerializeSeq::end(self) } } impl<'a, 'b> ser::SerializeMap for SerializeTable<'a, 'b> { type Ok = (); type Error = Error; fn serialize_key(&mut self, input: &T) -> Result<(), Error> where T: ?Sized + Serialize, { self.key = input.serialize(StringExtractor)?; Ok(()) } fn serialize_value(&mut self, value: &T) -> Result<(), Error> where T: ?Sized + Serialize, { let res = value.serialize(&mut Serializer { dst: &mut *self.ser.dst, state: State::Table { key: &self.key, 
parent: &self.ser.state, first: &self.first, table_emitted: &self.table_emitted, }, }); match res { Ok(()) => self.first.set(false), Err(Error::UnsupportedNone) => {} Err(e) => return Err(e), } Ok(()) } fn end(self) -> Result<(), Error> { if self.first.get() { let state = self.ser.state.clone(); self.ser.emit_table_header(&state)?; } Ok(()) } } impl<'a, 'b> ser::SerializeStruct for SerializeTable<'a, 'b> { type Ok = (); type Error = Error; fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), Error> where T: ?Sized + Serialize, { let res = value.serialize(&mut Serializer { dst: &mut *self.ser.dst, state: State::Table { key, parent: &self.ser.state, first: &self.first, table_emitted: &self.table_emitted, }, }); match res { Ok(()) => self.first.set(false), Err(Error::UnsupportedNone) => {} Err(e) => return Err(e), } Ok(()) } fn end(self) -> Result<(), Error> { if self.first.get() { let state = self.ser.state.clone(); self.ser.emit_table_header(&state)?; } Ok(()) } } struct StringExtractor; impl ser::Serializer for StringExtractor { type Ok = String; type Error = Error; type SerializeSeq = ser::Impossible; type SerializeTuple = ser::Impossible; type SerializeTupleStruct = ser::Impossible; type SerializeTupleVariant = ser::Impossible; type SerializeMap = ser::Impossible; type SerializeStruct = ser::Impossible; type SerializeStructVariant = ser::Impossible; fn serialize_bool(self, _v: bool) -> Result { Err(Error::KeyNotString) } fn serialize_i8(self, _v: i8) -> Result { Err(Error::KeyNotString) } fn serialize_i16(self, _v: i16) -> Result { Err(Error::KeyNotString) } fn serialize_i32(self, _v: i32) -> Result { Err(Error::KeyNotString) } fn serialize_i64(self, _v: i64) -> Result { Err(Error::KeyNotString) } fn serialize_u8(self, _v: u8) -> Result { Err(Error::KeyNotString) } fn serialize_u16(self, _v: u16) -> Result { Err(Error::KeyNotString) } fn serialize_u32(self, _v: u32) -> Result { Err(Error::KeyNotString) } fn serialize_u64(self, _v: u64) -> 
Result { Err(Error::KeyNotString) } fn serialize_f32(self, _v: f32) -> Result { Err(Error::KeyNotString) } fn serialize_f64(self, _v: f64) -> Result { Err(Error::KeyNotString) } fn serialize_char(self, _v: char) -> Result { Err(Error::KeyNotString) } fn serialize_str(self, value: &str) -> Result { Ok(value.to_string()) } fn serialize_bytes(self, _value: &[u8]) -> Result { Err(Error::KeyNotString) } fn serialize_none(self) -> Result { Err(Error::KeyNotString) } fn serialize_some(self, _value: &T) -> Result where T: ?Sized + Serialize, { Err(Error::KeyNotString) } fn serialize_unit(self) -> Result { Err(Error::KeyNotString) } fn serialize_unit_struct(self, _name: &'static str) -> Result { Err(Error::KeyNotString) } fn serialize_unit_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, ) -> Result { Err(Error::KeyNotString) } fn serialize_newtype_struct( self, _name: &'static str, value: &T, ) -> Result where T: ?Sized + Serialize, { value.serialize(self) } fn serialize_newtype_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, _value: &T, ) -> Result where T: ?Sized + Serialize, { Err(Error::KeyNotString) } fn serialize_seq(self, _len: Option) -> Result { Err(Error::KeyNotString) } fn serialize_tuple(self, _len: usize) -> Result { Err(Error::KeyNotString) } fn serialize_tuple_struct( self, _name: &'static str, _len: usize, ) -> Result { Err(Error::KeyNotString) } fn serialize_tuple_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize, ) -> Result { Err(Error::KeyNotString) } fn serialize_map(self, _len: Option) -> Result { Err(Error::KeyNotString) } fn serialize_struct( self, _name: &'static str, _len: usize, ) -> Result { Err(Error::KeyNotString) } fn serialize_struct_variant( self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize, ) -> Result { Err(Error::KeyNotString) } } impl Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result { match *self { Error::UnsupportedType => "unsupported Rust type".fmt(f), Error::KeyNotString => "map key was not a string".fmt(f), Error::ValueAfterTable => "values must be emitted before tables".fmt(f), Error::UnsupportedNone => "unsupported None value".fmt(f), Error::Custom(ref s) => s.fmt(f), } } } impl error::Error for Error {} impl ser::Error for Error { fn custom(msg: T) -> Error { Error::Custom(msg.to_string()) } } basic-toml-0.1.10/src/tokens.rs000064400000000000000000000412701046102023000144010ustar 00000000000000use std::borrow::Cow; use std::char; use std::str; /// A span, designating a range of bytes where a token is located. #[derive(Eq, PartialEq, Debug, Clone, Copy)] pub struct Span { /// The start of the range. pub start: usize, /// The end of the range (exclusive). pub end: usize, } impl From for (usize, usize) { fn from(Span { start, end }: Span) -> (usize, usize) { (start, end) } } #[derive(Eq, PartialEq, Debug)] pub enum Token<'a> { Whitespace(&'a str), Newline, Comment(&'a str), Equals, Period, Comma, Colon, Plus, LeftBrace, RightBrace, LeftBracket, RightBracket, Keylike(&'a str), String { src: &'a str, val: Cow<'a, str>, multiline: bool, }, } #[derive(Eq, PartialEq, Debug)] pub enum Error { InvalidCharInString(usize, char), InvalidEscape(usize, char), InvalidHexEscape(usize, char), InvalidEscapeValue(usize, u32), NewlineInString(usize), Unexpected(usize, char), UnterminatedString(usize), NewlineInTableKey(usize), MultilineStringKey(usize), Wanted { at: usize, expected: &'static str, found: &'static str, }, } #[derive(Clone)] pub struct Tokenizer<'a> { input: &'a str, chars: CrlfFold<'a>, } #[derive(Clone)] struct CrlfFold<'a> { chars: str::CharIndices<'a>, } #[derive(Debug)] enum MaybeString { NotEscaped(usize), Owned(String), } impl<'a> Tokenizer<'a> { pub fn new(input: &'a str) -> Tokenizer<'a> { let mut t = Tokenizer { input, chars: CrlfFold { chars: input.char_indices(), }, }; // Eat utf-8 BOM t.eatc('\u{feff}'); t } pub fn 
next(&mut self) -> Result)>, Error> { let (start, token) = match self.one() { Some((start, '\n')) => (start, Token::Newline), Some((start, ' ' | '\t')) => (start, self.whitespace_token(start)), Some((start, '#')) => (start, self.comment_token(start)), Some((start, '=')) => (start, Token::Equals), Some((start, '.')) => (start, Token::Period), Some((start, ',')) => (start, Token::Comma), Some((start, ':')) => (start, Token::Colon), Some((start, '+')) => (start, Token::Plus), Some((start, '{')) => (start, Token::LeftBrace), Some((start, '}')) => (start, Token::RightBrace), Some((start, '[')) => (start, Token::LeftBracket), Some((start, ']')) => (start, Token::RightBracket), Some((start, '\'')) => { return self .literal_string(start) .map(|t| Some((self.step_span(start), t))) } Some((start, '"')) => { return self .basic_string(start) .map(|t| Some((self.step_span(start), t))) } Some((start, ch)) if is_keylike(ch) => (start, self.keylike(start)), Some((start, ch)) => return Err(Error::Unexpected(start, ch)), None => return Ok(None), }; let span = self.step_span(start); Ok(Some((span, token))) } pub fn peek(&mut self) -> Result)>, Error> { self.clone().next() } pub fn eat(&mut self, expected: Token<'a>) -> Result { self.eat_spanned(expected).map(|s| s.is_some()) } /// Eat a value, returning it's span if it was consumed. pub fn eat_spanned(&mut self, expected: Token<'a>) -> Result, Error> { let span = match self.peek()? { Some((span, ref found)) if expected == *found => span, Some(_) | None => return Ok(None), }; drop(self.next()); Ok(Some(span)) } pub fn expect(&mut self, expected: Token<'a>) -> Result<(), Error> { // ignore span let _ = self.expect_spanned(expected)?; Ok(()) } /// Expect the given token returning its span. pub fn expect_spanned(&mut self, expected: Token<'a>) -> Result { let current = self.current(); match self.next()? 
{ Some((span, found)) => { if expected == found { Ok(span) } else { Err(Error::Wanted { at: current, expected: expected.describe(), found: found.describe(), }) } } None => Err(Error::Wanted { at: self.input.len(), expected: expected.describe(), found: "eof", }), } } pub fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Error> { let current = self.current(); match self.next()? { Some((span, Token::Keylike(k))) => Ok((span, k.into())), Some(( span, Token::String { src, val, multiline, }, )) => { let offset = self.substr_offset(src); if multiline { return Err(Error::MultilineStringKey(offset)); } match src.find('\n') { None => Ok((span, val)), Some(i) => Err(Error::NewlineInTableKey(offset + i)), } } Some((_, other)) => Err(Error::Wanted { at: current, expected: "a table key", found: other.describe(), }), None => Err(Error::Wanted { at: self.input.len(), expected: "a table key", found: "eof", }), } } pub fn eat_whitespace(&mut self) { while self.eatc(' ') || self.eatc('\t') { // ... } } pub fn eat_comment(&mut self) -> Result { if !self.eatc('#') { return Ok(false); } drop(self.comment_token(0)); self.eat_newline_or_eof().map(|()| true) } pub fn eat_newline_or_eof(&mut self) -> Result<(), Error> { let current = self.current(); match self.next()? { None | Some((_, Token::Newline)) => Ok(()), Some((_, other)) => Err(Error::Wanted { at: current, expected: "newline", found: other.describe(), }), } } pub fn skip_to_newline(&mut self) { loop { match self.one() { Some((_, '\n')) | None => break, _ => {} } } } fn eatc(&mut self, ch: char) -> bool { match self.chars.clone().next() { Some((_, ch2)) if ch == ch2 => { self.one(); true } _ => false, } } pub fn current(&mut self) -> usize { match self.chars.clone().next() { Some(i) => i.0, None => self.input.len(), } } fn whitespace_token(&mut self, start: usize) -> Token<'a> { while self.eatc(' ') || self.eatc('\t') { // ... 
} Token::Whitespace(&self.input[start..self.current()]) } fn comment_token(&mut self, start: usize) -> Token<'a> { while let Some((_, ch)) = self.chars.clone().next() { if ch != '\t' && (ch < '\u{20}' || ch > '\u{10ffff}') { break; } self.one(); } Token::Comment(&self.input[start..self.current()]) } fn read_string( &mut self, delim: char, start: usize, new_ch: &mut dyn FnMut( &mut Tokenizer, &mut MaybeString, bool, usize, char, ) -> Result<(), Error>, ) -> Result, Error> { let mut multiline = false; if self.eatc(delim) { if self.eatc(delim) { multiline = true; } else { return Ok(Token::String { src: &self.input[start..start + 2], val: Cow::Borrowed(""), multiline: false, }); } } let mut val = MaybeString::NotEscaped(self.current()); let mut n = 0; loop { n += 1; match self.one() { Some((i, '\n')) => { if multiline { if self.input.as_bytes()[i] == b'\r' { val.make_owned(&self.input[..i]); } if n == 1 { val = MaybeString::NotEscaped(self.current()); } else { val.push('\n'); } } else { return Err(Error::NewlineInString(i)); } } Some((mut i, ch)) if ch == delim => { if multiline { if !self.eatc(delim) { val.push(delim); continue; } if !self.eatc(delim) { val.push(delim); val.push(delim); continue; } if self.eatc(delim) { val.push(delim); i += 1; } if self.eatc(delim) { val.push(delim); i += 1; } } return Ok(Token::String { src: &self.input[start..self.current()], val: val.into_cow(&self.input[..i]), multiline, }); } Some((i, c)) => new_ch(self, &mut val, multiline, i, c)?, None => return Err(Error::UnterminatedString(start)), } } } fn literal_string(&mut self, start: usize) -> Result, Error> { self.read_string('\'', start, &mut |_me, val, _multi, i, ch| { if ch == '\u{09}' || ('\u{20}' <= ch && ch <= '\u{10ffff}' && ch != '\u{7f}') { val.push(ch); Ok(()) } else { Err(Error::InvalidCharInString(i, ch)) } }) } fn basic_string(&mut self, start: usize) -> Result, Error> { self.read_string('"', start, &mut |me, val, multi, i, ch| match ch { '\\' => { 
val.make_owned(&me.input[..i]); match me.chars.next() { Some((_, '"')) => val.push('"'), Some((_, '\\')) => val.push('\\'), Some((_, 'b')) => val.push('\u{8}'), Some((_, 'f')) => val.push('\u{c}'), Some((_, 'n')) => val.push('\n'), Some((_, 'r')) => val.push('\r'), Some((_, 't')) => val.push('\t'), Some((i, c @ ('u' | 'U'))) => { let len = if c == 'u' { 4 } else { 8 }; val.push(me.hex(start, i, len)?); } Some((i, c @ (' ' | '\t' | '\n'))) if multi => { if c != '\n' { while let Some((_, ch)) = me.chars.clone().next() { match ch { ' ' | '\t' => { me.chars.next(); } '\n' => { me.chars.next(); break; } _ => return Err(Error::InvalidEscape(i, c)), } } } while let Some((_, ch)) = me.chars.clone().next() { match ch { ' ' | '\t' | '\n' => { me.chars.next(); } _ => break, } } } Some((i, c)) => return Err(Error::InvalidEscape(i, c)), None => return Err(Error::UnterminatedString(start)), } Ok(()) } ch if ch == '\u{09}' || ('\u{20}' <= ch && ch <= '\u{10ffff}' && ch != '\u{7f}') => { val.push(ch); Ok(()) } _ => Err(Error::InvalidCharInString(i, ch)), }) } fn hex(&mut self, start: usize, i: usize, len: usize) -> Result { let mut buf = String::with_capacity(len); for _ in 0..len { match self.one() { Some((_, ch)) if ch as u32 <= 0x7F && ch.is_ascii_hexdigit() => buf.push(ch), Some((i, ch)) => return Err(Error::InvalidHexEscape(i, ch)), None => return Err(Error::UnterminatedString(start)), } } let val = u32::from_str_radix(&buf, 16).unwrap(); match char::from_u32(val) { Some(ch) => Ok(ch), None => Err(Error::InvalidEscapeValue(i, val)), } } fn keylike(&mut self, start: usize) -> Token<'a> { while let Some((_, ch)) = self.peek_one() { if !is_keylike(ch) { break; } self.one(); } Token::Keylike(&self.input[start..self.current()]) } pub fn substr_offset(&self, s: &'a str) -> usize { assert!(s.len() <= self.input.len()); let a = self.input.as_ptr() as usize; let b = s.as_ptr() as usize; assert!(a <= b); b - a } /// Calculate the span of a single character. 
fn step_span(&mut self, start: usize) -> Span { let end = match self.peek_one() { Some(t) => t.0, None => self.input.len(), }; Span { start, end } } /// Peek one char without consuming it. fn peek_one(&mut self) -> Option<(usize, char)> { self.chars.clone().next() } /// Take one char. pub fn one(&mut self) -> Option<(usize, char)> { self.chars.next() } } impl<'a> Iterator for CrlfFold<'a> { type Item = (usize, char); fn next(&mut self) -> Option<(usize, char)> { self.chars.next().map(|(i, c)| { if c == '\r' { let mut attempt = self.chars.clone(); if let Some((_, '\n')) = attempt.next() { self.chars = attempt; return (i, '\n'); } } (i, c) }) } } impl MaybeString { fn push(&mut self, ch: char) { match *self { MaybeString::NotEscaped(..) => {} MaybeString::Owned(ref mut s) => s.push(ch), } } fn make_owned(&mut self, input: &str) { match *self { MaybeString::NotEscaped(start) => { *self = MaybeString::Owned(input[start..].to_owned()); } MaybeString::Owned(..) => {} } } fn into_cow(self, input: &str) -> Cow { match self { MaybeString::NotEscaped(start) => Cow::Borrowed(&input[start..]), MaybeString::Owned(s) => Cow::Owned(s), } } } fn is_keylike(ch: char) -> bool { ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z') || ('0' <= ch && ch <= '9') || ch == '-' || ch == '_' } impl<'a> Token<'a> { pub fn describe(&self) -> &'static str { match *self { Token::Keylike(_) => "an identifier", Token::Equals => "an equals", Token::Period => "a period", Token::Comment(_) => "a comment", Token::Newline => "a newline", Token::Whitespace(_) => "whitespace", Token::Comma => "a comma", Token::RightBrace => "a right brace", Token::LeftBrace => "a left brace", Token::RightBracket => "a right bracket", Token::LeftBracket => "a left bracket", Token::String { multiline, .. 
} => { if multiline { "a multiline string" } else { "a string" } } Token::Colon => "a colon", Token::Plus => "a plus", } } } basic-toml-0.1.10/tests/README.md000064400000000000000000000000671046102023000143610ustar 00000000000000Tests are from https://github.com/BurntSushi/toml-test basic-toml-0.1.10/tests/datetime.rs000064400000000000000000000075251046102023000152520ustar 00000000000000use serde_json::Value; macro_rules! bad { ($toml:expr, $msg:expr) => { match basic_toml::from_str::($toml) { Ok(s) => panic!("parsed to: {:#?}", s), Err(e) => assert_eq!(e.to_string(), $msg), } }; } #[test] fn times() { fn multi_bad(s: &str, msg: &str) { bad!(s, msg); bad!(&s.replace('T', " "), msg); bad!(&s.replace('T', "t"), msg); bad!(&s.replace('Z', "z"), msg); } multi_bad( "foo = 1997-09-09T09:09:09Z", "invalid number at line 1 column 7", ); multi_bad( "foo = 1997-09-09T09:09:09+09:09", "invalid number at line 1 column 7", ); multi_bad( "foo = 1997-09-09T09:09:09-09:09", "invalid number at line 1 column 7", ); multi_bad( "foo = 1997-09-09T09:09:09", "invalid number at line 1 column 7", ); multi_bad("foo = 1997-09-09", "invalid number at line 1 column 7"); bad!("foo = 1997-09-09 ", "invalid number at line 1 column 7"); bad!( "foo = 1997-09-09 # comment", "invalid number at line 1 column 7" ); multi_bad("foo = 09:09:09", "invalid number at line 1 column 8"); multi_bad( "foo = 1997-09-09T09:09:09.09Z", "invalid number at line 1 column 7", ); multi_bad( "foo = 1997-09-09T09:09:09.09+09:09", "invalid number at line 1 column 7", ); multi_bad( "foo = 1997-09-09T09:09:09.09-09:09", "invalid number at line 1 column 7", ); multi_bad( "foo = 1997-09-09T09:09:09.09", "invalid number at line 1 column 7", ); multi_bad("foo = 09:09:09.09", "invalid number at line 1 column 8"); } #[test] fn bad_times() { bad!("foo = 199-09-09", "invalid number at line 1 column 7"); bad!("foo = 199709-09", "invalid number at line 1 column 7"); bad!("foo = 1997-9-09", "invalid number at line 1 column 7"); 
bad!("foo = 1997-09-9", "invalid number at line 1 column 7"); bad!( "foo = 1997-09-0909:09:09", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.", "invalid number at line 1 column 7" ); bad!( "foo = T", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); bad!( "foo = T.", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); bad!( "foo = TZ", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09+", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09+09", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09+09:9", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09+0909", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09-", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09-09", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09-09:9", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T09:09:09.09-0909", "invalid number at line 1 column 7" ); bad!( "foo = 1997-00-09T09:09:09.09Z", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-00T09:09:09.09Z", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T30:09:09.09Z", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T12:69:09.09Z", "invalid number at line 1 column 7" ); bad!( "foo = 1997-09-09T12:09:69.09Z", "invalid number at line 1 column 7" ); } basic-toml-0.1.10/tests/de-errors.rs000064400000000000000000000205641046102023000153560ustar 00000000000000#![allow( clippy::elidable_lifetime_names, clippy::needless_lifetimes, clippy::too_many_lines )] use serde::{de, Deserialize}; use std::fmt; macro_rules! 
bad { ($toml:expr, $ty:ty, $msg:expr) => { match basic_toml::from_str::<$ty>($toml) { Ok(s) => panic!("parsed to: {:#?}", s), Err(e) => assert_eq!(e.to_string(), $msg), } }; } #[derive(Debug, Deserialize, PartialEq)] struct Parent { p_a: T, p_b: Vec>, } #[derive(Debug, Deserialize, PartialEq)] #[serde(deny_unknown_fields)] struct Child { c_a: T, c_b: T, } #[derive(Debug, PartialEq)] enum CasedString { Lowercase(String), Uppercase(String), } impl<'de> de::Deserialize<'de> for CasedString { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { struct CasedStringVisitor; impl<'de> de::Visitor<'de> for CasedStringVisitor { type Value = CasedString; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a string") } fn visit_str(self, s: &str) -> Result where E: de::Error, { if s.is_empty() { Err(de::Error::invalid_length(0, &"a non-empty string")) } else if s.chars().all(|x| x.is_ascii_lowercase()) { Ok(CasedString::Lowercase(s.to_string())) } else if s.chars().all(|x| x.is_ascii_uppercase()) { Ok(CasedString::Uppercase(s.to_string())) } else { Err(de::Error::invalid_value( de::Unexpected::Str(s), &"all lowercase or all uppercase", )) } } } deserializer.deserialize_any(CasedStringVisitor) } } #[test] fn custom_errors() { basic_toml::from_str::>( " p_a = 'a' p_b = [{c_a = 'a', c_b = 'c'}] ", ) .unwrap(); // Custom error at p_b value. bad!( " p_a = '' # ^ ", Parent, "invalid length 0, expected a non-empty string for key `p_a` at line 2 column 19" ); // Missing field in table. bad!( " p_a = 'a' # ^ ", Parent, "missing field `p_b` at line 1 column 1" ); // Invalid type in p_b. bad!( " p_a = 'a' p_b = 1 # ^ ", Parent, "invalid type: integer `1`, expected a sequence for key `p_b` at line 3 column 19" ); // Sub-table in Vec is missing a field. bad!( " p_a = 'a' p_b = [ {c_a = 'a'} # ^ ] ", Parent, "missing field `c_b` for key `p_b` at line 4 column 17" ); // Sub-table in Vec has a field with a bad value. 
bad!( " p_a = 'a' p_b = [ {c_a = 'a', c_b = '*'} # ^ ] ", Parent, "invalid value: string \"*\", expected all lowercase or all uppercase for key `p_b` at line 4 column 35" ); // Sub-table in Vec is missing a field. bad!( " p_a = 'a' p_b = [ {c_a = 'a', c_b = 'b'}, {c_a = 'aa'} # ^ ] ", Parent, "missing field `c_b` for key `p_b` at line 5 column 17" ); // Sub-table in the middle of a Vec is missing a field. bad!( " p_a = 'a' p_b = [ {c_a = 'a', c_b = 'b'}, {c_a = 'aa'}, # ^ {c_a = 'aaa', c_b = 'bbb'}, ] ", Parent, "missing field `c_b` for key `p_b` at line 5 column 17" ); // Sub-table in the middle of a Vec has a field with a bad value. bad!( " p_a = 'a' p_b = [ {c_a = 'a', c_b = 'b'}, {c_a = 'aa', c_b = 1}, # ^ {c_a = 'aaa', c_b = 'bbb'}, ] ", Parent, "invalid type: integer `1`, expected a string for key `p_b` at line 5 column 36" ); // Sub-table in the middle of a Vec has an extra field. // FIXME: This location could be better. bad!( " p_a = 'a' p_b = [ {c_a = 'a', c_b = 'b'}, {c_a = 'aa', c_b = 'bb', c_d = 'd'}, # ^ {c_a = 'aaa', c_b = 'bbb'}, {c_a = 'aaaa', c_b = 'bbbb'}, ] ", Parent, "unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 5 column 17" ); // Sub-table in the middle of a Vec is missing a field. // FIXME: This location is pretty off. bad!( " p_a = 'a' [[p_b]] c_a = 'a' c_b = 'b' [[p_b]] c_a = 'aa' # c_b = 'bb' # <- missing field [[p_b]] c_a = 'aaa' c_b = 'bbb' [[p_b]] # ^ c_a = 'aaaa' c_b = 'bbbb' ", Parent, "missing field `c_b` for key `p_b` at line 12 column 13" ); // Sub-table in the middle of a Vec has a field with a bad value. bad!( " p_a = 'a' [[p_b]] c_a = 'a' c_b = 'b' [[p_b]] c_a = 'aa' c_b = '*' # ^ [[p_b]] c_a = 'aaa' c_b = 'bbb' ", Parent, "invalid value: string \"*\", expected all lowercase or all uppercase for key `p_b.c_b` at line 8 column 19" ); // Sub-table in the middle of a Vec has an extra field. // FIXME: This location is pretty off. 
bad!( " p_a = 'a' [[p_b]] c_a = 'a' c_b = 'b' [[p_b]] c_a = 'aa' c_d = 'dd' # unknown field [[p_b]] c_a = 'aaa' c_b = 'bbb' [[p_b]] # ^ c_a = 'aaaa' c_b = 'bbbb' ", Parent, "unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 12 column 13" ); } #[test] fn serde_derive_deserialize_errors() { bad!( " p_a = '' # ^ ", Parent, "missing field `p_b` at line 1 column 1" ); bad!( " p_a = '' p_b = [ {c_a = ''} # ^ ] ", Parent, "missing field `c_b` for key `p_b` at line 4 column 17" ); bad!( " p_a = '' p_b = [ {c_a = '', c_b = 1} # ^ ] ", Parent, "invalid type: integer `1`, expected a string for key `p_b` at line 4 column 34" ); // FIXME: This location could be better. bad!( " p_a = '' p_b = [ {c_a = '', c_b = '', c_d = ''}, # ^ ] ", Parent, "unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 4 column 17" ); bad!( " p_a = 'a' p_b = [ {c_a = '', c_b = 1, c_d = ''}, # ^ ] ", Parent, "invalid type: integer `1`, expected a string for key `p_b` at line 4 column 34" ); } #[test] fn error_handles_crlf() { bad!( "\r\n\ [t1]\r\n\ [t2]\r\n\ a = 1\r\n\ . = 2\r\n\ ", serde_json::Value, "expected a table key, found a period at line 5 column 1" ); // Should be the same as above. bad!( "\n\ [t1]\n\ [t2]\n\ a = 1\n\ . 
= 2\n\ ", serde_json::Value, "expected a table key, found a period at line 5 column 1" ); } basic-toml-0.1.10/tests/display-tricky.rs000064400000000000000000000022131046102023000164130ustar 00000000000000use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize)] pub struct Recipe { pub name: String, pub description: Option, #[serde(default)] pub modules: Vec, #[serde(default)] pub packages: Vec, } #[derive(Debug, Serialize, Deserialize)] pub struct Modules { pub name: String, pub version: Option, } #[derive(Debug, Serialize, Deserialize)] pub struct Packages { pub name: String, pub version: Option, } #[test] fn both_ends() { let recipe_works = basic_toml::from_str::( r#" name = "testing" description = "example" modules = [] [[packages]] name = "base" "#, ) .unwrap(); basic_toml::to_string(&recipe_works).unwrap(); let recipe_fails = basic_toml::from_str::( r#" name = "testing" description = "example" packages = [] [[modules]] name = "base" "#, ) .unwrap(); let err = basic_toml::to_string(&recipe_fails).unwrap_err(); assert_eq!(err.to_string(), "values must be emitted before tables"); } basic-toml-0.1.10/tests/enum_external_deserialize.rs000064400000000000000000000011531046102023000206730ustar 00000000000000#![allow(clippy::wildcard_imports)] use serde::Deserialize; #[derive(Debug, Deserialize, PartialEq)] struct Struct { value: Enum, } #[derive(Debug, Deserialize, PartialEq)] enum Enum { Variant, } #[test] fn unknown_variant() { let error = basic_toml::from_str::("value = \"NonExistent\"").unwrap_err(); assert_eq!( error.to_string(), "unknown variant `NonExistent`, expected `Variant` for key `value` at line 1 column 1" ); } #[test] fn from_str() { let s = basic_toml::from_str::("value = \"Variant\"").unwrap(); assert_eq!(Enum::Variant, s.value); } basic-toml-0.1.10/tests/float.rs000064400000000000000000000037501046102023000145570ustar 00000000000000#![allow(clippy::float_cmp)] use serde::{Deserialize, Serialize}; use serde_json::Value; 
#[rustfmt::skip] // appears to be a bug in rustfmt to make this converge... macro_rules! float_inf_tests { ($ty:ty) => {{ #[derive(Serialize, Deserialize)] struct S { sf1: $ty, sf2: $ty, sf3: $ty, sf4: $ty, sf5: $ty, sf6: $ty, sf7: $ty, sf8: $ty, } let inf: S = basic_toml::from_str( " # infinity sf1 = inf # positive infinity sf2 = +inf # positive infinity sf3 = -inf # negative infinity # not a number sf4 = nan # actual sNaN/qNaN encoding is implementation specific sf5 = +nan # same as `nan` sf6 = -nan # valid, actual encoding is implementation specific # zero sf7 = +0.0 sf8 = -0.0 ", ) .expect("Parse infinities."); assert!(inf.sf1.is_infinite()); assert!(inf.sf1.is_sign_positive()); assert!(inf.sf2.is_infinite()); assert!(inf.sf2.is_sign_positive()); assert!(inf.sf3.is_infinite()); assert!(inf.sf3.is_sign_negative()); assert!(inf.sf4.is_nan()); assert!(inf.sf4.is_sign_positive()); assert!(inf.sf5.is_nan()); assert!(inf.sf5.is_sign_positive()); assert!(inf.sf6.is_nan()); assert!(inf.sf6.is_sign_negative()); // NOTE: but serializes to just `nan` assert_eq!(inf.sf7, 0.0); assert!(inf.sf7.is_sign_positive()); assert_eq!(inf.sf8, 0.0); assert!(inf.sf8.is_sign_negative()); let s = basic_toml::to_string(&inf).unwrap(); assert_eq!( s, "\ sf1 = inf sf2 = inf sf3 = -inf sf4 = nan sf5 = nan sf6 = nan sf7 = 0.0 sf8 = -0.0 " ); basic_toml::from_str::(&s).expect("roundtrip"); }}; } #[test] fn float_inf() { float_inf_tests!(f32); float_inf_tests!(f64); } basic-toml-0.1.10/tests/formatting.rs000064400000000000000000000022511046102023000156170ustar 00000000000000use basic_toml::to_string; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] struct User { pub name: String, pub surname: String, } #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] struct Users { pub user: Vec, } #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] struct TwoUsers { pub user0: User, pub user1: User, } #[test] fn 
no_unnecessary_newlines_array() { assert!(!to_string(&Users { user: vec![ User { name: "John".to_string(), surname: "Doe".to_string(), }, User { name: "Jane".to_string(), surname: "Dough".to_string(), }, ], }) .unwrap() .starts_with('\n')); } #[test] fn no_unnecessary_newlines_table() { assert!(!to_string(&TwoUsers { user0: User { name: "John".to_string(), surname: "Doe".to_string(), }, user1: User { name: "Jane".to_string(), surname: "Dough".to_string(), }, }) .unwrap() .starts_with('\n')); } basic-toml-0.1.10/tests/invalid/datetime-malformed-no-leads.toml000064400000000000000000000000371046102023000226620ustar 00000000000000no-leads = 1987-7-05T17:45:00Z basic-toml-0.1.10/tests/invalid/datetime-malformed-no-secs.toml000064400000000000000000000000341046102023000225240ustar 00000000000000no-secs = 1987-07-05T17:45Z basic-toml-0.1.10/tests/invalid/datetime-malformed-no-t.toml000064400000000000000000000000331046102023000220310ustar 00000000000000no-t = 1987-07-0517:45:00Z basic-toml-0.1.10/tests/invalid/datetime-malformed-with-milli.toml000064400000000000000000000000441046102023000232350ustar 00000000000000with-milli = 1987-07-5T17:45:00.12Z basic-toml-0.1.10/tests/invalid/duplicate-key-table.toml000064400000000000000000000000631046102023000212460ustar 00000000000000[fruit] type = "apple" [fruit.type] apple = "yes" basic-toml-0.1.10/tests/invalid/duplicate-keys.toml000064400000000000000000000000311046102023000203370ustar 00000000000000dupe = false dupe = true basic-toml-0.1.10/tests/invalid/duplicate-table.toml000064400000000000000000000001551046102023000204620ustar 00000000000000[dependencies.openssl-sys] version = "0.5.2" [dependencies] libc = "0.1" [dependencies] bitflags = "0.1.1" basic-toml-0.1.10/tests/invalid/duplicate-tables.toml000064400000000000000000000000101046102023000206330ustar 00000000000000[a] [a] basic-toml-0.1.10/tests/invalid/empty-implicit-table.toml000064400000000000000000000000231046102023000214500ustar 00000000000000[naughty..naughty] 
basic-toml-0.1.10/tests/invalid/empty-table.toml000064400000000000000000000000031046102023000176360ustar 00000000000000[] basic-toml-0.1.10/tests/invalid/float-no-leading-zero.toml000064400000000000000000000000441046102023000215150ustar 00000000000000answer = .12345 neganswer = -.12345 basic-toml-0.1.10/tests/invalid/float-no-suffix.toml000064400000000000000000000000111046102023000204330ustar 00000000000000a = 1.2f basic-toml-0.1.10/tests/invalid/float-no-trailing-digits.toml000064400000000000000000000000341046102023000222260ustar 00000000000000answer = 1. neganswer = -1. basic-toml-0.1.10/tests/invalid/key-after-array.toml000064400000000000000000000000401046102023000204170ustar 00000000000000[[agencies]] owner = "S Cjelli" basic-toml-0.1.10/tests/invalid/key-after-table.toml000064400000000000000000000000351046102023000203740ustar 00000000000000[history] guard = "sleeping" basic-toml-0.1.10/tests/invalid/key-empty.toml000064400000000000000000000000051046102023000173410ustar 00000000000000 = 1 basic-toml-0.1.10/tests/invalid/key-hash.toml000064400000000000000000000000071046102023000171300ustar 00000000000000a# = 1 basic-toml-0.1.10/tests/invalid/key-newline.toml000064400000000000000000000000061046102023000176450ustar 00000000000000a = 1 basic-toml-0.1.10/tests/invalid/key-open-bracket.toml000064400000000000000000000000111046102023000205520ustar 00000000000000[abc = 1 basic-toml-0.1.10/tests/invalid/key-single-open-bracket.toml000064400000000000000000000000011046102023000220300ustar 00000000000000[basic-toml-0.1.10/tests/invalid/key-space.toml000064400000000000000000000000071046102023000173000ustar 00000000000000a b = 1basic-toml-0.1.10/tests/invalid/key-start-bracket.toml000064400000000000000000000000211046102023000207470ustar 00000000000000[a] [xyz = 5 [b] basic-toml-0.1.10/tests/invalid/key-two-equals.toml000064400000000000000000000000111046102023000203010ustar 00000000000000key= = 1 
basic-toml-0.1.10/tests/invalid/string-bad-byte-escape.toml000064400000000000000000000000211046102023000216440ustar 00000000000000naughty = "\xAg" basic-toml-0.1.10/tests/invalid/string-bad-escape.toml000064400000000000000000000000761046102023000207150ustar 00000000000000invalid-escape = "This string has a bad \a escape character." basic-toml-0.1.10/tests/invalid/string-bad-line-ending-escape.toml000064400000000000000000000001541046102023000231010ustar 00000000000000invalid-escape = """\ This string has a non whitespace-character after the line ending escape. \ a """ basic-toml-0.1.10/tests/invalid/string-byte-escapes.toml000064400000000000000000000000201046102023000213020ustar 00000000000000answer = "\x33" basic-toml-0.1.10/tests/invalid/string-no-close.toml000064400000000000000000000000521046102023000204420ustar 00000000000000no-ending-quote = "One time, at band camp basic-toml-0.1.10/tests/invalid/table-array-implicit.toml000064400000000000000000000011561046102023000214400ustar 00000000000000# This test is a bit tricky. It should fail because the first use of # `[[albums.songs]]` without first declaring `albums` implies that `albums` # must be a table. The alternative would be quite weird. Namely, it wouldn't # comply with the TOML spec: "Each double-bracketed sub-table will belong to # the most *recently* defined table element *above* it." # # This is in contrast to the *valid* test, table-array-implicit where # `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared # later. (Although, `[albums]` could be.) 
[[albums.songs]] name = "Glory Days" [[albums]] name = "Born in the USA" basic-toml-0.1.10/tests/invalid/table-array-malformed-bracket.toml000064400000000000000000000000371046102023000232020ustar 00000000000000[[albums] name = "Born to Run" basic-toml-0.1.10/tests/invalid/table-array-malformed-empty.toml000064400000000000000000000000321046102023000227200ustar 00000000000000[[]] name = "Born to Run" basic-toml-0.1.10/tests/invalid/table-empty.toml000064400000000000000000000000031046102023000176360ustar 00000000000000[] basic-toml-0.1.10/tests/invalid/table-nested-brackets-close.toml000064400000000000000000000000171046102023000226660ustar 00000000000000[a]b] zyx = 42 basic-toml-0.1.10/tests/invalid/table-nested-brackets-open.toml000064400000000000000000000000171046102023000225220ustar 00000000000000[a[b] zyx = 42 basic-toml-0.1.10/tests/invalid/table-whitespace.toml000064400000000000000000000000151046102023000206370ustar 00000000000000[invalid key]basic-toml-0.1.10/tests/invalid/table-with-pound.toml000064400000000000000000000000271046102023000206040ustar 00000000000000[key#group] answer = 42basic-toml-0.1.10/tests/invalid/text-after-array-entries.toml000064400000000000000000000001061046102023000222650ustar 00000000000000array = [ "Is there life after an array separator?", No "Entry" ] basic-toml-0.1.10/tests/invalid/text-after-integer.toml000064400000000000000000000000411046102023000211330ustar 00000000000000answer = 42 the ultimate answer? basic-toml-0.1.10/tests/invalid/text-after-string.toml000064400000000000000000000000541046102023000210100ustar 00000000000000string = "Is there life after strings?" No. basic-toml-0.1.10/tests/invalid/text-after-table.toml000064400000000000000000000000371046102023000205720ustar 00000000000000[error] this shouldn't be here basic-toml-0.1.10/tests/invalid/text-before-array-separator.toml000064400000000000000000000001071046102023000227560ustar 00000000000000array = [ "Is there life before an array separator?" 
No, "Entry" ] basic-toml-0.1.10/tests/invalid/text-in-array.toml000064400000000000000000000000701046102023000201230ustar 00000000000000array = [ "Entry 1", I don't belong, "Entry 2", ] basic-toml-0.1.10/tests/invalid-encoder/array-mixed-types-ints-and-floats.json000064400000000000000000000004221046102023000254200ustar 00000000000000{ "ints-and-floats": { "type": "array", "value": [ { "type": "integer", "value": "1" }, { "type": "float", "value": "1.1" } ] } } basic-toml-0.1.10/tests/invalid-misc.rs000064400000000000000000000024601046102023000160260ustar 00000000000000use serde_json::Value; macro_rules! bad { ($toml:expr, $msg:expr) => { match basic_toml::from_str::($toml) { Ok(s) => panic!("parsed to: {:#?}", s), Err(e) => assert_eq!(e.to_string(), $msg), } }; } #[test] fn bad() { bad!("a = 01", "invalid number at line 1 column 6"); bad!("a = 1__1", "invalid number at line 1 column 5"); bad!("a = 1_", "invalid number at line 1 column 5"); bad!("''", "expected an equals, found eof at line 1 column 3"); bad!("a = 9e99999", "invalid number at line 1 column 5"); bad!( "a = \"\u{7f}\"", "invalid character in string: `\\u{7f}` at line 1 column 6" ); bad!( "a = '\u{7f}'", "invalid character in string: `\\u{7f}` at line 1 column 6" ); bad!("a = -0x1", "invalid number at line 1 column 5"); bad!("a = 0x-1", "invalid number at line 1 column 7"); // Dotted keys. bad!( "a.b.c = 1 a.b = 2 ", "duplicate key: `b` for key `a` at line 2 column 12" ); bad!( "a = 1 a.b = 2", "dotted key attempted to extend non-table type at line 1 column 5" ); bad!( "a = {k1 = 1, k1.name = \"joe\"}", "dotted key attempted to extend non-table type at line 1 column 11" ); } basic-toml-0.1.10/tests/invalid.rs000064400000000000000000000150441046102023000150770ustar 00000000000000use serde_json::Value; macro_rules! bad { ($toml:expr, $msg:expr) => { match basic_toml::from_str::($toml) { Ok(s) => panic!("parsed to: {:#?}", s), Err(e) => assert_eq!(e.to_string(), $msg), } }; } macro_rules! 
test( ($name:ident, $s:expr, $msg:expr) => ( #[test] fn $name() { bad!($s, $msg); } ) ); test!( datetime_malformed_no_leads, include_str!("invalid/datetime-malformed-no-leads.toml"), "invalid number at line 1 column 12" ); test!( datetime_malformed_no_secs, include_str!("invalid/datetime-malformed-no-secs.toml"), "invalid number at line 1 column 11" ); test!( datetime_malformed_no_t, include_str!("invalid/datetime-malformed-no-t.toml"), "invalid number at line 1 column 8" ); test!( datetime_malformed_with_milli, include_str!("invalid/datetime-malformed-with-milli.toml"), "invalid number at line 1 column 14" ); test!( duplicate_key_table, include_str!("invalid/duplicate-key-table.toml"), "duplicate key: `type` for key `fruit` at line 4 column 8" ); test!( duplicate_keys, include_str!("invalid/duplicate-keys.toml"), "duplicate key: `dupe` at line 2 column 1" ); test!( duplicate_table, include_str!("invalid/duplicate-table.toml"), "redefinition of table `dependencies` for key `dependencies` at line 7 column 1" ); test!( duplicate_tables, include_str!("invalid/duplicate-tables.toml"), "redefinition of table `a` for key `a` at line 2 column 1" ); test!( empty_implicit_table, include_str!("invalid/empty-implicit-table.toml"), "expected a table key, found a period at line 1 column 10" ); test!( empty_table, include_str!("invalid/empty-table.toml"), "expected a table key, found a right bracket at line 1 column 2" ); test!( float_no_leading_zero, include_str!("invalid/float-no-leading-zero.toml"), "expected a value, found a period at line 1 column 10" ); test!( float_no_suffix, include_str!("invalid/float-no-suffix.toml"), "invalid number at line 1 column 5" ); test!( float_no_trailing_digits, include_str!("invalid/float-no-trailing-digits.toml"), "invalid number at line 1 column 12" ); test!( key_after_array, include_str!("invalid/key-after-array.toml"), "expected newline, found an identifier at line 1 column 14" ); test!( key_after_table, 
include_str!("invalid/key-after-table.toml"), "expected newline, found an identifier at line 1 column 11" ); test!( key_empty, include_str!("invalid/key-empty.toml"), "expected a table key, found an equals at line 1 column 2" ); test!( key_hash, include_str!("invalid/key-hash.toml"), "expected an equals, found a comment at line 1 column 2" ); test!( key_newline, include_str!("invalid/key-newline.toml"), "expected an equals, found a newline at line 1 column 2" ); test!( key_open_bracket, include_str!("invalid/key-open-bracket.toml"), "expected a right bracket, found an equals at line 1 column 6" ); test!( key_single_open_bracket, include_str!("invalid/key-single-open-bracket.toml"), "expected a table key, found eof at line 1 column 2" ); test!( key_space, include_str!("invalid/key-space.toml"), "expected an equals, found an identifier at line 1 column 3" ); test!( key_start_bracket, include_str!("invalid/key-start-bracket.toml"), "expected a right bracket, found an equals at line 2 column 6" ); test!( key_two_equals, include_str!("invalid/key-two-equals.toml"), "expected a value, found an equals at line 1 column 6" ); test!( string_bad_byte_escape, include_str!("invalid/string-bad-byte-escape.toml"), "invalid escape character in string: `x` at line 1 column 13" ); test!( string_bad_escape, include_str!("invalid/string-bad-escape.toml"), "invalid escape character in string: `a` at line 1 column 42" ); test!( string_bad_line_ending_escape, include_str!("invalid/string-bad-line-ending-escape.toml"), "invalid escape character in string: ` ` at line 2 column 79" ); test!( string_byte_escapes, include_str!("invalid/string-byte-escapes.toml"), "invalid escape character in string: `x` at line 1 column 12" ); test!( string_no_close, include_str!("invalid/string-no-close.toml"), "newline in string found at line 1 column 42" ); test!( table_array_implicit, include_str!("invalid/table-array-implicit.toml"), "table redefined as array for key `albums` at line 13 column 1" ); 
test!( table_array_malformed_bracket, include_str!("invalid/table-array-malformed-bracket.toml"), "expected a right bracket, found a newline at line 1 column 10" ); test!( table_array_malformed_empty, include_str!("invalid/table-array-malformed-empty.toml"), "expected a table key, found a right bracket at line 1 column 3" ); test!( table_empty, include_str!("invalid/table-empty.toml"), "expected a table key, found a right bracket at line 1 column 2" ); test!( table_nested_brackets_close, include_str!("invalid/table-nested-brackets-close.toml"), "expected newline, found an identifier at line 1 column 4" ); test!( table_nested_brackets_open, include_str!("invalid/table-nested-brackets-open.toml"), "expected a right bracket, found a left bracket at line 1 column 3" ); test!( table_whitespace, include_str!("invalid/table-whitespace.toml"), "expected a right bracket, found an identifier at line 1 column 10" ); test!( table_with_pound, include_str!("invalid/table-with-pound.toml"), "expected a right bracket, found a comment at line 1 column 5" ); test!( text_after_array_entries, include_str!("invalid/text-after-array-entries.toml"), "invalid TOML value, did you mean to use a quoted string? at line 2 column 46" ); test!( text_after_integer, include_str!("invalid/text-after-integer.toml"), "expected newline, found an identifier at line 1 column 13" ); test!( text_after_string, include_str!("invalid/text-after-string.toml"), "expected newline, found an identifier at line 1 column 41" ); test!( text_after_table, include_str!("invalid/text-after-table.toml"), "expected newline, found an identifier at line 1 column 9" ); test!( text_before_array_separator, include_str!("invalid/text-before-array-separator.toml"), "expected a right bracket, found an identifier at line 2 column 46" ); test!( text_in_array, include_str!("invalid/text-in-array.toml"), "invalid TOML value, did you mean to use a quoted string? 
at line 3 column 3" ); basic-toml-0.1.10/tests/parser.rs000064400000000000000000000407701046102023000147510ustar 00000000000000#![allow(clippy::let_underscore_untyped, clippy::uninlined_format_args)] use serde_json::{json, Value}; macro_rules! bad { ($toml:expr, $msg:expr) => { match basic_toml::from_str::($toml) { Ok(s) => panic!("parsed to: {:#?}", s), Err(e) => assert_eq!(e.to_string(), $msg), } }; } #[test] fn crlf() { let toml = "\ [project]\r\n\ \r\n\ name = \"splay\"\r\n\ version = \"0.1.0\"\r\n\ authors = [\"alex@crichton.co\"]\r\n\ \r\n\ [[lib]]\r\n\ \r\n\ path = \"lib.rs\"\r\n\ name = \"splay\"\r\n\ description = \"\"\"\ A Rust implementation of a TAR file reader and writer. This library does not\r\n\ currently handle compression, but it is abstract over all I/O readers and\r\n\ writers. Additionally, great lengths are taken to ensure that the entire\r\n\ contents are never required to be entirely resident in memory all at once.\r\n\ \"\"\"\ "; basic_toml::from_str::(toml).unwrap(); } #[test] fn fun_with_strings() { let toml = r#" bar = "\U00000000" key1 = "One\nTwo" key2 = """One\nTwo""" key3 = """ One Two""" key4 = "The quick brown fox jumps over the lazy dog." key5 = """ The quick brown \ fox jumps over \ the lazy dog.""" key6 = """\ The quick brown \ fox jumps over \ the lazy dog.\ """ # What you see is what you get. winpath = 'C:\Users\nodejs\templates' winpath2 = '\\ServerX\admin$\system32\' quoted = 'Tom "Dubs" Preston-Werner' regex = '<\i\c*\s*>' regex2 = '''I [dw]on't need \d{2} apples''' lines = ''' The first newline is trimmed in raw strings. All other whitespace is preserved. 
''' "#; let table: Value = basic_toml::from_str(toml).unwrap(); assert_eq!(table["bar"], json!("\0")); assert_eq!(table["key1"], json!("One\nTwo")); assert_eq!(table["key2"], json!("One\nTwo")); assert_eq!(table["key3"], json!("One\nTwo")); let msg = "The quick brown fox jumps over the lazy dog."; assert_eq!(table["key4"], json!(msg)); assert_eq!(table["key5"], json!(msg)); assert_eq!(table["key6"], json!(msg)); assert_eq!(table["winpath"], json!(r"C:\Users\nodejs\templates")); assert_eq!(table["winpath2"], json!(r"\\ServerX\admin$\system32\")); assert_eq!(table["quoted"], json!(r#"Tom "Dubs" Preston-Werner"#)); assert_eq!(table["regex"], json!(r"<\i\c*\s*>")); assert_eq!(table["regex2"], json!(r"I [dw]on't need \d{2} apples")); assert_eq!( table["lines"], json!( "The first newline is\n\ trimmed in raw strings.\n\ All other whitespace\n\ is preserved.\n" ) ); } #[test] fn tables_in_arrays() { let toml = " [[foo]] #… [foo.bar] #… [[foo]] # ... #… [foo.bar] #... "; let table: Value = basic_toml::from_str(toml).unwrap(); table["foo"][0]["bar"].as_object().unwrap(); table["foo"][1]["bar"].as_object().unwrap(); } #[test] fn empty_table() { let toml = " [foo]"; let table: Value = basic_toml::from_str(toml).unwrap(); table["foo"].as_object().unwrap(); } #[test] fn fruit() { let toml = r#" [[fruit]] name = "apple" [fruit.physical] color = "red" shape = "round" [[fruit.variety]] name = "red delicious" [[fruit.variety]] name = "granny smith" [[fruit]] name = "banana" [[fruit.variety]] name = "plantain" "#; let table: Value = basic_toml::from_str(toml).unwrap(); assert_eq!(table["fruit"][0]["name"], json!("apple")); assert_eq!(table["fruit"][0]["physical"]["color"], json!("red")); assert_eq!(table["fruit"][0]["physical"]["shape"], json!("round")); assert_eq!( table["fruit"][0]["variety"][0]["name"], json!("red delicious") ); assert_eq!( table["fruit"][0]["variety"][1]["name"], json!("granny smith") ); assert_eq!(table["fruit"][1]["name"], json!("banana")); 
assert_eq!(table["fruit"][1]["variety"][0]["name"], json!("plantain")); } #[test] fn stray_cr() { bad!("\r", "unexpected character found: `\\r` at line 1 column 1"); bad!( "a = [ \r ]", "unexpected character found: `\\r` at line 1 column 7" ); bad!( "a = \"\"\"\r\"\"\"", "invalid character in string: `\\r` at line 1 column 8" ); bad!( "a = \"\"\"\\ \r \"\"\"", "invalid escape character in string: ` ` at line 1 column 9" ); bad!( "a = '''\r'''", "invalid character in string: `\\r` at line 1 column 8" ); bad!( "a = '\r'", "invalid character in string: `\\r` at line 1 column 6" ); bad!( "a = \"\r\"", "invalid character in string: `\\r` at line 1 column 6" ); } #[test] fn blank_literal_string() { let table: Value = basic_toml::from_str("foo = ''").unwrap(); assert_eq!(table["foo"], json!("")); } #[test] fn many_blank() { let table: Value = basic_toml::from_str("foo = \"\"\"\n\n\n\"\"\"").unwrap(); assert_eq!(table["foo"], json!("\n\n")); } #[test] fn literal_eats_crlf() { let toml = " foo = \"\"\"\\\r\n\"\"\" bar = \"\"\"\\\r\n \r\n \r\n a\"\"\" "; let table: Value = basic_toml::from_str(toml).unwrap(); assert_eq!(table["foo"], json!("")); assert_eq!(table["bar"], json!("a")); } #[test] fn string_no_newline() { bad!("a = \"\n\"", "newline in string found at line 1 column 6"); bad!("a = '\n'", "newline in string found at line 1 column 6"); } #[test] fn bad_leading_zeros() { bad!("a = 00", "invalid number at line 1 column 6"); bad!("a = -00", "invalid number at line 1 column 7"); bad!("a = +00", "invalid number at line 1 column 7"); bad!("a = 00.0", "invalid number at line 1 column 6"); bad!("a = -00.0", "invalid number at line 1 column 7"); bad!("a = +00.0", "invalid number at line 1 column 7"); bad!( "a = 9223372036854775808", "invalid number at line 1 column 5" ); bad!( "a = -9223372036854775809", "invalid number at line 1 column 5" ); } #[test] fn bad_floats() { bad!("a = 0.", "invalid number at line 1 column 7"); bad!("a = 0.e", "invalid number at line 1 column 7"); 
bad!("a = 0.E", "invalid number at line 1 column 7"); bad!("a = 0.0E", "invalid number at line 1 column 5"); bad!("a = 0.0e", "invalid number at line 1 column 5"); bad!("a = 0.0e-", "invalid number at line 1 column 9"); bad!("a = 0.0e+", "invalid number at line 1 column 5"); } #[test] fn floats() { macro_rules! t { ($actual:expr, $expected:expr) => {{ let f = format!("foo = {}", $actual); println!("{}", f); let a: Value = basic_toml::from_str(&f).unwrap(); assert_eq!(a["foo"], json!($expected)); }}; } t!("1.0", 1.0); t!("1.0e0", 1.0); t!("1.0e+0", 1.0); t!("1.0e-0", 1.0); t!("1E-0", 1.0); t!("1.001e-0", 1.001); t!("2e10", 2e10); t!("2e+10", 2e10); t!("2e-10", 2e-10); t!("2_0.0", 20.0); t!("2_0.0_0e1_0", 20.0e10); t!("2_0.1_0e1_0", 20.1e10); } #[test] fn bare_key_names() { let toml = " foo = 3 foo_3 = 3 foo_-2--3--r23f--4-f2-4 = 3 _ = 3 - = 3 8 = 8 \"a\" = 3 \"!\" = 3 \"a^b\" = 3 \"\\\"\" = 3 \"character encoding\" = \"value\" 'ʎǝʞ' = \"value\" "; let a: Value = basic_toml::from_str(toml).unwrap(); let _ = &a["foo"]; let _ = &a["-"]; let _ = &a["_"]; let _ = &a["8"]; let _ = &a["foo_3"]; let _ = &a["foo_-2--3--r23f--4-f2-4"]; let _ = &a["a"]; let _ = &a["!"]; let _ = &a["\""]; let _ = &a["character encoding"]; let _ = &a["ʎǝʞ"]; } #[test] fn bad_keys() { bad!( "key\n=3", "expected an equals, found a newline at line 1 column 4" ); bad!( "key=\n3", "expected a value, found a newline at line 1 column 5" ); bad!( "key|=3", "unexpected character found: `|` at line 1 column 4" ); bad!( "=3", "expected a table key, found an equals at line 1 column 1" ); bad!( "\"\"|=3", "unexpected character found: `|` at line 1 column 3" ); bad!("\"\n\"|=3", "newline in string found at line 1 column 2"); bad!( "\"\r\"|=3", "invalid character in string: `\\r` at line 1 column 2" ); bad!( "''''''=3", "multiline strings are not allowed for key at line 1 column 1" ); bad!( "\"\"\"\"\"\"=3", "multiline strings are not allowed for key at line 1 column 1" ); bad!( "'''key'''=3", "multiline 
strings are not allowed for key at line 1 column 1" ); bad!( "\"\"\"key\"\"\"=3", "multiline strings are not allowed for key at line 1 column 1" ); } #[test] fn bad_table_names() { bad!( "[]", "expected a table key, found a right bracket at line 1 column 2" ); bad!( "[.]", "expected a table key, found a period at line 1 column 2" ); bad!( "[a.]", "expected a table key, found a right bracket at line 1 column 4" ); bad!("[!]", "unexpected character found: `!` at line 1 column 2"); bad!("[\"\n\"]", "newline in string found at line 1 column 3"); bad!( "[a.b]\n[a.\"b\"]", "redefinition of table `a.b` for key `a.b` at line 2 column 1" ); bad!("[']", "unterminated string at line 1 column 2"); bad!("[''']", "unterminated string at line 1 column 2"); bad!( "['''''']", "multiline strings are not allowed for key at line 1 column 2" ); bad!( "['''foo''']", "multiline strings are not allowed for key at line 1 column 2" ); bad!( "[\"\"\"bar\"\"\"]", "multiline strings are not allowed for key at line 1 column 2" ); bad!("['\n']", "newline in string found at line 1 column 3"); bad!("['\r\n']", "newline in string found at line 1 column 3"); } #[test] fn table_names() { let toml = " [a.\"b\"] [\"f f\"] [\"f.f\"] [\"\\\"\"] ['a.a'] ['\"\"'] "; let a: Value = basic_toml::from_str(toml).unwrap(); println!("{:?}", a); let _ = &a["a"]["b"]; let _ = &a["f f"]; let _ = &a["f.f"]; let _ = &a["\""]; let _ = &a["\"\""]; } #[test] fn invalid_bare_numeral() { bad!("4", "expected an equals, found eof at line 1 column 2"); } #[test] fn inline_tables() { basic_toml::from_str::("a = {}").unwrap(); basic_toml::from_str::("a = {b=1}").unwrap(); basic_toml::from_str::("a = { b = 1 }").unwrap(); basic_toml::from_str::("a = {a=1,b=2}").unwrap(); basic_toml::from_str::("a = {a=1,b=2,c={}}").unwrap(); bad!( "a = {a=1,}", "expected a table key, found a right brace at line 1 column 10" ); bad!( "a = {,}", "expected a table key, found a comma at line 1 column 6" ); bad!( "a = {a=1,a=1}", "duplicate key: `a` 
for key `a` at line 1 column 10" ); bad!( "a = {\n}", "expected a table key, found a newline at line 1 column 6" ); bad!( "a = {", "expected a table key, found eof at line 1 column 6" ); basic_toml::from_str::("a = {a=[\n]}").unwrap(); basic_toml::from_str::("a = {\"a\"=[\n]}").unwrap(); basic_toml::from_str::("a = [\n{},\n{},\n]").unwrap(); } #[test] fn number_underscores() { macro_rules! t { ($actual:expr, $expected:expr) => {{ let f = format!("foo = {}", $actual); let table: Value = basic_toml::from_str(&f).unwrap(); assert_eq!(table["foo"], json!($expected)); }}; } t!("1_0", 10); t!("1_0_0", 100); t!("1_000", 1000); t!("+1_000", 1000); t!("-1_000", -1000); } #[test] fn bad_underscores() { bad!("foo = 0_", "invalid number at line 1 column 7"); bad!("foo = 0__0", "invalid number at line 1 column 7"); bad!( "foo = __0", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); bad!("foo = 1_0_", "invalid number at line 1 column 7"); } #[test] fn bad_unicode_codepoint() { bad!( "foo = \"\\uD800\"", "invalid escape value: `55296` at line 1 column 9" ); } #[test] fn bad_strings() { bad!( "foo = \"\\uxx\"", "invalid hex escape character in string: `x` at line 1 column 10" ); bad!( "foo = \"\\u\"", "invalid hex escape character in string: `\\\"` at line 1 column 10" ); bad!("foo = \"\\", "unterminated string at line 1 column 7"); bad!("foo = '", "unterminated string at line 1 column 7"); } #[test] fn empty_string() { let table: Value = basic_toml::from_str::("foo = \"\"").unwrap(); assert_eq!(table["foo"], json!("")); } #[test] fn booleans() { let table: Value = basic_toml::from_str("foo = true").unwrap(); assert_eq!(table["foo"], json!(true)); let table: Value = basic_toml::from_str("foo = false").unwrap(); assert_eq!(table["foo"], json!(false)); bad!( "foo = true2", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); bad!( "foo = false2", "invalid TOML value, did you mean to use a quoted string? 
at line 1 column 7" ); bad!( "foo = t1", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); bad!( "foo = f2", "invalid TOML value, did you mean to use a quoted string? at line 1 column 7" ); } #[test] fn bad_nesting() { bad!( " a = [2] [[a]] b = 5 ", "duplicate key: `a` at line 3 column 11" ); bad!( " a = 1 [a.b] ", "duplicate key: `a` at line 3 column 10" ); bad!( " a = [] [a.b] ", "duplicate key: `a` at line 3 column 10" ); bad!( " a = [] [[a.b]] ", "duplicate key: `a` at line 3 column 11" ); bad!( " [a] b = { c = 2, d = {} } [a.b] c = 2 ", "duplicate key: `b` for key `a` at line 4 column 12" ); } #[test] fn bad_table_redefine() { bad!( " [a] foo=\"bar\" [a.b] foo=\"bar\" [a] ", "redefinition of table `a` for key `a` at line 6 column 9" ); bad!( " [a] foo=\"bar\" b = { foo = \"bar\" } [a] ", "redefinition of table `a` for key `a` at line 5 column 9" ); bad!( " [a] b = {} [a.b] ", "duplicate key: `b` for key `a` at line 4 column 12" ); bad!( " [a] b = {} [a] ", "redefinition of table `a` for key `a` at line 4 column 9" ); } #[test] fn datetimes() { bad!( "foo = 2016-09-09T09:09:09Z", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09.1Z", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09.2+10:00", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09.123456789-02:00", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09.Z", "invalid number at line 1 column 7" ); bad!( "foo = 2016-9-09T09:09:09Z", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09+2:00", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09-2:00", "invalid number at line 1 column 7" ); bad!( "foo = 2016-09-09T09:09:09Z-2:00", "invalid number at line 1 column 7" ); } #[test] fn require_newline_after_value() { bad!("0=0r=false", "invalid number at line 1 column 3"); bad!( r#" 0=""o=""m=""r=""00="0"q="""0"""e="""0""" "#, "expected newline, found an 
identifier at line 2 column 5" ); bad!( r#" [[0000l0]] 0="0"[[0000l0]] 0="0"[[0000l0]] 0="0"l="0" "#, "expected newline, found a left bracket at line 3 column 6" ); bad!( r#" 0=[0]00=[0,0,0]t=["0","0","0"]s=[1000-00-00T00:00:00Z,2000-00-00T00:00:00Z] "#, "expected newline, found an identifier at line 2 column 6" ); bad!( " 0=0r0=0r=false ", "invalid number at line 2 column 3" ); bad!( " 0=0r0=0r=falsefal=false ", "invalid number at line 2 column 3" ); } basic-toml-0.1.10/tests/tokens.rs000064400000000000000000000132611046102023000147530ustar 00000000000000#![allow( clippy::elidable_lifetime_names, clippy::let_underscore_untyped, clippy::manual_range_contains, clippy::needless_lifetimes, clippy::needless_pass_by_value, clippy::type_complexity )] #[path = "../src/tokens.rs"] #[allow(dead_code)] mod tokens; use crate::tokens::{Error, Token, Tokenizer}; use std::borrow::Cow; fn err(input: &str, err: Error) { let mut t = Tokenizer::new(input); let token = t.next().unwrap_err(); assert_eq!(token, err); assert!(t.next().unwrap().is_none()); } #[test] fn literal_strings() { fn t(input: &str, val: &str, multiline: bool) { let mut t = Tokenizer::new(input); let (_, token) = t.next().unwrap().unwrap(); assert_eq!( token, Token::String { src: input, val: Cow::Borrowed(val), multiline, } ); assert!(t.next().unwrap().is_none()); } t("''", "", false); t("''''''", "", true); t("'''\n'''", "", true); t("'a'", "a", false); t("'\"a'", "\"a", false); t("''''a'''", "'a", true); t("'''\n'a\n'''", "'a\n", true); t("'''a\n'a\r\n'''", "a\n'a\n", true); } #[test] fn basic_strings() { fn t(input: &str, val: &str, multiline: bool) { let mut t = Tokenizer::new(input); let (_, token) = t.next().unwrap().unwrap(); assert_eq!( token, Token::String { src: input, val: Cow::Borrowed(val), multiline, } ); assert!(t.next().unwrap().is_none()); } t(r#""""#, "", false); t(r#""""""""#, "", true); t(r#""a""#, "a", false); t(r#""""a""""#, "a", true); t(r#""\t""#, "\t", false); t(r#""\u0000""#, "\0", 
false); t(r#""\U00000000""#, "\0", false); t(r#""\U000A0000""#, "\u{A0000}", false); t(r#""\\t""#, "\\t", false); t("\"\t\"", "\t", false); t("\"\"\"\n\t\"\"\"", "\t", true); t("\"\"\"\\\n\"\"\"", "", true); t( "\"\"\"\\\n \t \t \\\r\n \t \n \t \r\n\"\"\"", "", true, ); t(r#""\r""#, "\r", false); t(r#""\n""#, "\n", false); t(r#""\b""#, "\u{8}", false); t(r#""a\fa""#, "a\u{c}a", false); t(r#""\"a""#, "\"a", false); t("\"\"\"\na\"\"\"", "a", true); t("\"\"\"\n\"\"\"", "", true); t(r#""""a\"""b""""#, "a\"\"\"b", true); err(r#""\a"#, Error::InvalidEscape(2, 'a')); err("\"\\\n", Error::InvalidEscape(2, '\n')); err("\"\\\r\n", Error::InvalidEscape(2, '\n')); err("\"\\", Error::UnterminatedString(0)); err("\"\u{0}", Error::InvalidCharInString(1, '\u{0}')); err(r#""\U00""#, Error::InvalidHexEscape(5, '"')); err(r#""\U00"#, Error::UnterminatedString(0)); err(r#""\uD800"#, Error::InvalidEscapeValue(2, 0xd800)); err(r#""\UFFFFFFFF"#, Error::InvalidEscapeValue(2, 0xffff_ffff)); } #[test] fn keylike() { fn t(input: &str) { let mut t = Tokenizer::new(input); let (_, token) = t.next().unwrap().unwrap(); assert_eq!(token, Token::Keylike(input)); assert!(t.next().unwrap().is_none()); } t("foo"); t("0bar"); t("bar0"); t("1234"); t("a-b"); t("a_B"); t("-_-"); t("___"); } #[test] fn all() { fn t(input: &str, expected: &[((usize, usize), Token, &str)]) { let mut tokens = Tokenizer::new(input); let mut actual: Vec<((usize, usize), Token, &str)> = Vec::new(); while let Some((span, token)) = tokens.next().unwrap() { actual.push((span.into(), token, &input[span.start..span.end])); } for (a, b) in actual.iter().zip(expected) { assert_eq!(a, b); } assert_eq!(actual.len(), expected.len()); } t( " a ", &[ ((0, 1), Token::Whitespace(" "), " "), ((1, 2), Token::Keylike("a"), "a"), ((2, 3), Token::Whitespace(" "), " "), ], ); t( " a\t [[]] \t [] {} , . 
=\n# foo \r\n#foo \n ", &[ ((0, 1), Token::Whitespace(" "), " "), ((1, 2), Token::Keylike("a"), "a"), ((2, 4), Token::Whitespace("\t "), "\t "), ((4, 5), Token::LeftBracket, "["), ((5, 6), Token::LeftBracket, "["), ((6, 7), Token::RightBracket, "]"), ((7, 8), Token::RightBracket, "]"), ((8, 11), Token::Whitespace(" \t "), " \t "), ((11, 12), Token::LeftBracket, "["), ((12, 13), Token::RightBracket, "]"), ((13, 14), Token::Whitespace(" "), " "), ((14, 15), Token::LeftBrace, "{"), ((15, 16), Token::RightBrace, "}"), ((16, 17), Token::Whitespace(" "), " "), ((17, 18), Token::Comma, ","), ((18, 19), Token::Whitespace(" "), " "), ((19, 20), Token::Period, "."), ((20, 21), Token::Whitespace(" "), " "), ((21, 22), Token::Equals, "="), ((22, 23), Token::Newline, "\n"), ((23, 29), Token::Comment("# foo "), "# foo "), ((29, 31), Token::Newline, "\r\n"), ((31, 36), Token::Comment("#foo "), "#foo "), ((36, 37), Token::Newline, "\n"), ((37, 38), Token::Whitespace(" "), " "), ], ); } #[test] fn bare_cr_bad() { err("\r", Error::Unexpected(0, '\r')); err("'\n", Error::NewlineInString(1)); err("'\u{0}", Error::InvalidCharInString(1, '\u{0}')); err("'", Error::UnterminatedString(0)); err("\u{0}", Error::Unexpected(0, '\u{0}')); } #[test] fn bad_comment() { let mut t = Tokenizer::new("#\u{0}"); t.next().unwrap().unwrap(); assert_eq!(t.next(), Err(Error::Unexpected(1, '\u{0}'))); assert!(t.next().unwrap().is_none()); } basic-toml-0.1.10/tests/valid/array-empty.json000064400000000000000000000004231046102023000173420ustar 00000000000000{ "thevoid": { "type": "array", "value": [ {"type": "array", "value": [ {"type": "array", "value": [ {"type": "array", "value": [ {"type": "array", "value": []} ]} ]} ]} ]} } basic-toml-0.1.10/tests/valid/array-empty.toml000064400000000000000000000000251046102023000173420ustar 00000000000000thevoid = [[[[[]]]]] basic-toml-0.1.10/tests/valid/array-mixed-types-arrays-and-ints.json000064400000000000000000000004101046102023000234620ustar 00000000000000{ 
"arrays-and-ints": { "type": "array", "value": [ {"type": "integer", "value": "1"}, {"type": "array", "value": [ { "type": "string", "value":"Arrays are not integers."} ]} ] } } basic-toml-0.1.10/tests/valid/array-mixed-types-arrays-and-ints.toml000064400000000000000000000000651046102023000234720ustar 00000000000000arrays-and-ints = [1, ["Arrays are not integers."]] basic-toml-0.1.10/tests/valid/array-mixed-types-ints-and-floats.json000064400000000000000000000002661046102023000234620ustar 00000000000000{ "ints-and-floats": { "type": "array", "value": [ {"type": "integer", "value": "1"}, {"type": "float", "value": "1.1"} ] } } basic-toml-0.1.10/tests/valid/array-mixed-types-ints-and-floats.toml000064400000000000000000000000331046102023000234540ustar 00000000000000ints-and-floats = [1, 1.1] basic-toml-0.1.10/tests/valid/array-mixed-types-strings-and-ints.json000064400000000000000000000002701046102023000236560ustar 00000000000000{ "strings-and-ints": { "type": "array", "value": [ {"type": "string", "value": "hi"}, {"type": "integer", "value": "42"} ] } } basic-toml-0.1.10/tests/valid/array-mixed-types-strings-and-ints.toml000064400000000000000000000000361046102023000236600ustar 00000000000000strings-and-ints = ["hi", 42] basic-toml-0.1.10/tests/valid/array-nospaces.json000064400000000000000000000003321046102023000200160ustar 00000000000000{ "ints": { "type": "array", "value": [ {"type": "integer", "value": "1"}, {"type": "integer", "value": "2"}, {"type": "integer", "value": "3"} ] } } basic-toml-0.1.10/tests/valid/array-nospaces.toml000064400000000000000000000000171046102023000200200ustar 00000000000000ints = [1,2,3] basic-toml-0.1.10/tests/valid/arrays-hetergeneous.json000064400000000000000000000010461046102023000210660ustar 00000000000000{ "mixed": { "type": "array", "value": [ {"type": "array", "value": [ {"type": "integer", "value": "1"}, {"type": "integer", "value": "2"} ]}, {"type": "array", "value": [ {"type": "string", "value": "a"}, {"type": "string", 
"value": "b"} ]}, {"type": "array", "value": [ {"type": "float", "value": "1.1"}, {"type": "float", "value": "2.1"} ]} ] } } basic-toml-0.1.10/tests/valid/arrays-hetergeneous.toml000064400000000000000000000000511046102023000210630ustar 00000000000000mixed = [[1, 2], ["a", "b"], [1.1, 2.1]] basic-toml-0.1.10/tests/valid/arrays-nested.json000064400000000000000000000004411046102023000176510ustar 00000000000000{ "nest": { "type": "array", "value": [ {"type": "array", "value": [ {"type": "string", "value": "a"} ]}, {"type": "array", "value": [ {"type": "string", "value": "b"} ]} ] } } basic-toml-0.1.10/tests/valid/arrays-nested.toml000064400000000000000000000000261046102023000176520ustar 00000000000000nest = [["a"], ["b"]] basic-toml-0.1.10/tests/valid/arrays.json000064400000000000000000000016361046102023000164000ustar 00000000000000{ "ints": { "type": "array", "value": [ {"type": "integer", "value": "1"}, {"type": "integer", "value": "2"}, {"type": "integer", "value": "3"} ] }, "floats": { "type": "array", "value": [ {"type": "float", "value": "1.1"}, {"type": "float", "value": "2.1"}, {"type": "float", "value": "3.1"} ] }, "strings": { "type": "array", "value": [ {"type": "string", "value": "a"}, {"type": "string", "value": "b"}, {"type": "string", "value": "c"} ] }, "dates": { "type": "array", "value": [ {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, {"type": "datetime", "value": "1979-05-27T07:32:00Z"}, {"type": "datetime", "value": "2006-06-01T11:00:00Z"} ] } } basic-toml-0.1.10/tests/valid/arrays.toml000064400000000000000000000002301046102023000163670ustar 00000000000000ints = [1, 2, 3] floats = [1.1, 2.1, 3.1] strings = ["a", "b", "c"] dates = [ 1987-07-05T17:45:00Z, 1979-05-27T07:32:00Z, 2006-06-01T11:00:00Z, ] basic-toml-0.1.10/tests/valid/bool.json000064400000000000000000000001341046102023000160220ustar 00000000000000{ "f": {"type": "bool", "value": "false"}, "t": {"type": "bool", "value": "true"} } 
basic-toml-0.1.10/tests/valid/bool.toml000064400000000000000000000000231046102023000160210ustar 00000000000000t = true f = false basic-toml-0.1.10/tests/valid/comments-everywhere.json000064400000000000000000000004241046102023000211010ustar 00000000000000{ "group": { "answer": {"type": "integer", "value": "42"}, "more": { "type": "array", "value": [ {"type": "integer", "value": "42"}, {"type": "integer", "value": "42"} ] } } } basic-toml-0.1.10/tests/valid/comments-everywhere.toml000064400000000000000000000007111046102023000211020ustar 00000000000000# Top comment. # Top comment. # Top comment. # [no-extraneous-groups-please] [group] # Comment answer = 42 # Comment # no-extraneous-keys-please = 999 # In between comment. more = [ # Comment # What about multiple # comments? # Can you handle it? # # Evil. # Evil. 42, 42, # Comments within arrays are fun. # What about multiple # comments? # Can you handle it? # # Evil. # Evil. # ] Did I fool you? ] # Hopefully not. basic-toml-0.1.10/tests/valid/datetime-truncate.json000064400000000000000000000001551046102023000205110ustar 00000000000000{ "bestdayever": { "type": "datetime", "value": "1987-07-05T17:45:00.123456789Z" } } basic-toml-0.1.10/tests/valid/datetime-truncate.toml000064400000000000000000000000631046102023000205110ustar 00000000000000bestdayever = 1987-07-05T17:45:00.123456789012345Z basic-toml-0.1.10/tests/valid/datetime.json000064400000000000000000000001151046102023000166620ustar 00000000000000{ "bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"} } basic-toml-0.1.10/tests/valid/datetime.toml000064400000000000000000000000431046102023000166640ustar 00000000000000bestdayever = 1987-07-05T17:45:00Z basic-toml-0.1.10/tests/valid/dotted-keys.json000064400000000000000000000007631046102023000173330ustar 00000000000000{ "a": { "b": { "type": "integer", "value": "123" } }, "table": { "a": { "b": { "c": { "type": "integer", "value": "1" }, "d": { "type": "integer", "value": "2" } } }, "in": { "type": { 
"color": { "type": "string", "value": "blue" }, "name": { "type": "string", "value": "cat" } } } } } basic-toml-0.1.10/tests/valid/dotted-keys.toml000064400000000000000000000001401046102023000173220ustar 00000000000000a.b = 123 [table] a.b.c = 1 a . b . d = 2 in = { type.name = "cat", type.color = "blue" } basic-toml-0.1.10/tests/valid/empty.json000064400000000000000000000000031046102023000162200ustar 00000000000000{} basic-toml-0.1.10/tests/valid/empty.toml000064400000000000000000000000001046102023000162170ustar 00000000000000basic-toml-0.1.10/tests/valid/example-bom.toml000064400000000000000000000001361046102023000173010ustar 00000000000000best-day-ever = 1987-07-05T17:45:00Z [numtheory] boring = false perfection = [6, 28, 496] basic-toml-0.1.10/tests/valid/example-v0.3.0.json000064400000000000000000000062271046102023000173550ustar 00000000000000{"Array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"Booleans":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"Datetime":{"key1":{"type":"datetime","value":"1979-05-27T07:32:00Z"}}
,"Float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}}},"Integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"}},"String":{"Literal":{"Multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"Multiline":{"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}},"Multilined":{"Singleline":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}}},"basic":{"type":"string","value":"I'm a string. \"You can quote me\". 
Name\u0009José\nLocation\u0009SF."}},"Table":{"key":{"type":"string","value":"value"}},"dog":{"tater":{"type":{"type":"string","value":"pug"}}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"x":{"y":{"z":{"w":{}}}}} basic-toml-0.1.10/tests/valid/example-v0.3.0.toml000064400000000000000000000070731046102023000173570ustar 00000000000000# Comment # I am a comment. Hear me roar. Roar. # Table # Tables (also known as hash tables or dictionaries) are collections of key/value pairs. # They appear in square brackets on a line by themselves. [Table] key = "value" # Yeah, you can do this. # Nested tables are denoted by table names with dots in them. Name your tables whatever crap you please, just don't use #, ., [ or ]. [dog.tater] type = "pug" # You don't need to specify all the super-tables if you don't want to. TOML knows how to do it for you. # [x] you # [x.y] don't # [x.y.z] need these [x.y.z.w] # for this to work # String # There are four ways to express strings: basic, multi-line basic, literal, and multi-line literal. # All strings must contain only valid UTF-8 characters. [String] basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." [String.Multiline] # The following strings are byte-for-byte equivalent: key1 = "One\nTwo" key2 = """One\nTwo""" key3 = """ One Two""" [String.Multilined.Singleline] # The following strings are byte-for-byte equivalent: key1 = "The quick brown fox jumps over the lazy dog." 
key2 = """ The quick brown \ fox jumps over \ the lazy dog.""" key3 = """\ The quick brown \ fox jumps over \ the lazy dog.\ """ [String.Literal] # What you see is what you get. winpath = 'C:\Users\nodejs\templates' winpath2 = '\\ServerX\admin$\system32\' quoted = 'Tom "Dubs" Preston-Werner' regex = '<\i\c*\s*>' [String.Literal.Multiline] regex2 = '''I [dw]on't need \d{2} apples''' lines = ''' The first newline is trimmed in raw strings. All other whitespace is preserved. ''' # Integer # Integers are whole numbers. Positive numbers may be prefixed with a plus sign. # Negative numbers are prefixed with a minus sign. [Integer] key1 = +99 key2 = 42 key3 = 0 key4 = -17 # Float # A float consists of an integer part (which may be prefixed with a plus or minus sign) # followed by a fractional part and/or an exponent part. [Float.fractional] # fractional key1 = +1.0 key2 = 3.1415 key3 = -0.01 [Float.exponent] # exponent #key1 = 5e+22 #key2 = 1e6 #key3 = -2E-2 [Float.both] # both #key = 6.626e-34 # Boolean # Booleans are just the tokens you're used to. Always lowercase. [Booleans] True = true False = false # Datetime # Datetimes are RFC 3339 dates. [Datetime] key1 = 1979-05-27T07:32:00Z #key2 = 1979-05-27T00:32:00-07:00 #key3 = 1979-05-27T00:32:00.999999-07:00 # Array # Arrays are square brackets with other primitives inside. Whitespace is ignored. Elements are separated by commas. Data types may not be mixed. [Array] key1 = [ 1, 2, 3 ] key2 = [ "red", "yellow", "green" ] key3 = [ [ 1, 2 ], [3, 4, 5] ] key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok #Arrays can also be multiline. So in addition to ignoring whitespace, arrays also ignore newlines between the brackets. # Terminating commas are ok before the closing bracket. key5 = [ 1, 2, 3 ] key6 = [ 1, 2, # this is ok ] # Array of Tables # These can be expressed by using a table name in double brackets. # Each table with the same double bracketed name will be an element in the array. 
# The tables are inserted in the order encountered. [[products]] name = "Hammer" sku = 738594937 [[products]] [[products]] name = "Nail" sku = 284758393 color = "gray" # You can create nested arrays of tables as well. [[fruit]] name = "apple" [fruit.physical] color = "red" shape = "round" [[fruit.variety]] name = "red delicious" [[fruit.variety]] name = "granny smith" [[fruit]] name = "banana" [[fruit.variety]] name = "plantain" basic-toml-0.1.10/tests/valid/example-v0.4.0.json000064400000000000000000000066711046102023000173610ustar 00000000000000{"array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"boolean":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"datetime":{},"float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}},"underscores":{}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"strin
g","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"},"underscores":{"key1":{"type":"integer","value":"1000"},"key2":{"type":"integer","value":"5349221"},"key3":{"type":"integer","value":"12345"}}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"string":{"basic":{"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"literal":{"multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"multiline":{"continued":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy 
dog."}},"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}}},"table":{"inline":{"name":{"first":{"type":"string","value":"Tom"},"last":{"type":"string","value":"Preston-Werner"}},"point":{"x":{"type":"integer","value":"1"},"y":{"type":"integer","value":"2"}}},"key":{"type":"string","value":"value"},"subtable":{"key":{"type":"string","value":"another value"}}},"x":{"y":{"z":{"w":{}}}}} basic-toml-0.1.10/tests/valid/example-v0.4.0.toml000064400000000000000000000120441046102023000173520ustar 00000000000000################################################################################ ## Comment # Speak your mind with the hash symbol. They go from the symbol to the end of # the line. ################################################################################ ## Table # Tables (also known as hash tables or dictionaries) are collections of # key/value pairs. They appear in square brackets on a line by themselves. [table] key = "value" # Yeah, you can do this. # Nested tables are denoted by table names with dots in them. Name your tables # whatever crap you please, just don't use #, ., [ or ]. [table.subtable] key = "another value" # You don't need to specify all the super-tables if you don't want to. TOML # knows how to do it for you. # [x] you # [x.y] don't # [x.y.z] need these [x.y.z.w] # for this to work ################################################################################ ## Inline Table # Inline tables provide a more compact syntax for expressing tables. They are # especially useful for grouped data that can otherwise quickly become verbose. # Inline tables are enclosed in curly braces `{` and `}`. No newlines are # allowed between the curly braces unless they are valid within a value. 
[table.inline] name = { first = "Tom", last = "Preston-Werner" } point = { x = 1, y = 2 } ################################################################################ ## String # There are four ways to express strings: basic, multi-line basic, literal, and # multi-line literal. All strings must contain only valid UTF-8 characters. [string.basic] basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." [string.multiline] # The following strings are byte-for-byte equivalent: key1 = "One\nTwo" key2 = """One\nTwo""" key3 = """ One Two""" [string.multiline.continued] # The following strings are byte-for-byte equivalent: key1 = "The quick brown fox jumps over the lazy dog." key2 = """ The quick brown \ fox jumps over \ the lazy dog.""" key3 = """\ The quick brown \ fox jumps over \ the lazy dog.\ """ [string.literal] # What you see is what you get. winpath = 'C:\Users\nodejs\templates' winpath2 = '\\ServerX\admin$\system32\' quoted = 'Tom "Dubs" Preston-Werner' regex = '<\i\c*\s*>' [string.literal.multiline] regex2 = '''I [dw]on't need \d{2} apples''' lines = ''' The first newline is trimmed in raw strings. All other whitespace is preserved. ''' ################################################################################ ## Integer # Integers are whole numbers. Positive numbers may be prefixed with a plus sign. # Negative numbers are prefixed with a minus sign. [integer] key1 = +99 key2 = 42 key3 = 0 key4 = -17 [integer.underscores] # For large numbers, you may use underscores to enhance readability. Each # underscore must be surrounded by at least one digit. key1 = 1_000 key2 = 5_349_221 key3 = 1_2_3_4_5 # valid but inadvisable ################################################################################ ## Float # A float consists of an integer part (which may be prefixed with a plus or # minus sign) followed by a fractional part and/or an exponent part. 
[float.fractional] key1 = +1.0 key2 = 3.1415 key3 = -0.01 [float.exponent] [float.both] [float.underscores] ################################################################################ ## Boolean # Booleans are just the tokens you're used to. Always lowercase. [boolean] True = true False = false ################################################################################ ## Datetime # Datetimes are RFC 3339 dates. [datetime] #key1 = 1979-05-27T07:32:00Z #key2 = 1979-05-27T00:32:00-07:00 #key3 = 1979-05-27T00:32:00.999999-07:00 ################################################################################ ## Array # Arrays are square brackets with other primitives inside. Whitespace is # ignored. Elements are separated by commas. Since 2019-11-06 data types can be # mixed. [array] key1 = [ 1, 2, 3 ] key2 = [ "red", "yellow", "green" ] key3 = [ [ 1, 2 ], [3, 4, 5] ] key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok # Arrays can also be multiline. So in addition to ignoring whitespace, arrays # also ignore newlines between the brackets. Terminating commas are ok before # the closing bracket. key5 = [ 1, 2, 3 ] key6 = [ 1, 2, # this is ok ] ################################################################################ ## Array of Tables # These can be expressed by using a table name in double brackets. Each table # with the same double bracketed name will be an element in the array. The # tables are inserted in the order encountered. [[products]] name = "Hammer" sku = 738594937 [[products]] [[products]] name = "Nail" sku = 284758393 color = "gray" # You can create nested arrays of tables as well. 
[[fruit]] name = "apple" [fruit.physical] color = "red" shape = "round" [[fruit.variety]] name = "red delicious" [[fruit.variety]] name = "granny smith" [[fruit]] name = "banana" [[fruit.variety]] name = "plantain" basic-toml-0.1.10/tests/valid/example.json000064400000000000000000000005421046102023000165250ustar 00000000000000{ "best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, "numtheory": { "boring": {"type": "bool", "value": "false"}, "perfection": { "type": "array", "value": [ {"type": "integer", "value": "6"}, {"type": "integer", "value": "28"}, {"type": "integer", "value": "496"} ] } } } basic-toml-0.1.10/tests/valid/example.toml000064400000000000000000000001331046102023000165230ustar 00000000000000best-day-ever = 1987-07-05T17:45:00Z [numtheory] boring = false perfection = [6, 28, 496] basic-toml-0.1.10/tests/valid/example2.json000064400000000000000000000025761046102023000166200ustar 00000000000000{"clients":{"data":{"type":"array","value":[{"type":"array","value":[{"type":"string","value":"gamma"},{"type":"string","value":"delta"}]},{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}]},"hosts":{"type":"array","value":[{"type":"string","value":"alpha"},{"type":"string","value":"omega"}]}},"database":{"connection_max":{"type":"integer","value":"5000"},"enabled":{"type":"bool","value":"true"},"ports":{"type":"array","value":[{"type":"integer","value":"8001"},{"type":"integer","value":"8001"},{"type":"integer","value":"8002"}]},"server":{"type":"string","value":"192.168.1.1"}},"owner":{"bio":{"type":"string","value":"GitHub Cofounder \u0026 CEO\nLikes tater tots and beer."},"dob":{"type":"datetime","value":"1979-05-27T07:32:00Z"},"name":{"type":"string","value":"Tom 
Preston-Werner"},"organization":{"type":"string","value":"GitHub"}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"servers":{"alpha":{"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.1"}},"beta":{"country":{"type":"string","value":"中国"},"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.2"}}},"title":{"type":"string","value":"TOML Example"}} basic-toml-0.1.10/tests/valid/example2.toml000064400000000000000000000015531046102023000166140ustar 00000000000000# This is a TOML document. Boom. title = "TOML Example" [owner] name = "Tom Preston-Werner" organization = "GitHub" bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." dob = 1979-05-27T07:32:00Z # First class dates? Why not? [database] server = "192.168.1.1" ports = [ 8001, 8001, 8002 ] connection_max = 5000 enabled = true [servers] # You can indent as you please. Tabs or spaces. TOML don't care. 
[servers.alpha] ip = "10.0.0.1" dc = "eqdc10" [servers.beta] ip = "10.0.0.2" dc = "eqdc10" country = "中国" # This should be parsed as UTF-8 [clients] data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it # Line breaks are OK when inside arrays hosts = [ "alpha", "omega" ] # Products [[products]] name = "Hammer" sku = 738594937 [[products]] name = "Nail" sku = 284758393 color = "gray" basic-toml-0.1.10/tests/valid/float-exponent.json000064400000000000000000000007501046102023000200360ustar 00000000000000{ "lower": {"type": "float", "value": "300.0"}, "upper": {"type": "float", "value": "300.0"}, "neg": {"type": "float", "value": "0.03"}, "pos": {"type": "float", "value": "300.0"}, "zero": {"type": "float", "value": "3.0"}, "pointlower": {"type": "float", "value": "310.0"}, "pointupper": {"type": "float", "value": "310.0"}, "prefix-zero-exp": {"type": "float", "value": "1000000.0"}, "prefix-zero-exp-plus": {"type": "float", "value": "1000000.0"} } basic-toml-0.1.10/tests/valid/float-exponent.toml000064400000000000000000000002231046102023000200330ustar 00000000000000lower = 3e2 upper = 3E2 neg = 3e-2 pos = 3E+2 zero = 3e0 pointlower = 3.1e2 pointupper = 3.1E2 prefix-zero-exp = 1e06 prefix-zero-exp-plus = 1e+06 basic-toml-0.1.10/tests/valid/float.json000064400000000000000000000001431046102023000161740ustar 00000000000000{ "pi": {"type": "float", "value": "3.14"}, "negpi": {"type": "float", "value": "-3.14"} } basic-toml-0.1.10/tests/valid/float.toml000064400000000000000000000000301046102023000161710ustar 00000000000000pi = 3.14 negpi = -3.14 basic-toml-0.1.10/tests/valid/hard_example.json000064400000000000000000000012551046102023000175250ustar 00000000000000{"the":{"hard":{"another_test_string":{"type":"string","value":" Same thing, but with a string #"},"bit#":{"multi_line_array":{"type":"array","value":[{"type":"string","value":"]"}]},"what?":{"type":"string","value":"You don't think some user won't do 
that?"}},"harder_test_string":{"type":"string","value":" And when \"'s are in the string, along with # \""},"test_array":{"type":"array","value":[{"type":"string","value":"] "},{"type":"string","value":" # "}]},"test_array2":{"type":"array","value":[{"type":"string","value":"Test #11 ]proved that"},{"type":"string","value":"Experiment #9 was a success"}]}},"test_string":{"type":"string","value":"You'll hate me after this - #"}}} basic-toml-0.1.10/tests/valid/hard_example.toml000064400000000000000000000025751046102023000175350ustar 00000000000000# Test file for TOML # Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate # This part you'll really hate [the] test_string = "You'll hate me after this - #" # " Annoying, isn't it? [the.hard] test_array = [ "] ", " # "] # ] There you go, parse this! test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ] # You didn't think it'd as easy as chucking out the last #, did you? another_test_string = " Same thing, but with a string #" harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too" # Things will get harder [the.hard."bit#"] "what?" = "You don't think some user won't do that?" multi_line_array = [ "]", # ] Oh yes I did ] # Each of the following keygroups/key value pairs should produce an error. 
Uncomment to them to test #[error] if you didn't catch this, your parser is broken #string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this #array = [ # "This might most likely happen in multiline arrays", # Like here, # "or here, # and here" # ] End of array comment, forgot the # #number = 3.14 pi <--again forgot the # basic-toml-0.1.10/tests/valid/implicit-and-explicit-after.json000064400000000000000000000003021046102023000223540ustar 00000000000000{ "a": { "better": {"type": "integer", "value": "43"}, "b": { "c": { "answer": {"type": "integer", "value": "42"} } } } } basic-toml-0.1.10/tests/valid/implicit-and-explicit-after.toml000064400000000000000000000000451046102023000223620ustar 00000000000000[a.b.c] answer = 42 [a] better = 43 basic-toml-0.1.10/tests/valid/implicit-and-explicit-before.json000064400000000000000000000003021046102023000225150ustar 00000000000000{ "a": { "better": {"type": "integer", "value": "43"}, "b": { "c": { "answer": {"type": "integer", "value": "42"} } } } } basic-toml-0.1.10/tests/valid/implicit-and-explicit-before.toml000064400000000000000000000000451046102023000225230ustar 00000000000000[a] better = 43 [a.b.c] answer = 42 basic-toml-0.1.10/tests/valid/implicit-groups.json000064400000000000000000000002141046102023000202150ustar 00000000000000{ "a": { "b": { "c": { "answer": {"type": "integer", "value": "42"} } } } } basic-toml-0.1.10/tests/valid/implicit-groups.toml000064400000000000000000000000241046102023000202160ustar 00000000000000[a.b.c] answer = 42 basic-toml-0.1.10/tests/valid/integer.json000064400000000000000000000010211046102023000165200ustar 00000000000000{ "answer": {"type": "integer", "value": "42"}, "neganswer": {"type": "integer", "value": "-42"}, "neg_zero": {"type": "integer", "value": "0"}, "pos_zero": {"type": "integer", "value": "0"}, "hex1": {"type": "integer", "value": "3735928559"}, "hex2": {"type": "integer", 
"value": "3735928559"}, "hex3": {"type": "integer", "value": "3735928559"}, "oct1": {"type": "integer", "value": "342391"}, "oct2": {"type": "integer", "value": "493"}, "bin1": {"type": "integer", "value": "214"} } basic-toml-0.1.10/tests/valid/integer.toml000064400000000000000000000004321046102023000165270ustar 00000000000000answer = 42 neganswer = -42 neg_zero = -0 pos_zero = +0 # hexadecimal with prefix `0x` hex1 = 0xDEADBEEF hex2 = 0xdeadbeef hex3 = 0xdead_beef # octal with prefix `0o` oct1 = 0o01234567 oct2 = 0o755 # useful for Unix file permissions # binary with prefix `0b` bin1 = 0b11010110 basic-toml-0.1.10/tests/valid/key-empty.json000064400000000000000000000000541046102023000170140ustar 00000000000000{ "": {"type": "integer", "value": "1"} } basic-toml-0.1.10/tests/valid/key-empty.toml000064400000000000000000000000071046102023000170140ustar 00000000000000"" = 1 basic-toml-0.1.10/tests/valid/key-equals-nospace.json000064400000000000000000000000651046102023000206000ustar 00000000000000{ "answer": {"type": "integer", "value": "42"} } basic-toml-0.1.10/tests/valid/key-equals-nospace.toml000064400000000000000000000000121046102023000205720ustar 00000000000000answer=42 basic-toml-0.1.10/tests/valid/key-quote-newline.json000064400000000000000000000000601046102023000204470ustar 00000000000000{ "\n": {"type": "integer", "value": "1"} } basic-toml-0.1.10/tests/valid/key-quote-newline.toml000064400000000000000000000000111046102023000204450ustar 00000000000000"\n" = 1 basic-toml-0.1.10/tests/valid/key-space.json000064400000000000000000000000611046102023000167470ustar 00000000000000{ "a b": {"type": "integer", "value": "1"} } basic-toml-0.1.10/tests/valid/key-space.toml000064400000000000000000000000121046102023000167450ustar 00000000000000"a b" = 1 basic-toml-0.1.10/tests/valid/key-special-chars.json000064400000000000000000000001421046102023000203720ustar 00000000000000{ "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'": { "type": "integer", "value": "1" } } 
basic-toml-0.1.10/tests/valid/key-special-chars.toml000064400000000000000000000000551046102023000203770ustar 00000000000000"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'" = 1 basic-toml-0.1.10/tests/valid/key-with-pound.json000064400000000000000000000000661046102023000177570ustar 00000000000000{ "key#name": {"type": "integer", "value": "5"} } basic-toml-0.1.10/tests/valid/key-with-pound.toml000064400000000000000000000000171046102023000177550ustar 00000000000000"key#name" = 5 basic-toml-0.1.10/tests/valid/long-float.json000064400000000000000000000002051046102023000171300ustar 00000000000000{ "longpi": {"type": "float", "value": "3.141592653589793"}, "neglongpi": {"type": "float", "value": "-3.141592653589793"} } basic-toml-0.1.10/tests/valid/long-float.toml000064400000000000000000000000721046102023000171340ustar 00000000000000longpi = 3.141592653589793 neglongpi = -3.141592653589793 basic-toml-0.1.10/tests/valid/long-integer.json000064400000000000000000000002151046102023000174610ustar 00000000000000{ "answer": {"type": "integer", "value": "9223372036854775807"}, "neganswer": {"type": "integer", "value": "-9223372036854775808"} } basic-toml-0.1.10/tests/valid/long-integer.toml000064400000000000000000000000761046102023000174700ustar 00000000000000answer = 9223372036854775807 neganswer = -9223372036854775808 basic-toml-0.1.10/tests/valid/multiline-string.json000064400000000000000000000016101046102023000203750ustar 00000000000000{ "multiline_empty_one": { "type": "string", "value": "" }, "multiline_empty_two": { "type": "string", "value": "" }, "multiline_empty_three": { "type": "string", "value": "" }, "multiline_empty_four": { "type": "string", "value": "" }, "multiline_empty_five": { "type": "string", "value": "" }, "equivalent_one": { "type": "string", "value": "The quick brown fox jumps over the lazy dog." }, "equivalent_two": { "type": "string", "value": "The quick brown fox jumps over the lazy dog." 
}, "equivalent_three": { "type": "string", "value": "The quick brown fox jumps over the lazy dog." }, "equivalent_four": { "type": "string", "value": "The quick brown fox jumps over the lazy dog." } } basic-toml-0.1.10/tests/valid/multiline-string.toml000064400000000000000000000010601046102023000203760ustar 00000000000000multiline_empty_one = """""" multiline_empty_two = """ """ multiline_empty_three = """\ """ multiline_empty_four = """\ \ \ """ multiline_empty_five = """\ \ \ \ """ equivalent_one = "The quick brown fox jumps over the lazy dog." equivalent_two = """ The quick brown \ fox jumps over \ the lazy dog.""" equivalent_three = """\ The quick brown \ fox jumps over \ the lazy dog.\ """ equivalent_four = """\ The quick brown \ fox jumps over \ the lazy dog.\ """ basic-toml-0.1.10/tests/valid/quote-surrounded-value.json000064400000000000000000000002371046102023000215320ustar 00000000000000{ "double": { "type": "string", "value": "\"double quotes here\"" }, "single": { "type": "string", "value": "'single quotes here'" } } basic-toml-0.1.10/tests/valid/quote-surrounded-value.toml000064400000000000000000000001001046102023000215210ustar 00000000000000double = '"double quotes here"' single = "'single quotes here'" basic-toml-0.1.10/tests/valid/raw-multiline-string.json000064400000000000000000000005471046102023000211740ustar 00000000000000{ "oneline": { "type": "string", "value": "This string has a ' quote character." }, "firstnl": { "type": "string", "value": "This string has a ' quote character." }, "multiline": { "type": "string", "value": "This string\nhas ' a quote character\nand more than\none newline\nin it." 
} } basic-toml-0.1.10/tests/valid/raw-multiline-string.toml000064400000000000000000000003031046102023000211640ustar 00000000000000oneline = '''This string has a ' quote character.''' firstnl = ''' This string has a ' quote character.''' multiline = ''' This string has ' a quote character and more than one newline in it.''' basic-toml-0.1.10/tests/valid/raw-string.json000064400000000000000000000014171046102023000171710ustar 00000000000000{ "backspace": { "type": "string", "value": "This string has a \\b backspace character." }, "tab": { "type": "string", "value": "This string has a \\t tab character." }, "newline": { "type": "string", "value": "This string has a \\n new line character." }, "formfeed": { "type": "string", "value": "This string has a \\f form feed character." }, "carriage": { "type": "string", "value": "This string has a \\r carriage return character." }, "slash": { "type": "string", "value": "This string has a \\/ slash character." }, "backslash": { "type": "string", "value": "This string has a \\\\ backslash character." } } basic-toml-0.1.10/tests/valid/raw-string.toml000064400000000000000000000005651046102023000171760ustar 00000000000000backspace = 'This string has a \b backspace character.' tab = 'This string has a \t tab character.' newline = 'This string has a \n new line character.' formfeed = 'This string has a \f form feed character.' carriage = 'This string has a \r carriage return character.' slash = 'This string has a \/ slash character.' backslash = 'This string has a \\ backslash character.' 
basic-toml-0.1.10/tests/valid/string-delim-end.json000064400000000000000000000003531046102023000202340ustar 00000000000000{ "str1": { "type": "string", "value": "\"This,\" she said, \"is just a pointless statement.\"" }, "str2": { "type": "string", "value": "foo''bar''" }, "str3": { "type": "string", "value": "\"\"" } }basic-toml-0.1.10/tests/valid/string-delim-end.toml000064400000000000000000000001501046102023000202310ustar 00000000000000str1 = """"This," she said, "is just a pointless statement."""" str2 = '''foo''bar''''' str3 = """""""" basic-toml-0.1.10/tests/valid/string-empty.json000064400000000000000000000001101046102023000175230ustar 00000000000000{ "answer": { "type": "string", "value": "" } } basic-toml-0.1.10/tests/valid/string-empty.toml000064400000000000000000000000141046102023000175300ustar 00000000000000answer = "" basic-toml-0.1.10/tests/valid/string-escapes.json000064400000000000000000000031641046102023000200240ustar 00000000000000{ "backspace": { "type": "string", "value": "This string has a \u0008 backspace character." }, "tab": { "type": "string", "value": "This string has a \u0009 tab character." }, "newline": { "type": "string", "value": "This string has a \u000A new line character." }, "formfeed": { "type": "string", "value": "This string has a \u000C form feed character." }, "carriage": { "type": "string", "value": "This string has a \u000D carriage return character." }, "quote": { "type": "string", "value": "This string has a \u0022 quote character." }, "slash": { "type": "string", "value": "This string has a \u002F slash character." }, "backslash": { "type": "string", "value": "This string has a \u005C backslash character." }, "notunicode1": { "type": "string", "value": "This string does not have a unicode \\u escape." }, "notunicode2": { "type": "string", "value": "This string does not have a unicode \u005Cu escape." }, "notunicode3": { "type": "string", "value": "This string does not have a unicode \\u0075 escape." 
}, "notunicode4": { "type": "string", "value": "This string does not have a unicode \\\u0075 escape." }, "delete": { "type": "string", "value": "This string has a \u007f delete control code." }, "unitseparator": { "type": "string", "value": "This string has a \u001f unit separator control code." } } basic-toml-0.1.10/tests/valid/string-escapes.toml000064400000000000000000000014621046102023000200250ustar 00000000000000backspace = "This string has a \b backspace character." tab = "This string has a \t tab character." newline = "This string has a \n new line character." formfeed = "This string has a \f form feed character." carriage = "This string has a \r carriage return character." quote = "This string has a \" quote character." slash = "This string has a / slash character." backslash = "This string has a \\ backslash character." notunicode1 = "This string does not have a unicode \\u escape." notunicode2 = "This string does not have a unicode \u005Cu escape." notunicode3 = "This string does not have a unicode \\u0075 escape." notunicode4 = "This string does not have a unicode \\\u0075 escape." delete = "This string has a \u007F delete control code." unitseparator = "This string has a \u001F unit separator control code." basic-toml-0.1.10/tests/valid/string-simple.json000064400000000000000000000001531046102023000176650ustar 00000000000000{ "answer": { "type": "string", "value": "You are not drinking enough whisky." } } basic-toml-0.1.10/tests/valid/string-simple.toml000064400000000000000000000000571046102023000176720ustar 00000000000000answer = "You are not drinking enough whisky." basic-toml-0.1.10/tests/valid/string-with-pound.json000064400000000000000000000002711046102023000204730ustar 00000000000000{ "pound": {"type": "string", "value": "We see no # comments here."}, "poundcomment": { "type": "string", "value": "But there are # some comments here." 
} } basic-toml-0.1.10/tests/valid/string-with-pound.toml000064400000000000000000000001611046102023000204730ustar 00000000000000pound = "We see no # comments here." poundcomment = "But there are # some comments here." # Did I # mess you up? basic-toml-0.1.10/tests/valid/table-array-implicit.json000064400000000000000000000001641046102023000211050ustar 00000000000000{ "albums": { "songs": [ {"name": {"type": "string", "value": "Glory Days"}} ] } } basic-toml-0.1.10/tests/valid/table-array-implicit.toml000064400000000000000000000000451046102023000211050ustar 00000000000000[[albums.songs]] name = "Glory Days" basic-toml-0.1.10/tests/valid/table-array-many.json000064400000000000000000000007271046102023000202440ustar 00000000000000{ "people": [ { "first_name": {"type": "string", "value": "Bruce"}, "last_name": {"type": "string", "value": "Springsteen"} }, { "first_name": {"type": "string", "value": "Eric"}, "last_name": {"type": "string", "value": "Clapton"} }, { "first_name": {"type": "string", "value": "Bob"}, "last_name": {"type": "string", "value": "Seger"} } ] } basic-toml-0.1.10/tests/valid/table-array-many.toml000064400000000000000000000002431046102023000202370ustar 00000000000000[[people]] first_name = "Bruce" last_name = "Springsteen" [[people]] first_name = "Eric" last_name = "Clapton" [[people]] first_name = "Bob" last_name = "Seger" basic-toml-0.1.10/tests/valid/table-array-nest-no-keys.json000064400000000000000000000002721046102023000216270ustar 00000000000000{ "albums": [ { "songs": [{}, {}] } ], "artists": [ { "home": { "address": {} } } ] } basic-toml-0.1.10/tests/valid/table-array-nest-no-keys.toml000064400000000000000000000001411046102023000216240ustar 00000000000000[[ albums ]] [[ albums.songs ]] [[ albums.songs ]] [[ artists ]] [ artists.home.address ] basic-toml-0.1.10/tests/valid/table-array-nest.json000064400000000000000000000010721046102023000202430ustar 00000000000000{ "albums": [ { "name": {"type": "string", "value": "Born to Run"}, "songs": [ 
{"name": {"type": "string", "value": "Jungleland"}}, {"name": {"type": "string", "value": "Meeting Across the River"}} ] }, { "name": {"type": "string", "value": "Born in the USA"}, "songs": [ {"name": {"type": "string", "value": "Glory Days"}}, {"name": {"type": "string", "value": "Dancing in the Dark"}} ] } ] } basic-toml-0.1.10/tests/valid/table-array-nest.toml000064400000000000000000000004061046102023000202450ustar 00000000000000[[albums]] name = "Born to Run" [[albums.songs]] name = "Jungleland" [[albums.songs]] name = "Meeting Across the River" [[albums]] name = "Born in the USA" [[albums.songs]] name = "Glory Days" [[albums.songs]] name = "Dancing in the Dark" basic-toml-0.1.10/tests/valid/table-array-one.json000064400000000000000000000002621046102023000200530ustar 00000000000000{ "people": [ { "first_name": {"type": "string", "value": "Bruce"}, "last_name": {"type": "string", "value": "Springsteen"} } ] } basic-toml-0.1.10/tests/valid/table-array-one.toml000064400000000000000000000000721046102023000200540ustar 00000000000000[[people]] first_name = "Bruce" last_name = "Springsteen" basic-toml-0.1.10/tests/valid/table-empty.json000064400000000000000000000000201046102023000173040ustar 00000000000000{ "a": {} } basic-toml-0.1.10/tests/valid/table-empty.toml000064400000000000000000000000041046102023000173100ustar 00000000000000[a] basic-toml-0.1.10/tests/valid/table-multi-empty.json000064400000000000000000000000741046102023000204450ustar 00000000000000{ "a": { "b": {} }, "b": {}, "c": { "a": {} } } basic-toml-0.1.10/tests/valid/table-multi-empty.toml000064400000000000000000000000301046102023000204370ustar 00000000000000[a] [a.b] [b] [c] [c.a] basic-toml-0.1.10/tests/valid/table-sub-empty.json000064400000000000000000000000311046102023000200750ustar 00000000000000{ "a": { "b": {} } } basic-toml-0.1.10/tests/valid/table-sub-empty.toml000064400000000000000000000000121046102023000200760ustar 00000000000000[a] [a.b] 
basic-toml-0.1.10/tests/valid/table-whitespace.json000064400000000000000000000000301046102023000203030ustar 00000000000000{ "valid key": {} } basic-toml-0.1.10/tests/valid/table-whitespace.toml000064400000000000000000000000161046102023000203110ustar 00000000000000["valid key"] basic-toml-0.1.10/tests/valid/table-with-pound.json000064400000000000000000000001221046102023000202470ustar 00000000000000{ "key#group": { "answer": {"type": "integer", "value": "42"} } } basic-toml-0.1.10/tests/valid/table-with-pound.toml000064400000000000000000000000321046102023000202510ustar 00000000000000["key#group"] answer = 42 basic-toml-0.1.10/tests/valid/unicode-escape.json000064400000000000000000000005171046102023000177600ustar 00000000000000{ "answer1": {"type": "string", "value": "\u000B"}, "answer4": {"type": "string", "value": "\u03B4α"}, "answer8": {"type": "string", "value": "\u03B4β"}, "answer9": {"type": "string", "value": "\uc0de"}, "answer10": {"type": "string", "value": "\u03B4α"}, "answer11": {"type": "string", "value": "\uABC1"} } basic-toml-0.1.10/tests/valid/unicode-escape.toml000064400000000000000000000002021046102023000177510ustar 00000000000000answer1 = "\u000B" answer4 = "\u03B4α" answer8 = "\U000003B4β" answer9 = "\uc0de" answer10 = "\u03b4α" answer11 = "\U0000abc1" basic-toml-0.1.10/tests/valid/unicode-literal.json000064400000000000000000000000641046102023000201510ustar 00000000000000{ "answer": {"type": "string", "value": "δ"} } basic-toml-0.1.10/tests/valid/unicode-literal.toml000064400000000000000000000000161046102023000201500ustar 00000000000000answer = "δ" basic-toml-0.1.10/tests/valid.rs000064400000000000000000000224031046102023000145450ustar 00000000000000#![allow( clippy::match_like_matches_macro, clippy::needless_pass_by_value, clippy::uninlined_format_args )] use serde_json::{json, Value}; fn to_json(toml: Value) -> Value { fn doit(s: &str, json: Value) -> Value { json!({ "type": s, "value": json }) } match toml { Value::Null => unreachable!(), 
Value::String(s) => doit("string", Value::String(s)),
        Value::Number(n) => {
            // serde_json renders floats with a '.' and integers without one,
            // so the string repr distinguishes the two TOML types.
            let repr = n.to_string();
            if repr.contains('.') {
                let float: f64 = repr.parse().unwrap();
                // Print with 15 fractional digits, drop the trailing zeros,
                // but keep at least one digit after the decimal point.
                let mut normalized = format!("{:.15}", float);
                normalized.truncate(normalized.trim_end_matches('0').len());
                if normalized.ends_with('.') {
                    normalized.push('0');
                }
                doit("float", Value::String(normalized))
            } else {
                doit("integer", Value::String(repr))
            }
        }
        Value::Bool(b) => doit("bool", Value::String(b.to_string())),
        Value::Array(arr) => {
            // An array whose first element is a table (array-of-tables) is
            // emitted as a plain JSON array; any other array is wrapped by
            // `doit` like the scalar cases above.
            let is_table = matches!(arr.first(), Some(&Value::Object(_)));
            let converted = Value::Array(arr.into_iter().map(to_json).collect());
            if is_table {
                converted
            } else {
                doit("array", converted)
            }
        }
        Value::Object(table) => {
            // Recursively convert each value; keys pass through unchanged.
            Value::Object(table.into_iter().map(|(k, v)| (k, to_json(v))).collect())
        }
    }
}

/// Parses `toml_raw` with basic_toml, checks that its JSON encoding equals
/// the expected `json_raw` fixture, then checks that serializing and
/// reparsing the value reproduces it (round trip). Panics on any mismatch.
fn run(toml_raw: &str, json_raw: &str) {
    println!("parsing:\n{}", toml_raw);
    let toml: Value = basic_toml::from_str(toml_raw).unwrap();
    let json: Value = serde_json::from_str(json_raw).unwrap();

    // The parsed TOML, converted to the test-suite JSON encoding, must match
    // the expected JSON fixture exactly.
    let toml_json = to_json(toml.clone());
    assert!(
        json == toml_json,
        "expected\n{}\ngot\n{}\n",
        serde_json::to_string_pretty(&json).unwrap(),
        serde_json::to_string_pretty(&toml_json).unwrap()
    );

    // Round trip: serialize back to TOML, reparse, and compare values.
    println!("round trip parse: {}", toml);
    let reserialized = basic_toml::to_string(&toml).unwrap();
    let reparsed: Value = basic_toml::from_str(&reserialized).unwrap();
    assert_eq!(toml, reparsed);
}

// Generates one #[test] per fixture pair; each test feeds the embedded
// TOML and expected-JSON strings through `run`.
macro_rules! test {
    ($name:ident, $toml:expr, $json:expr) => {
        #[test]
        fn $name() {
            run($toml, $json);
        }
    };
}

// `#[cfg(any())]` is never true, so the invocations it precedes are
// compiled out unconditionally (fixtures this crate does not run).
test!(array_empty, include_str!("valid/array-empty.toml"), include_str!("valid/array-empty.json"));
test!(array_nospaces, include_str!("valid/array-nospaces.toml"), include_str!("valid/array-nospaces.json"));
test!(arrays_hetergeneous, include_str!("valid/arrays-hetergeneous.toml"), include_str!("valid/arrays-hetergeneous.json"));
#[cfg(any())]
test!(arrays, include_str!("valid/arrays.toml"), include_str!("valid/arrays.json"));
test!(arrays_nested, include_str!("valid/arrays-nested.toml"), include_str!("valid/arrays-nested.json"));
test!(array_mixed_types_ints_and_floats, include_str!("valid/array-mixed-types-ints-and-floats.toml"), include_str!("valid/array-mixed-types-ints-and-floats.json"));
test!(array_mixed_types_arrays_and_ints, include_str!("valid/array-mixed-types-arrays-and-ints.toml"), include_str!("valid/array-mixed-types-arrays-and-ints.json"));
test!(array_mixed_types_strings_and_ints, include_str!("valid/array-mixed-types-strings-and-ints.toml"), include_str!("valid/array-mixed-types-strings-and-ints.json"));
test!(empty, include_str!("valid/empty.toml"), include_str!("valid/empty.json"));
test!(bool, include_str!("valid/bool.toml"), include_str!("valid/bool.json"));
test!(comments_everywhere, include_str!("valid/comments-everywhere.toml"), include_str!("valid/comments-everywhere.json"));
#[cfg(any())]
test!(datetime, include_str!("valid/datetime.toml"), include_str!("valid/datetime.json"));
#[cfg(any())]
test!(example, include_str!("valid/example.toml"), include_str!("valid/example.json"));
test!(float, include_str!("valid/float.toml"), include_str!("valid/float.json"));
#[cfg(any())]
test!(implicit_and_explicit_after, include_str!("valid/implicit-and-explicit-after.toml"), include_str!("valid/implicit-and-explicit-after.json"));
#[cfg(any())]
test!(implicit_and_explicit_before, include_str!("valid/implicit-and-explicit-before.toml"), include_str!("valid/implicit-and-explicit-before.json"));
test!(implicit_groups, include_str!("valid/implicit-groups.toml"), include_str!("valid/implicit-groups.json"));
test!(integer, include_str!("valid/integer.toml"), include_str!("valid/integer.json"));
test!(key_equals_nospace, include_str!("valid/key-equals-nospace.toml"), include_str!("valid/key-equals-nospace.json"));
test!(key_space, include_str!("valid/key-space.toml"), include_str!("valid/key-space.json"));
test!(key_special_chars, include_str!("valid/key-special-chars.toml"), include_str!("valid/key-special-chars.json"));
test!(key_with_pound, include_str!("valid/key-with-pound.toml"), include_str!("valid/key-with-pound.json"));
test!(key_empty, include_str!("valid/key-empty.toml"), include_str!("valid/key-empty.json"));
test!(long_float, include_str!("valid/long-float.toml"), include_str!("valid/long-float.json"));
test!(long_integer, include_str!("valid/long-integer.toml"), include_str!("valid/long-integer.json"));
test!(multiline_string, include_str!("valid/multiline-string.toml"), include_str!("valid/multiline-string.json"));
test!(raw_multiline_string, include_str!("valid/raw-multiline-string.toml"), include_str!("valid/raw-multiline-string.json"));
test!(raw_string, include_str!("valid/raw-string.toml"), include_str!("valid/raw-string.json"));
test!(string_empty, include_str!("valid/string-empty.toml"), include_str!("valid/string-empty.json"));
test!(string_escapes, include_str!("valid/string-escapes.toml"), include_str!("valid/string-escapes.json"));
test!(string_simple, include_str!("valid/string-simple.toml"), include_str!("valid/string-simple.json"));
test!(string_with_pound, include_str!("valid/string-with-pound.toml"), include_str!("valid/string-with-pound.json"));
test!(table_array_implicit, include_str!("valid/table-array-implicit.toml"), include_str!("valid/table-array-implicit.json"));
test!(table_array_many, include_str!("valid/table-array-many.toml"), include_str!("valid/table-array-many.json"));
test!(table_array_nest, include_str!("valid/table-array-nest.toml"), include_str!("valid/table-array-nest.json"));
test!(table_array_one, include_str!("valid/table-array-one.toml"), include_str!("valid/table-array-one.json"));
test!(table_empty, include_str!("valid/table-empty.toml"), include_str!("valid/table-empty.json"));
test!(table_sub_empty, include_str!("valid/table-sub-empty.toml"), include_str!("valid/table-sub-empty.json"));
test!(table_multi_empty, include_str!("valid/table-multi-empty.toml"), include_str!("valid/table-multi-empty.json"));
test!(table_whitespace, include_str!("valid/table-whitespace.toml"), include_str!("valid/table-whitespace.json"));
test!(table_with_pound, include_str!("valid/table-with-pound.toml"), include_str!("valid/table-with-pound.json"));
test!(unicode_escape, include_str!("valid/unicode-escape.toml"), include_str!("valid/unicode-escape.json"));
test!(unicode_literal, include_str!("valid/unicode-literal.toml"), include_str!("valid/unicode-literal.json"));
#[cfg(any())]
test!(hard_example, include_str!("valid/hard_example.toml"), include_str!("valid/hard_example.json"));
#[cfg(any())]
test!(example2, include_str!("valid/example2.toml"), include_str!("valid/example2.json"));
#[cfg(any())]
test!(example3, include_str!("valid/example-v0.3.0.toml"), include_str!("valid/example-v0.3.0.json"));
#[cfg(any())]
test!(example4, include_str!("valid/example-v0.4.0.toml"), include_str!("valid/example-v0.4.0.json"));
#[cfg(any())]
test!(example_bom, include_str!("valid/example-bom.toml"), include_str!("valid/example.json"));
#[cfg(any())]
test!(datetime_truncate, include_str!("valid/datetime-truncate.toml"), include_str!("valid/datetime-truncate.json"));
test!(key_quote_newline, include_str!("valid/key-quote-newline.toml"), include_str!("valid/key-quote-newline.json"));
test!(table_array_nest_no_keys, include_str!("valid/table-array-nest-no-keys.toml"), include_str!("valid/table-array-nest-no-keys.json"));
test!(dotted_keys, include_str!("valid/dotted-keys.toml"), include_str!("valid/dotted-keys.json"));
test!(quote_surrounded_value, include_str!("valid/quote-surrounded-value.toml"), include_str!("valid/quote-surrounded-value.json"));
test!(float_exponent, include_str!("valid/float-exponent.toml"), include_str!("valid/float-exponent.json"));
test!(string_delim_end, include_str!("valid/string-delim-end.toml"), include_str!("valid/string-delim-end.json"));