gvdb-0.7.1/.cargo_vcs_info.json0000644000000001420000000000100117600ustar { "git": { "sha1": "7433aefc484eee2daca5a8035789618869a34e65" }, "path_in_vcs": "gvdb" }gvdb-0.7.1/Cargo.toml0000644000000040110000000000100077550ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.75" name = "gvdb" version = "0.7.1" exclude = ["test/c"] description = "Implementation of the glib gvdb file format" readme = "README.md" keywords = [ "gvdb", "glib", "gresource", "compile-resources", ] categories = [ "gui", "data-structures", "encoding", ] license = "MIT OR Apache-2.0" repository = "https://github.com/felinira/gvdb-rs" [package.metadata.docs.rs] all-features = true [dependencies.flate2] version = "1.0" optional = true [dependencies.glib] version = "0.20" optional = true [dependencies.memmap2] version = "0.9" optional = true [dependencies.quick-xml] version = "0.36" features = ["serialize"] optional = true [dependencies.serde] version = "1.0" features = ["derive"] [dependencies.serde_json] version = "1.0" optional = true [dependencies.walkdir] version = "2.3" optional = true [dependencies.zerocopy] version = "0.7" features = ["derive"] [dependencies.zvariant] version = "4.0" features = ["gvariant"] default-features = false [dev-dependencies.flate2] version = "1.0" features = ["zlib"] [dev-dependencies.glib] version = "0.20" [dev-dependencies.lazy_static] version = "1.4" [dev-dependencies.matches] version = "0.1" [dev-dependencies.pretty_assertions] version = "1.2" [dev-dependencies.rand] version = "0.8.5" [dev-dependencies.serde_json] version = "1.0" [dev-dependencies.tempfile] version = "3.10.1" [features] default = [] glib = ["dep:glib"] gresource = [ "dep:quick-xml", "dep:serde_json", "dep:flate2", "dep:walkdir", ] mmap = ["dep:memmap2"] gvdb-0.7.1/Cargo.toml.orig000064400000000000000000000023361046102023000134460ustar 00000000000000[package] name = "gvdb" version = "0.7.1" description = "Implementation of the glib gvdb file format" exclude = ["test/c"] edition.workspace = true rust-version.workspace = true license.workspace = true repository.workspace = true keywords.workspace = true categories.workspace = true [package.metadata.docs.rs] all-features = true [dependencies] zerocopy = { version = "0.7", features = ["derive"] } serde = { version = "1.0", features = ["derive"] } zvariant = { version = "4.0", default-features = false, features = [ "gvariant", ] } flate2 = { version = "1.0", optional = true } glib = { version = "0.20", optional = true } quick-xml = { version = "0.36", optional = true, features = ["serialize"] } memmap2 = { version = "0.9", optional = true } serde_json = { version = "1.0", optional = true } walkdir = { version = "2.3", optional = true } [dev-dependencies] # Use zlib for binary compatibility in tests flate2 = { version = "1.0", features = ["zlib"] } glib = "0.20" lazy_static = "1.4" matches = "0.1" pretty_assertions = "1.2" rand = "0.8.5" serde_json = "1.0" tempfile = "3.10.1" [features] mmap = ["dep:memmap2"] gresource = ["dep:quick-xml", "dep:serde_json", "dep:flate2", "dep:walkdir"] glib = ["dep:glib"] default = [] 
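# A downstream crate opts into the optional functionality declared above via
# feature flags. A minimal sketch of a hypothetical consumer manifest (not part
# of this package):
#
# [dependencies]
# gvdb = { version = "0.7.1", features = ["gresource", "mmap"] }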
gvdb-0.7.1/LICENSES/Apache-2.0.txt000064400000000000000000000240501046102023000142000ustar 00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. gvdb-0.7.1/LICENSES/CC0-1.0.txt000064400000000000000000000156101046102023000133650ustar 00000000000000Creative Commons Legal Code CC0 1.0 Universal CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER. Statement of Purpose The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 1. Copyright and Related Rights. A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; ii. moral rights retained by the original author(s) and/or performer(s); iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; v. rights protecting the extraction, dissemination, use and reuse of data in a Work; vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and vii. 
other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 2. Waiver. To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 3. Public License Fallback. Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 4. Limitations and Disclaimers. a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. c. 
Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. gvdb-0.7.1/LICENSES/MIT.txt000064400000000000000000000020661046102023000131560ustar 00000000000000MIT License Copyright (c) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. gvdb-0.7.1/README.md000064400000000000000000000062271046102023000120410ustar 00000000000000# About this crate This is an implementation of the glib GVariant database file format in Rust. It includes a GResource XML parser and the ability to create compatible GResource files. [![Crates.io](https://img.shields.io/crates/v/gvdb)](https://crates.io/crates/gvdb) ## MSRV The minimum supported rust version of this crate is 1.75. ## Example ### Create a GResource file Create a GResource file from XML with `GResourceXMLDocument` and `GResourceBuilder`. Requires the `gresource` feature to be enabled. ```rust #[cfg(feature = "gresource")] mod gresource { use std::borrow::Cow; use std::path::PathBuf; use gvdb::gresource::BundleBuilder; use gvdb::gresource::XmlManifest; use gvdb::read::File; const GRESOURCE_XML: &str = "test-data/gresource/test3.gresource.xml"; fn create_gresource() { let doc = XmlManifest::from_file(&PathBuf::from(GRESOURCE_XML)).unwrap(); let builder = BundleBuilder::from_xml(doc).unwrap(); let data = builder.build().unwrap(); // To immediately read this data again, we can create a file reader from the data let root = File::from_bytes(Cow::Owned(data)).unwrap(); } } ``` Create a simple GVDB file with `FileWriter` ```rust use gvdb::write::{FileWriter, HashTableBuilder}; fn create_gvdb_file() { let mut file_writer = FileWriter::new(); let mut table_builder = HashTableBuilder::new(); table_builder .insert_string("string", "test string") .unwrap(); let mut table_builder_2 = HashTableBuilder::new(); table_builder_2 .insert("int", 42u32) .unwrap(); table_builder .insert_table("table", table_builder_2) .unwrap(); let file_data = file_writer.write_to_vec_with_table(table_builder).unwrap(); } ``` ### Read a GVDB file The stored data at `/gvdb/rs/test/online-symbolic.svg` corresponds to the `(uuay)` GVariant type signature. 
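In GVariant notation, `u` is an unsigned 32-bit integer and `ay` is a byte array, so a `(uuay)` value is a structure of size, flags, and raw content; the example below maps it onto a Rust struct that derives `serde::Deserialize` and `zvariant::Type`.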
```rust use gvdb::read::File; use std::path::PathBuf; pub fn main() { let path = PathBuf::from("test-data/test3.gresource"); let file = File::from_file(&path).unwrap(); let table = file.hash_table().unwrap(); #[derive(serde::Deserialize, zvariant::Type)] struct GResourceData { size: u32, flags: u32, content: Vec<u8>, } let svg: GResourceData = table.get("/gvdb/rs/test/online-symbolic.svg").unwrap(); assert_eq!(svg.size, 1390); assert_eq!(svg.flags, 0); assert_eq!(svg.size as usize, svg.content.len() - 1); // Ensure the last byte is zero because of zero-padding defined in the format assert_eq!(svg.content[svg.content.len() - 1], 0); let svg_str = std::str::from_utf8(&svg.content[0..svg.content.len() - 1]).unwrap(); println!("{}", svg_str); } ``` ## License `gvdb` and `gvdb-macros` are available under the MIT OR Apache-2.0 license. See the [LICENSES](./LICENSES) folder for the complete license text. SVG icon files included in `test-data/gresource/icons/` are available under the CC0-1.0 license and redistributed from [Icon Development Kit](https://gitlab.gnome.org/Teams/Design/icon-development-kit). See the [CC0-1.0.txt](./LICENSES/CC0-1.0.txt) file for the complete license text.
gvdb-0.7.1/src/gresource/bundle/error.rs
use std::fmt::{Debug, Display, Formatter}; use std::path::PathBuf; /// Error type for creating a GResource XML file #[non_exhaustive] pub enum BuilderError { /// An internal error occurred during creation of the GVDB file Gvdb(crate::write::Error), /// I/O error Io(std::io::Error, Option<PathBuf>), /// This error can occur when using xml-stripblanks and the provided XML file is invalid Xml(quick_xml::Error, Option<PathBuf>), /// A file needs to be interpreted as UTF-8 (for stripping whitespace etc.) but it is invalid Utf8(std::str::Utf8Error, Option<PathBuf>), /// This error can occur when using json-stripblanks and the provided JSON file is invalid Json(serde_json::Error, Option<PathBuf>), /// Error when canonicalizing a path from an absolute to a relative path StripPrefix(std::path::StripPrefixError, PathBuf), /// This feature is not implemented in gvdb-rs Unimplemented(String), } impl BuilderError { pub(crate) fn from_io_with_filename<P>( filename: Option<P>, ) -> impl FnOnce(std::io::Error) -> BuilderError where P: Into<PathBuf>, { let path = filename.map(|p| p.into()); move |err| BuilderError::Io(err, path) } } impl std::error::Error for BuilderError {} impl From<crate::write::Error> for BuilderError { fn from(err: crate::write::Error) -> Self { Self::Gvdb(err) } } impl Display for BuilderError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { BuilderError::Xml(err, path) => { if let Some(path) = path { write!( f, "Error processing XML data for file '{}': {}", path.display(), err ) } else { write!(f, "Error processing XML data: {}", err) } } BuilderError::Io(err, path) => { if let Some(path) = path { write!(f, "I/O error for file '{}': {}", path.display(), err) } else { write!(f, "I/O error: {}", err) } } BuilderError::Json(err, path) => { if let Some(path) = path { write!( f, "Error parsing JSON from file: '{}': {}", path.display(), err ) } else { write!(f, "Error reading/writing JSON data: {}", err) } } BuilderError::Utf8(err, path) => { if let Some(path) = path { write!( f, "Error converting file '{}' to UTF-8: {}", path.display(), err ) } else { write!(f, "Error converting data to UTF-8: {}", err) } } BuilderError::Unimplemented(err) => { write!(f, "{}", err) } BuilderError::Gvdb(err) => { write!(f, "Error while creating GVDB file: {:?}", err) } BuilderError::StripPrefix(err, path) => { write!( f, "Error when canonicalizing path '{:?}' from an absolute to a relative path: {}", path, err ) } } } } impl Debug for BuilderError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { Display::fmt(self, f) } } /// Result type for [`BuilderError`] pub type BuilderResult<T> = std::result::Result<T, BuilderError>; #[cfg(test)] mod test { use super::*; #[test] fn from() { let io_res = std::fs::File::open("test/invalid_file_name"); let err = BuilderError::Io(io_res.unwrap_err(), None); assert!(format!("{}", err).contains("I/O")); let io_res = std::fs::File::open("test/invalid_file_name"); let err = BuilderError::from_io_with_filename(Some("test"))(io_res.unwrap_err()); assert!(format!("{}", err).contains("test")); let writer_error = crate::write::Error::Consistency("test".to_string()); let err = BuilderError::from(writer_error); assert!(format!("{}", err).contains("test")); let err = BuilderError::Xml( quick_xml::Error::Syntax(quick_xml::errors::SyntaxError::UnclosedTag), Some(PathBuf::from("test_file")), ); assert!(format!("{}", err).contains("test_file")); let err = BuilderError::Xml( quick_xml::Error::Syntax(quick_xml::errors::SyntaxError::UnclosedTag), None, ); assert!(format!("{}", err).contains("XML")); } }
gvdb-0.7.1/src/gresource/bundle.rs
mod error; pub use error::*; use crate::gresource::xml::PreprocessOptions; use crate::write::{FileWriter, HashTableBuilder}; use flate2::write::ZlibEncoder; use std::borrow::Cow; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use walkdir::WalkDir; const FLAG_COMPRESSED: u32 = 1 << 0; static SKIPPED_FILE_EXTENSIONS_DEFAULT: &[&str] = &["meson.build", "gresource.xml", ".gitignore", ".license"]; static COMPRESS_EXTENSIONS_DEFAULT: &[&str] = &[".ui", ".css"]; /// A container for a GResource data object /// /// Allows reading a file from the filesystem. The file is then preprocessed and compressed.
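/// Compression, when requested, uses zlib via the `flate2` crate at its highest compression level.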
/// /// ``` /// # use std::path::PathBuf; /// use gvdb::gresource::{PreprocessOptions, FileData}; /// /// let mut key = "/my/app/id/icons/scalable/actions/send-symbolic.svg".to_string(); /// let mut filename = PathBuf::from("test-data/gresource/icons/scalable/actions/send-symbolic.svg"); /// /// let preprocess_options = PreprocessOptions::empty(); /// let file_data = /// FileData::from_file(key, &filename, true, &preprocess_options).unwrap(); /// ``` #[derive(Debug)] pub struct FileData<'a> { key: String, data: Cow<'a, [u8]>, flags: u32, /// uncompressed data is zero-terminated /// compressed data is not size: u32, } impl<'a> FileData<'a> { /// Create a new `GResourceFileData` from raw bytes /// /// The `path` parameter is used for error output, and should be set to a valid filesystem path /// if possible or `None` if not applicable. /// /// Preprocessing will be applied based on the `preprocess` parameter. /// Will compress the data if `compressed` is set. /// /// ``` /// # use std::borrow::Cow; /// use std::path::PathBuf; /// use gvdb::gresource::{FileData, PreprocessOptions}; /// /// let mut key = "/my/app/id/style.css".to_string(); /// let mut filename = PathBuf::from("path/to/style.css"); /// /// let preprocess_options = PreprocessOptions::empty(); /// let data: Vec = vec![1, 2, 3, 4]; /// let file_data = /// FileData::new(key, Cow::Owned(data), None, true, &preprocess_options).unwrap(); /// ``` pub fn new( key: String, data: Cow<'a, [u8]>, path: Option, compressed: bool, preprocess: &PreprocessOptions, ) -> BuilderResult { let mut flags = 0; let mut data = Self::preprocess(data, preprocess, path.clone())?; let size = data.len() as u32; if compressed { data = Self::compress(data, path)?; flags |= FLAG_COMPRESSED; } else { data.to_mut().push(0); } Ok(Self { key, data, flags, size, }) } /// Read the data from a file /// /// Preprocessing will be applied based on the `preprocess` parameter. /// Will compress the data if `compressed` is set. /// /// ``` /// # use std::path::PathBuf; /// use gvdb::gresource::{FileData, PreprocessOptions}; /// /// let mut key = "/my/app/id/icons/scalable/actions/send-symbolic.svg".to_string(); /// let mut filename = PathBuf::from("test-data/gresource/icons/scalable/actions/send-symbolic.svg"); /// /// let preprocess_options = PreprocessOptions::empty(); /// let file_data = /// FileData::from_file(key, &filename, true, &preprocess_options).unwrap(); /// ``` pub fn from_file( key: String, file_path: &Path, compressed: bool, preprocess: &PreprocessOptions, ) -> BuilderResult { let mut open_file = std::fs::File::open(file_path) .map_err(BuilderError::from_io_with_filename(Some(file_path)))?; let mut data = Vec::new(); open_file .read_to_end(&mut data) .map_err(BuilderError::from_io_with_filename(Some(file_path)))?; FileData::new( key, Cow::Owned(data), Some(file_path.to_path_buf()), compressed, preprocess, ) } fn xml_stripblanks(data: Cow<'a, [u8]>, path: Option) -> BuilderResult> { let output = Vec::new(); let mut reader = quick_xml::Reader::from_str( std::str::from_utf8(&data).map_err(|err| BuilderError::Utf8(err, path.clone()))?, ); reader.config_mut().trim_text(true); let mut writer = quick_xml::Writer::new(std::io::Cursor::new(output)); loop { match reader .read_event() .map_err(|err| BuilderError::Xml(err, path.clone()))? 
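// With `trim_text(true)` set on the reader above, whitespace-only text nodes are dropped, so re-emitting every remaining event through the writer reproduces the document without the insignificant blanks.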
{ quick_xml::events::Event::Eof => break, event => writer .write_event(event) .map_err(|err| BuilderError::Xml(err, path.clone()))?, } } Ok(Cow::Owned(writer.into_inner().into_inner())) } fn json_stripblanks( data: Cow<'a, [u8]>, path: Option, ) -> BuilderResult> { let string = std::str::from_utf8(&data).map_err(|err| BuilderError::Utf8(err, path.clone()))?; let json: serde_json::Value = serde_json::from_str(string).map_err(|err| BuilderError::Json(err, path.clone()))?; let mut output = json.to_string().as_bytes().to_vec(); output.push(b'\n'); Ok(Cow::Owned(output)) } fn preprocess( mut data: Cow<'a, [u8]>, options: &PreprocessOptions, path: Option, ) -> BuilderResult> { if options.xml_stripblanks { data = Self::xml_stripblanks(data, path.clone())?; } if options.json_stripblanks { data = Self::json_stripblanks(data, path)?; } if options.to_pixdata { return Err(BuilderError::Unimplemented( "to-pixdata is deprecated since gdk-pixbuf 2.32 and not supported by gvdb-rs" .to_string(), )); } Ok(data) } fn compress(data: Cow<'a, [u8]>, path: Option) -> BuilderResult> { let mut encoder = ZlibEncoder::new(Vec::new(), flate2::Compression::best()); encoder .write_all(&data) .map_err(BuilderError::from_io_with_filename(path.clone()))?; Ok(Cow::Owned( encoder .finish() .map_err(BuilderError::from_io_with_filename(path))?, )) } /// Return the `key` of this `FileData` pub fn key(&self) -> &str { &self.key } } /// We define equality as key equality only. The resulting file can only have one file for each key. impl<'a> std::cmp::PartialEq for FileData<'a> { fn eq(&self, other: &Self) -> bool { self.key == other.key } } impl<'a> std::cmp::Eq for FileData<'a> {} /// We define ordering as key ordering only. The resulting file can only have one file for each key. impl<'a> std::cmp::PartialOrd for FileData<'a> { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } impl<'a> std::cmp::Ord for FileData<'a> { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.key.cmp(&other.key) } } /// GResource data value /// /// This is the format in which all GResource files are stored in the GVDB file. /// /// The size is the *uncompressed* size and can be used for verification purposes. /// The flags only indicate whether a file is compressed or not. 
(Compressed = 1) #[derive(zvariant::Type, zvariant::Value, zvariant::OwnedValue)] pub struct Data { size: u32, flags: u32, data: Vec, } /// Create a GResource binary file /// /// # Example /// /// Create a GResource XML file with [`XmlManifest`][crate::gresource::XmlManifest] and /// [`BundleBuilder`] /// ``` /// use std::borrow::Cow; /// use std::path::PathBuf; /// use gvdb::gresource::BundleBuilder; /// use gvdb::gresource::XmlManifest; /// use gvdb::read::File; /// /// const GRESOURCE_XML: &str = "test/data/gresource/test3.gresource.xml"; /// /// fn create_gresource() { /// let doc = XmlManifest::from_file(&PathBuf::from(GRESOURCE_XML)).unwrap(); /// let builder = BundleBuilder::from_xml(doc).unwrap(); /// let data = builder.build().unwrap(); /// let root = File::from_bytes(Cow::Owned(data)).unwrap(); /// } /// ``` #[derive(Debug)] pub struct BundleBuilder<'a> { files: Vec>, } impl<'a> BundleBuilder<'a> { /// Create this builder from a GResource XML file pub fn from_xml(xml: super::xml::XmlManifest) -> BuilderResult { let mut files = Vec::new(); for gresource in &xml.gresources { for file in &gresource.files { let mut key = gresource.prefix.clone(); if !key.ends_with('/') { key.push('/'); } if let Some(alias) = &file.alias { key.push_str(alias); } else { key.push_str(&file.filename); } let mut filename = xml.dir.clone(); filename.push(PathBuf::from(&file.filename)); let file_data = FileData::from_file(key, &filename, file.compressed, &file.preprocess)?; files.push(file_data); } } Ok(Self { files }) } /// Scan a directory and create a GResource file with all the contents of the directory. /// /// This will ignore any files that end with gresource.xml and meson.build, as /// those are most likely not needed inside the GResource. /// /// This is equivalent to the following XML: /// /// ```xml /// /// /// /// /// /// ``` /// /// ## `prefix` /// /// The prefix for the gresource section /// /// ## `directory` /// /// The root directory of the included files /// /// ## `strip_blanks` /// /// Acts as if every xml file uses the option `xml-stripblanks` in the GResource XML and every /// JSON file uses `json-stripblanks`. /// /// JSON files are all files with the extension '.json'. /// XML files are all files with the extensions '.xml', '.ui', '.svg' /// /// ## `compress` /// /// Compresses all files that end with the preconfigured patterns. /// Compressed files are currently: ".ui", ".css" pub fn from_directory( prefix: &str, directory: &Path, strip_blanks: bool, compress: bool, ) -> BuilderResult { let compress_extensions = if compress { COMPRESS_EXTENSIONS_DEFAULT } else { &[] }; Self::from_directory_with_extensions( prefix, directory, strip_blanks, compress_extensions, SKIPPED_FILE_EXTENSIONS_DEFAULT, ) } /// Like `from_directory` but allows you to specify the extensions directories yourself /// /// ## `compress_extensions` /// /// All files that end with these strings will get compressed /// /// ## `skipped_file_extensions` /// /// Skip all files that end with this string pub fn from_directory_with_extensions( prefix: &str, directory: &Path, strip_blanks: bool, compress_extensions: &[&str], skipped_file_extensions: &[&str], ) -> BuilderResult { let mut prefix = prefix.to_string(); if !prefix.ends_with('/') { prefix.push('/'); } let mut files = Vec::new(); 'outer: for res in WalkDir::new(directory).into_iter() { let entry = match res { Ok(entry) => entry, Err(err) => { let path = err.path().map(|p| p.to_path_buf()); Err(BuilderError::Io(err.into(), path))? 
} }; if entry.path().is_file() { let filename: &str = match entry.file_name().try_into() { Ok(name) => name, Err(err) => return Err(BuilderError::Utf8(err, Some(entry.path().to_owned()))), }; for name in skipped_file_extensions { if filename.ends_with(name) { continue 'outer; } } let mut compress_this = false; for name in compress_extensions { if filename.ends_with(name) { compress_this = true; break; } } let file_abs_path = entry.path(); let file_path_relative = match file_abs_path.strip_prefix(directory) { Ok(path) => path, Err(err) => { return Err(BuilderError::StripPrefix(err, file_abs_path.to_owned())) } }; let file_path_str_relative: &str = match file_path_relative.as_os_str().try_into() { Ok(name) => name, Err(err) => { return Err(BuilderError::Utf8(err, Some(file_path_relative.to_owned()))) } }; let options = if strip_blanks && file_path_str_relative.ends_with(".json") { PreprocessOptions::json_stripblanks() } else if strip_blanks && file_path_str_relative.ends_with(".xml") || file_path_str_relative.ends_with(".ui") || file_path_str_relative.ends_with(".svg") { PreprocessOptions::xml_stripblanks() } else { PreprocessOptions::empty() }; let key = format!("{}{}", prefix, file_path_str_relative); let file_data = FileData::from_file(key, file_abs_path, compress_this, &options)?; files.push(file_data); } } // Make sure the files are sorted in a reproducible way to ensure reproducible builds files.sort(); Ok(Self { files }) } /// Create a new Builder from a `Vec`. /// /// This is the most flexible way to create a GResource file, but also the most hands-on. pub fn from_file_data(files: Vec>) -> Self { Self { files } } /// Build the binary GResource data pub fn build(self) -> BuilderResult> { let builder = FileWriter::new(); let mut table_builder = HashTableBuilder::new(); for file_data in self.files.into_iter() { let data = Data { size: file_data.size, flags: file_data.flags, data: file_data.data.to_vec(), }; table_builder.insert_value(file_data.key(), zvariant::Value::from(data))?; } Ok(builder.write_to_vec_with_table(table_builder)?) 
} } #[cfg(test)] mod test { use super::*; use crate::gresource::xml::XmlManifest; use crate::read::File; use crate::test::{assert_is_file_3, byte_compare_file_3, GRESOURCE_DIR, GRESOURCE_XML}; use matches::assert_matches; use std::ffi::OsStr; use zvariant::Type; #[test] fn file_data() { let doc = XmlManifest::from_file(&GRESOURCE_XML).unwrap(); let builder = BundleBuilder::from_xml(doc).unwrap(); for file in &builder.files { assert!(file.key().starts_with("/gvdb/rs/test")); assert!( [ "/gvdb/rs/test/online-symbolic.svg", "/gvdb/rs/test/icons/scalable/actions/send-symbolic.svg", "/gvdb/rs/test/json/test.json", "/gvdb/rs/test/test.css" ] .contains(&file.key()), "Unknown file with key: {}", file.key() ); // Make sure the Eq implementation works as expected for file2 in &builder.files { if std::ptr::eq(file as *const FileData, file2 as *const FileData) { assert_eq!(file, file2); } else { assert_ne!(file, file2); } } } } #[test] fn from_dir_file_data() { for preprocess in [true, false] { let builder = BundleBuilder::from_directory( "/gvdb/rs/test", &GRESOURCE_DIR, preprocess, preprocess, ) .unwrap(); for file in builder.files { assert!(file.key().starts_with("/gvdb/rs/test")); assert!( [ "/gvdb/rs/test/icons/scalable/actions/online-symbolic.svg", "/gvdb/rs/test/icons/scalable/actions/send-symbolic.svg", "/gvdb/rs/test/json/test.json", "/gvdb/rs/test/test.css", "/gvdb/rs/test/test3.gresource.xml" ] .contains(&file.key()), "Unknown file with key: {}", file.key() ); } } } #[test] fn from_dir_invalid() { let res = BundleBuilder::from_directory( "/gvdb/rs/test", &PathBuf::from("INVALID_DIR"), false, false, ); assert!(res.is_err()); let err = res.unwrap_err(); assert_matches!(err, BuilderError::Io(..)); } #[test] fn test_file_3() { let doc = XmlManifest::from_file(&GRESOURCE_XML).unwrap(); let builder = BundleBuilder::from_xml(doc).unwrap(); let data = builder.build().unwrap(); let root = File::from_bytes(Cow::Owned(data)).unwrap(); assert_is_file_3(&root); byte_compare_file_3(&root); } #[test] fn test_file_from_dir() { let builder = BundleBuilder::from_directory("/gvdb/rs/test", &GRESOURCE_DIR, true, true).unwrap(); let data = builder.build().unwrap(); let root = File::from_bytes(Cow::Owned(data)).unwrap(); let table = root.hash_table().unwrap(); let mut names = table.keys().collect::, _>>().unwrap(); names.sort(); let reference_names = vec![ "/", "/gvdb/", "/gvdb/rs/", "/gvdb/rs/test/", "/gvdb/rs/test/icons/", "/gvdb/rs/test/icons/scalable/", "/gvdb/rs/test/icons/scalable/actions/", "/gvdb/rs/test/icons/scalable/actions/online-symbolic.svg", "/gvdb/rs/test/icons/scalable/actions/send-symbolic.svg", "/gvdb/rs/test/json/", "/gvdb/rs/test/json/test.json", "/gvdb/rs/test/test.css", ]; assert_eq!(names, reference_names); let svg2 = zvariant::Structure::try_from( table .get_value("/gvdb/rs/test/icons/scalable/actions/send-symbolic.svg") .unwrap(), ) .unwrap() .into_fields(); let svg2_size = u32::try_from(&svg2[0]).unwrap(); let svg2_flags = u32::try_from(&svg2[1]).unwrap(); let svg2_data = >::try_from(svg2[2].try_clone().unwrap()).unwrap(); assert_eq!(svg2_size, 339); assert_eq!(svg2_flags, 0); // Check for null byte assert_eq!(svg2_data[svg2_data.len() - 1], 0); assert_eq!(svg2_size as usize, svg2_data.len() - 1); } #[test] /// Make sure from_dir reproducibly creates an identical file fn test_from_dir_reproducible_build() { let mut last_data = None; use rand::prelude::*; fn copy_random_order(from: &Path, to: &Path) { let mut rng = rand::thread_rng(); let mut files: Vec = std::fs::read_dir(from) 
.unwrap() .map(|d| d.unwrap()) .collect(); files.shuffle(&mut rng); for entry in files.iter() { let destination = to.join(entry.file_name()); println!("copy file: {:?} to: {:?}", entry, destination); let file_type = entry.file_type().unwrap(); if file_type.is_file() { std::fs::copy(entry.path(), &destination).unwrap(); } else if file_type.is_dir() { std::fs::create_dir(&destination).unwrap(); copy_random_order(&entry.path(), &destination); } } } for _ in 0..10 { // Create a new directory with inodes in random order let test_dir = tempfile::tempdir().unwrap(); // Randomize order of root files and copy to test dir copy_random_order(&GRESOURCE_DIR, test_dir.path()); let builder = BundleBuilder::from_directory("/gvdb/rs/test", test_dir.path(), true, true) .unwrap(); let data = builder.build().unwrap(); if let Some(last_data) = last_data { assert_eq!(last_data, data); } last_data = Some(data); } } #[test] #[cfg(unix)] fn test_from_dir_invalid() { use std::os::unix::ffi::OsStrExt; let invalid_utf8 = OsStr::from_bytes(&[0xC3, 0x28]); let mut dir: PathBuf = ["test-data", "temp2"].iter().collect(); dir.push(invalid_utf8); std::fs::create_dir_all(&dir).unwrap(); std::fs::File::create(dir.join("test.xml")).unwrap(); let res = BundleBuilder::from_directory("test", dir.parent().unwrap(), false, false); let _ = std::fs::remove_file(dir.join("test.xml")); let _ = std::fs::remove_dir(&dir); std::fs::remove_dir(dir.parent().unwrap()).unwrap(); let err = res.unwrap_err(); println!("{}", err); assert_matches!(err, BuilderError::Utf8(_, _)); assert!(format!("{}", err).contains("UTF-8")); } #[test] fn test_invalid_utf8_json() { use std::os::unix::ffi::OsStrExt; let invalid_utf8 = OsStr::from_bytes(&[0xC3, 0x28]); let dir: PathBuf = ["test-data", "temp3"].iter().collect(); std::fs::create_dir_all(&dir).unwrap(); let mut file = std::fs::File::create(dir.join("test.json")).unwrap(); let _ = file.write(invalid_utf8.as_bytes()); let res = BundleBuilder::from_directory("test", &dir, true, true); let _ = std::fs::remove_file(dir.join("test.json")); let _ = std::fs::remove_dir(&dir); let err = res.unwrap_err(); println!("{}", err); assert_matches!(err, BuilderError::Utf8(..)); assert!(format!("{}", err).contains("UTF-8")); } #[test] fn test_from_file_data() { let path = GRESOURCE_DIR.join("json").join("test.json"); let file_data = FileData::from_file( "test.json".to_string(), &path, false, &PreprocessOptions::empty(), ) .unwrap(); println!("{:?}", file_data); let builder = BundleBuilder::from_file_data(vec![file_data]); println!("{:?}", builder); let _ = builder.build().unwrap(); } #[test] fn to_pixdata() { let path = GRESOURCE_DIR.join("json").join("test.json"); let mut options = PreprocessOptions::empty(); options.to_pixdata = true; let err = FileData::from_file("test.json".to_string(), &path, false, &options).unwrap_err(); assert_matches!(err, BuilderError::Unimplemented(_)); assert!(format!("{}", err).contains("to-pixdata is deprecated")); } #[test] fn xml_stripblanks() { for path in [Some(PathBuf::from("test")), None] { let xml = "), /// Generic I/O error occurred when handling XML file Io(std::io::Error, Option), /// A file needs to be interpreted as UTF-8 (for stripping whitespace etc.) 
but it is invalid Utf8(std::str::Utf8Error, Option), } impl XmlManifestError { pub(crate) fn from_io_with_filename( filename: &std::path::Path, ) -> impl FnOnce(std::io::Error) -> XmlManifestError { let path = filename.to_path_buf(); move |err| XmlManifestError::Io(err, Some(path)) } } impl std::error::Error for XmlManifestError {} impl std::fmt::Display for XmlManifestError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { XmlManifestError::Serde(err, path) => { if let Some(path) = path { write!(f, "Error parsing XML file '{}': {}", path.display(), err) } else { write!(f, "Error parsing XML file: {}", err) } } XmlManifestError::Io(err, path) => { if let Some(path) = path { write!(f, "I/O error for file '{}': {}", path.display(), err) } else { write!(f, "I/O error: {}", err) } } XmlManifestError::Utf8(err, path) => { if let Some(path) = path { write!( f, "Error converting file '{}' to UTF-8: {}", path.display(), err ) } else { write!(f, "Error converting data to UTF-8: {}", err) } } } } } impl std::fmt::Debug for XmlManifestError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self, f) } } /// Result type for XmlManifestError pub type XmlManifestResult = std::result::Result; #[cfg(test)] mod test { use super::*; #[test] fn from() { let io_res = std::fs::File::open("test/invalid_file_name"); let err = XmlManifestError::Io(io_res.unwrap_err(), None); assert!(format!("{}", err).contains("I/O")); } } gvdb-0.7.1/src/gresource/xml.rs000064400000000000000000000245771046102023000145250ustar 00000000000000mod error; pub use error::*; use serde::de::Error; use serde::Deserialize; use std::borrow::Cow; use std::io::Read; use std::path::{Path, PathBuf}; /// A GResource XML document #[derive(Debug, Deserialize, PartialEq, Eq)] #[serde(deny_unknown_fields)] #[non_exhaustive] pub struct XmlManifest { /// The list of GResource sections #[serde(rename = "gresource")] pub gresources: Vec, /// The directory of the XML file #[serde(default)] pub dir: PathBuf, } /// A GResource section inside a GResource XML document #[derive(Debug, Deserialize, PartialEq, Eq)] #[serde(deny_unknown_fields)] #[non_exhaustive] pub struct GResource { /// The files for this GResource section #[serde(rename = "file", default)] pub files: Vec, /// An optional prefix to prepend to the containing file keys #[serde(default, rename = "@prefix")] pub prefix: String, } /// A file within a GResource section #[derive(Debug, Deserialize, PartialEq, Eq)] #[serde(deny_unknown_fields)] #[non_exhaustive] pub struct File { /// The on-disk file name of the file #[serde(rename = "$value")] pub filename: String, /// The alias for this file if it should be named differently inside the GResource file #[serde(rename = "@alias")] pub alias: Option, /// Whether the file should be compressed using zlib #[serde(deserialize_with = "parse_bool_value", default, rename = "@compressed")] pub compressed: bool, /// A list of preprocessing options #[serde( deserialize_with = "parse_preprocess_options", default, rename = "@preprocess" )] pub preprocess: PreprocessOptions, } /// Preprocessing options for files that will be put in a GResource #[derive(Debug, Default, PartialEq, Eq)] #[non_exhaustive] pub struct PreprocessOptions { /// Strip whitespace from XML file pub xml_stripblanks: bool, /// Unimplemented pub to_pixdata: bool, /// Strip whitespace from JSON file pub json_stripblanks: bool, } impl PreprocessOptions { /// An empty set of preprocessing options /// /// No preprocessing 
will be done pub fn empty() -> Self { Self { xml_stripblanks: false, to_pixdata: false, json_stripblanks: false, } } /// XML strip blanks preprocessing will be applied pub fn xml_stripblanks() -> Self { Self { xml_stripblanks: true, to_pixdata: false, json_stripblanks: false, } } /// JSON strip blanks preprocessing will be applied pub fn json_stripblanks() -> Self { Self { xml_stripblanks: false, to_pixdata: false, json_stripblanks: true, } } } fn parse_bool_value<'de, D>(d: D) -> Result where D: serde::Deserializer<'de>, { match &*String::deserialize(d)? { "true" | "t" | "yes" | "y" | "1" => Ok(true), "false" | "f" | "no" | "n" | "0" => Ok(false), other => Err(D::Error::custom(format!("got '{}', but expected any of 'true', 't', 'yes', 'y', '1' / 'false', 'f', 'no', 'n', '0'", other))), } } fn parse_preprocess_options<'de, D>(d: D) -> Result where D: serde::Deserializer<'de>, { let mut this = PreprocessOptions::default(); for item in String::deserialize(d)?.split(',') { match item { "json-stripblanks" => this.json_stripblanks = true, "xml-stripblanks" => this.xml_stripblanks = true, "to-pixdata" => this.to_pixdata = true, other => { return Err(D::Error::custom(format!( "got '{}' but expected any of 'json-stripblanks', 'xml-stripblanks'", other ))) } } } Ok(this) } impl XmlManifest { /// Load a GResource XML file from disk using `path` pub fn from_file(path: &Path) -> error::XmlManifestResult { let mut file = std::fs::File::open(path) .map_err(error::XmlManifestError::from_io_with_filename(path))?; let mut data = Vec::with_capacity( file.metadata() .map_err(error::XmlManifestError::from_io_with_filename(path))? .len() as usize, ); file.read_to_end(&mut data) .map_err(error::XmlManifestError::from_io_with_filename(path))?; let dir = path.parent().unwrap(); Self::from_bytes_with_filename(dir, Some(path.to_path_buf()), Cow::Owned(data)) } /// Load a GResource XML file from the provided `Cow<[u8]>` bytes. 
A filename is provided for /// error context fn from_bytes_with_filename( dir: &Path, filename: Option, data: Cow<'_, [u8]>, ) -> error::XmlManifestResult { let mut this: Self = quick_xml::de::from_str( std::str::from_utf8(&data) .map_err(|err| error::XmlManifestError::Utf8(err, filename.clone()))?, ) .map_err(|err| error::XmlManifestError::Serde(err, filename))?; this.dir = dir.to_path_buf(); Ok(this) } /// Load a GResource XML file from the provided `Cow<[u8]>` bytes pub fn from_bytes(dir: &Path, data: Cow<'_, [u8]>) -> error::XmlManifestResult { Self::from_bytes_with_filename(dir, None, data) } /// Load a GResource XML file from a `&str` or `String` pub fn from_string(dir: &Path, str: impl ToString) -> error::XmlManifestResult { Self::from_bytes(dir, Cow::Borrowed(str.to_string().as_bytes())) } } #[cfg(test)] mod test { use super::*; use matches::assert_matches; use pretty_assertions::assert_eq; #[test] fn deserialize_simple() { let test_path = PathBuf::from("/TEST"); let data = r#"test"#; let doc = XmlManifest::from_bytes(&test_path, Cow::Borrowed(data.as_bytes())).unwrap(); println!("{:?}", doc); assert_eq!(doc, doc); assert_eq!(doc.gresources.len(), 1); assert_eq!(doc.gresources[0].files.len(), 1); assert_eq!(doc.gresources[0].files[0].filename, "test"); assert_eq!(doc.gresources[0].files[0].preprocess.xml_stripblanks, true); assert_eq!( doc.gresources[0].files[0].preprocess.json_stripblanks, false ); assert_eq!(doc.gresources[0].files[0].preprocess.to_pixdata, false); assert_eq!(doc.gresources[0].files[0].compressed, false); } #[test] fn deserialize_complex() { let test_path = PathBuf::from("/TEST"); let data = r#"test.json"#; let doc = XmlManifest::from_bytes(&test_path, Cow::Borrowed(data.as_bytes())).unwrap(); assert_eq!(doc.gresources.len(), 1); assert_eq!(doc.gresources[0].files.len(), 1); assert_eq!(doc.gresources[0].files[0].filename, "test.json"); assert_eq!(doc.gresources[0].files[0].compressed, true); assert_eq!(doc.gresources[0].files[0].preprocess.json_stripblanks, true); assert_eq!(doc.gresources[0].files[0].preprocess.to_pixdata, true); assert_eq!(doc.gresources[0].files[0].preprocess.xml_stripblanks, false); assert_eq!(doc.gresources[0].prefix, "/bla/blub") } #[test] fn deserialize_fail() { let test_path = PathBuf::from("/TEST"); let res = XmlManifest::from_string(&test_path, r#""#); assert!(format!("{:?}", res).contains("parsing XML")); assert_matches!( res, Err(error::XmlManifestError::Serde(quick_xml::DeError::Custom(field), _)) if field == "missing field `gresource`" ); let string = r#""#.to_string(); let res = XmlManifest::from_bytes_with_filename( &test_path, Some(PathBuf::from("test_filename")), Cow::Borrowed(string.as_bytes()), ); assert!(format!("{:?}", res).contains("test_filename")); assert_matches!( res, Err(error::XmlManifestError::Serde(quick_xml::de::DeError::Custom(field), _)) if field == "missing field `$value`" ); assert_matches!( XmlManifest::from_string(&test_path, r#"filename"#), Err(error::XmlManifestError::Serde(quick_xml::de::DeError::Custom(field), _)) if field.starts_with("got 'nobool', but expected any of") ); assert_matches!( XmlManifest::from_string(&test_path, r#""#), Err(error::XmlManifestError::Serde(quick_xml::de::DeError::Custom(field), _))if field.starts_with("unknown field `wrong`, expected `gresource`") ); assert_matches!( XmlManifest::from_string(&test_path, r#"filename"#), Err(error::XmlManifestError::Serde(quick_xml::de::DeError::Custom(field), _)) if field.starts_with("unknown field `wrong`, expected `file` or `@prefix`") ); 
assert_matches!( XmlManifest::from_string(&test_path, r#"filename"#), Err(error::XmlManifestError::Serde(quick_xml::de::DeError::Custom(field), _)) if field.starts_with("unknown field `@wrong`, expected one of") ); assert_matches!( XmlManifest::from_string(&test_path, r#"filename"#), Err(error::XmlManifestError::Serde(quick_xml::de::DeError::Custom(field), _)) if field.starts_with("got 'fail' but expected any of") ); let res = XmlManifest::from_bytes(&test_path, Cow::Borrowed(&[0x80, 0x81])).unwrap_err(); println!("{}", res); assert_matches!(res, error::XmlManifestError::Utf8(..)); } #[test] fn io_error() { let test_path = PathBuf::from("invalid_file_name.xml"); let res = XmlManifest::from_file(&test_path); assert_matches!(res, Err(error::XmlManifestError::Io(_, _))); assert!(format!("{:?}", res).contains("invalid_file_name.xml")); } } gvdb-0.7.1/src/gresource.rs000064400000000000000000000032521046102023000137100ustar 00000000000000mod bundle; mod xml; pub use bundle::{BuilderError, BuilderResult, BundleBuilder, FileData}; pub use xml::{PreprocessOptions, XmlManifest, XmlManifestError, XmlManifestResult}; /// Deprecated type aliases mod deprecated { use super::*; /// Type has been renamed. Use [`BundleBuilder`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::BundleBuilder instead."] pub type GResourceBuilder<'a> = BundleBuilder<'a>; /// Type has been renamed. Use [`FileData`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::FileData instead."] pub type GResourceFileData<'a> = FileData<'a>; /// DType has been renamed. Use [`XmlManifest`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::XmlManifest instead."] pub type GResourceXMLDocument = XmlManifest; /// Type has been renamed. Use [`BuilderError`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::BuilderError instead."] pub type GResourceBuilderError = BuilderError; /// Type has been renamed. Use [`BuilderResult`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::BuilderResult instead."] pub type GResourceBuilderResult = BuilderResult; /// Type has been renamed. Use [`XmlManifestError`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::XmlManifestError instead."] pub type GResourceXMLError = XmlManifestError; /// Type has been renamed. Use [`XmlManifestResult`] instead. #[deprecated = "Type has been renamed. Use gvdb::gresource::XmlManifestResult instead."] pub type GResourceXMLResult = XmlManifestResult; } pub use deprecated::*; gvdb-0.7.1/src/lib.rs000064400000000000000000000060421046102023000124600ustar 00000000000000//! # Read and write GVDB files //! //! This crate allows you to read and write GVDB (GLib GVariant database) files. //! It can also parse GResource XML files and create the corresponding GResource binary //! //! ## Examples //! //! Load a GResource file from disk with [`File`](crate::read::File) //! //! ``` //! use std::path::PathBuf; //! use gvdb::read::File; //! //! pub fn read_gresource_file() { //! let path = PathBuf::from("test-data/test3.gresource"); //! let file = File::from_file(&path).unwrap(); //! let table = file.hash_table().unwrap(); //! //! #[derive(serde::Deserialize, zvariant::Type)] //! struct SvgData { //! size: u32, //! flags: u32, //! content: Vec //! } //! //! let svg: SvgData = table //! .get("/gvdb/rs/test/online-symbolic.svg") //! .unwrap(); //! let svg_str = std::str::from_utf8(&svg.content).unwrap(); //! //! println!("{}", svg_str); //! } //! ``` //! //! 
Create a simple GVDB file with [`FileWriter`](crate::write::FileWriter) //! //! ``` //! use gvdb::write::{FileWriter, HashTableBuilder}; //! //! fn create_gvdb_file() { //! let mut file_writer = FileWriter::new(); //! let mut table_builder = HashTableBuilder::new(); //! table_builder //! .insert_string("string", "test string") //! .unwrap(); //! //! let mut table_builder_2 = HashTableBuilder::new(); //! table_builder_2 //! .insert("int", 42u32) //! .unwrap(); //! //! table_builder //! .insert_table("table", table_builder_2) //! .unwrap(); //! let file_data = file_writer.write_to_vec_with_table(table_builder).unwrap(); //! } //! ``` //! //! ## Features //! //! By default, no features are enabled. //! //! ### `mmap` //! //! Use the memmap2 crate to read memory-mapped GVDB files. //! //! ### `glib` //! //! By default this crate uses the [glib](https://crates.io/crates/zvariant) crate to allow reading //! and writing `GVariant` data to the gvdb files. By enabling this feature you can pass GVariants //! directly from the glib crate as well. //! //! ### `gresource` //! //! To be able to compile GResource files, the `gresource` feature must be enabled. //! //! ## Macros //! //! The [gvdb-macros](https://crates.io/crates/gvdb-macros) crate provides useful macros for //! GResource file creation. #![warn(missing_docs)] #![allow(unknown_lints, clippy::assigning_clones)] #![doc = include_str!("../README.md")] extern crate core; /// Read GResource XML files and compile a GResource file /// /// Use [`XmlManifest`](crate::gresource::XmlManifest) for XML file reading and /// [`BundleBuilder`](crate::gresource::BundleBuilder) to create the GResource binary /// file #[cfg(feature = "gresource")] pub mod gresource; /// Read GVDB files from a file or from a byte slice /// /// See the documentation of [`File`](crate::read::File) to get started pub mod read; /// Create GVDB files /// /// See the documentation of [`FileWriter`](crate::write::FileWriter) to get started pub mod write; #[cfg(test)] pub(crate) mod test; mod util; gvdb-0.7.1/src/read/error.rs000064400000000000000000000116531046102023000137620ustar 00000000000000use std::fmt::{Display, Formatter}; use std::num::TryFromIntError; use std::path::{Path, PathBuf}; use std::str::Utf8Error; use std::string::FromUtf8Error; /// An error that can occur during GVDB file reading #[non_exhaustive] #[derive(Debug)] pub enum Error { /// Error converting a string to UTF-8 Utf8(Utf8Error), /// Generic I/O error. 
Path contains an optional filename if applicable Io(std::io::Error, Option), /// Tried to access an invalid data offset DataOffset, /// Tried to read unaligned data DataAlignment, /// Read invalid data with context information in the provided string Data(String), /// The item with the specified key does not exist in the hash table KeyNotFound(String), } impl Error { pub(crate) fn from_io_with_filename(filename: &Path) -> impl FnOnce(std::io::Error) -> Error { let path = filename.to_path_buf(); move |err| Error::Io(err, Some(path)) } } impl std::error::Error for Error {} impl From for Error { fn from(err: FromUtf8Error) -> Self { Self::Utf8(err.utf8_error()) } } impl From for Error { fn from(err: Utf8Error) -> Self { Self::Utf8(err) } } impl From for Error { fn from(err: zvariant::Error) -> Self { Self::Data(format!("Error deserializing value as gvariant: {err}")) } } impl From for Error { fn from(_err: TryFromIntError) -> Self { Self::DataOffset } } impl Display for Error { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Error::Utf8(err) => write!(f, "Error converting string to UTF-8: {}", err), Error::Io(err, path) => { if let Some(path) = path { write!( f, "I/O error while reading file '{}': {}", path.display(), err ) } else { write!(f, "I/O error: {}", err) } } Error::DataOffset => { write!(f, "Tried to access an invalid data offset. Most likely reason is a corrupted GVDB file") } Error::DataAlignment => { write!( f, "Tried to read unaligned data. Most likely reason is a corrupted GVDB file" ) } Error::Data(msg) => { write!( f, "A data inconsistency error occured while reading gvdb file: {}", msg ) } Error::KeyNotFound(key) => { write!(f, "The item with the key '{}' does not exist", key) } } } } /// The Result type for [`Error`] pub type Result = std::result::Result; #[cfg(test)] mod test { use crate::read::{Error, Header, Pointer}; use matches::assert_matches; use std::num::TryFromIntError; use zerocopy::{AsBytes, FromBytes}; #[test] fn from() { let io_res = std::fs::File::open("test/invalid_file_name"); let err = Error::Io(io_res.unwrap_err(), None); assert!(format!("{}", err).contains("I/O")); let utf8_err = String::from_utf8([0xC3, 0x28].to_vec()).unwrap_err(); let err = Error::from(utf8_err); assert!(format!("{}", err).contains("UTF-8")); let res: Result = u32::MAX.try_into(); let err = Error::from(res.unwrap_err()); assert_matches!(err, Error::DataOffset); assert!(format!("{}", err).contains("data offset")); let err = Error::Data("my data error".to_string()); assert!(format!("{}", err).contains("my data error")); let err = Error::KeyNotFound("test".to_string()); assert!(format!("{}", err).contains("test")); let err = Error::from(zvariant::Error::Message("test".to_string())); assert!(format!("{}", err).contains("test")); let to_transmute = Header::new(false, 0, Pointer::NULL); let mut bytes = to_transmute.as_bytes().to_vec(); bytes.extend_from_slice(b"fail"); let res = Header::ref_from(&bytes); assert_eq!(res, None); // unexpected trailing bytes let to_transmute = Header::new(false, 0, Pointer::NULL); let mut bytes = to_transmute.as_bytes().to_vec(); bytes.remove(bytes.len() - 1); let res = Header::ref_from(&bytes); assert_eq!(res, None); //Missing 1 byte let to_transmute = Header::new(false, 0, Pointer::NULL); let mut bytes = b"unalign".to_vec(); bytes.extend_from_slice(to_transmute.as_bytes()); let res = Header::ref_from(&bytes[7..]); assert_eq!(res, None); // Unaligned let bytes = vec![0u8; 5]; let res = Header::slice_from(&bytes); assert_eq!(res, None); 
// Invalid size } } gvdb-0.7.1/src/read/file.rs000064400000000000000000000421411046102023000135440ustar 00000000000000use crate::read::error::{Error, Result}; use crate::read::header::Header; use crate::read::pointer::Pointer; use crate::read::HashTable; use std::borrow::Cow; use std::io::Read; use std::path::Path; #[derive(Debug)] pub(crate) enum Data<'a> { Cow(Cow<'a, [u8]>), #[cfg(feature = "mmap")] Mmap(memmap2::Mmap), } impl AsRef<[u8]> for Data<'_> { fn as_ref(&self) -> &[u8] { match self { Data::Cow(cow) => cow.as_ref(), #[cfg(feature = "mmap")] Data::Mmap(mmap) => mmap.as_ref(), } } } impl<'a> Data<'a> { /// Dereference a pointer pub fn dereference(&'a self, pointer: &Pointer, alignment: u32) -> Result<&'a [u8]> { let start: usize = pointer.start() as usize; let end: usize = pointer.end() as usize; let alignment: usize = alignment as usize; if start > end { Err(Error::DataOffset) } else if start & (alignment - 1) != 0 { Err(Error::DataAlignment) } else { self.as_ref().get(start..end).ok_or(Error::DataOffset) } } } /// The root of a GVDB file /// /// # Examples /// /// Load a GResource file from disk /// /// ``` /// use std::path::PathBuf; /// use serde::Deserialize; /// use gvdb::read::File; /// /// let path = PathBuf::from("test-data/test3.gresource"); /// let file = File::from_file(&path).unwrap(); /// let table = file.hash_table().unwrap(); /// /// #[derive(serde::Deserialize, zvariant::Type)] /// struct SvgData { /// size: u32, /// flags: u32, /// content: Vec /// } /// /// let svg: SvgData = table /// .get("/gvdb/rs/test/online-symbolic.svg") /// .unwrap(); /// let svg_str = std::str::from_utf8(&svg.content).unwrap(); /// /// println!("{}", svg_str); /// ``` /// /// Query the root hash table /// /// ``` /// use gvdb::read::File; /// # use matches::assert_matches; /// /// fn query_hash_table(file: File) { /// let table = file.hash_table().unwrap(); /// /// let mut keys = table.keys(); /// assert_eq!(keys.len(), 2); /// assert_matches!(keys.next().unwrap().as_deref(), Ok("string")); /// assert_matches!(keys.next().unwrap().as_deref(), Ok("table")); /// /// let str_value: String = table.get("string").unwrap(); /// assert_eq!(str_value, "test string"); /// /// let mut items = table.values().collect::, _>>().unwrap(); /// assert_eq!(items.len(), 2); /// assert_eq!(String::try_from(&items[0]).unwrap(), "test string"); /// /// let sub_table = table.get_hash_table("table").unwrap(); /// let mut sub_table_keys = sub_table.keys().collect::, _>>().unwrap(); /// assert_eq!(sub_table_keys.len(), 1); /// assert_eq!(sub_table_keys[0], "int"); /// /// let int_value: u32 = sub_table.get("int").unwrap(); /// assert_eq!(int_value, 42); /// } /// ``` pub struct File<'a> { pub(crate) data: Data<'a>, pub(crate) endianness: zvariant::Endian, pub(crate) header: Header, } impl<'a> File<'a> { /// Returns the root hash table of the file pub fn hash_table(&self) -> Result { let header = self.header; let root_ptr = header.root(); self.read_hash_table(root_ptr) } /// Dereference a pointer and try to read the underlying hash table pub(crate) fn read_hash_table(&self, pointer: &Pointer) -> Result { let data = self.data.dereference(pointer, 4)?; HashTable::for_bytes(data, self) } /// Dereference a pointer pub(crate) fn dereference(&self, pointer: &Pointer, alignment: u32) -> Result<&[u8]> { self.data.dereference(pointer, alignment) } fn from_data(data: Data<'a>) -> Result { let header = Header::try_from_bytes(data.as_ref())?; let byteswapped = header.is_byteswap()?; // Determine the zvariant endianness by 
comparing with target endianness let endianness = if cfg!(target_endian = "little") && !byteswapped || cfg!(target_endian = "big") && byteswapped { zvariant::LE } else { zvariant::BE }; Ok(Self { data, endianness, header, }) } /// Interpret a slice of bytes as a GVDB file pub fn from_bytes(bytes: Cow<'a, [u8]>) -> Result { Self::from_data(Data::Cow(bytes)) } /// Open a file and interpret the data as GVDB /// ``` /// let path = std::path::PathBuf::from("test-data/test3.gresource"); /// let file = gvdb::read::File::from_file(&path).unwrap(); /// ``` pub fn from_file(filename: &Path) -> Result { let mut file = std::fs::File::open(filename).map_err(Error::from_io_with_filename(filename))?; let mut data = Vec::with_capacity( file.metadata() .map_err(Error::from_io_with_filename(filename))? .len() as usize, ); file.read_to_end(&mut data) .map_err(Error::from_io_with_filename(filename))?; Self::from_bytes(Cow::Owned(data)) } /// Open a file and `mmap` it into memory. /// /// # Safety /// /// This is marked unsafe as the file could be modified on-disk while the mmap is active. /// This will cause undefined behavior. You must make sure to employ your own locking and to /// reload the file yourself when any modification occurs. #[cfg(feature = "mmap")] pub unsafe fn from_file_mmap(filename: &Path) -> Result { let file = std::fs::File::open(filename).map_err(Error::from_io_with_filename(filename))?; let mmap = memmap2::Mmap::map(&file).map_err(Error::from_io_with_filename(filename))?; Self::from_data(Data::Mmap(mmap)) } /// Determine the endianess to use for zvariant pub(crate) fn endianness(&self) -> zvariant::Endian { self.endianness } } impl std::fmt::Debug for File<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Ok(hash_table) = self.hash_table() { f.debug_struct("File") .field("endianness", &self.endianness) .field("header", &self.header) .field("hash_table", &hash_table) .finish() } else { f.debug_struct("File") .field("endianness", &self.endianness) .field("header", &self.header) .finish_non_exhaustive() } } } #[cfg(test)] mod test { use crate::read::file::File; use std::borrow::Cow; use std::mem::size_of; use std::path::PathBuf; use crate::read::{Error, HashItem, Header, Pointer}; use crate::test::*; use crate::write::{FileWriter, HashTableBuilder}; use matches::assert_matches; #[allow(unused_imports)] use pretty_assertions::{assert_eq, assert_ne, assert_str_eq}; use zerocopy::AsBytes; #[test] fn test_file_1() { let file = File::from_file(&TEST_FILE_1).unwrap(); assert_is_file_1(&file); } #[cfg(feature = "mmap")] #[test] fn test_file_1_mmap() { let file = unsafe { File::from_file_mmap(&TEST_FILE_1).unwrap() }; assert_is_file_1(&file); } #[test] fn test_file_2() { let file = File::from_file(&TEST_FILE_2).unwrap(); assert_is_file_2(&file); } #[test] fn test_file_3() { let file = File::from_file(&TEST_FILE_3).unwrap(); assert_is_file_3(&file); } #[test] fn invalid_header() { let header = Header::new_be(0, Pointer::new(0, 0)); let mut data = header.as_bytes().to_vec(); data[0] = 0; assert_matches!(File::from_bytes(Cow::Owned(data)), Err(Error::Data(_))); } #[test] fn invalid_version() { let header = Header::new_le(1, Pointer::new(0, 0)); let data = header.as_bytes().to_vec(); assert_matches!(File::from_bytes(Cow::Owned(data)), Err(Error::Data(_))); } #[test] fn file_does_not_exist() { let res = File::from_file(&PathBuf::from("this_file_does_not_exist")); assert_matches!(res, Err(Error::Io(_, _))); println!("{}", res.unwrap_err()); } #[cfg(feature = "mmap")] 
#[test] fn file_error_mmap() { unsafe { assert_matches!( File::from_file_mmap(&PathBuf::from("this_file_does_not_exist")), Err(Error::Io(_, _)) ); } } fn create_minimal_file() -> File<'static> { let header = Header::new_le(0, Pointer::new(0, 0)); let data = header.as_bytes().to_vec(); assert_bytes_eq( &data, &[ 71, 86, 97, 114, 105, 97, 110, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ], "GVDB header", ); File::from_bytes(Cow::Owned(data)).unwrap() } #[test] fn test_minimal_file() { let file = create_minimal_file(); assert!(!format!("{file:?}").is_empty()); } #[test] fn broken_hash_table() { let writer = FileWriter::new(); let mut table = HashTableBuilder::new(); table.insert_string("test", "test").unwrap(); let mut data = writer.write_to_vec_with_table(table).unwrap(); // Remove data to see if this will throw an error data.remove(data.len() - 24); // We change the root pointer end to be shorter. Otherwise we will trigger // a data offset error when dereferencing. This is a bit hacky. // The root pointer end is always at position sizeof(u32 * 5). // As this is little endian, we can just modify the first byte. let root_ptr_end = size_of::() * 5; data[root_ptr_end] -= 25; let file = File::from_bytes(Cow::Owned(data)).unwrap(); let err = file.hash_table().unwrap_err(); assert_matches!(err, Error::Data(_)); assert!(format!("{}", err).contains("Not enough bytes to fit hash table")); } #[test] fn broken_hash_table2() { let writer = FileWriter::new(); let mut table = HashTableBuilder::new(); table.insert_string("test", "test").unwrap(); let mut data = writer.write_to_vec_with_table(table).unwrap(); // We change the root pointer end to be shorter. // The root pointer end is always at position sizeof(u32 * 5). // As this is little endian, we can just modify the first byte. let root_ptr_end = size_of::() * 5; data[root_ptr_end] -= 23; let file = File::from_bytes(Cow::Owned(data)).unwrap(); let err = file.hash_table().unwrap_err(); assert_matches!(err, Error::Data(_)); assert!(format!("{}", err).contains("Hash item size invalid")); } #[test] fn invalid_gvariant() { let writer = FileWriter::new(); let mut table = HashTableBuilder::new(); table.insert_string("test", "test").unwrap(); let mut data = writer.write_to_vec_with_table(table).unwrap(); // Load the file to find out where the value ends up being let file = File::from_bytes(Cow::Borrowed(&data[..])).unwrap(); let table = file.hash_table().unwrap(); let item = table.get_hash_item("test").unwrap(); let value_ptr = item.value_ptr(); drop(file); // Now we overwrite the value ptr with 0xFF for i in value_ptr.start()..value_ptr.end() { data[i as usize] = u8::MAX; } // Reload the file let file = File::from_bytes(Cow::Owned(data)).unwrap(); let table = file.hash_table().unwrap(); assert_matches!(table.get::("test"), Err(Error::Data(msg)) if msg.contains("gvariant")); assert_matches!(table.get_value("test"), Err(Error::Data(msg)) if msg.contains("gvariant")); for value in table.values() { assert_matches!(value, Err(Error::Data(msg)) if msg.contains("gvariant")); } } #[test] fn parent_invalid_offset() { let writer = FileWriter::new(); let mut table = HashTableBuilder::new(); table.insert_string("parent/test", "test").unwrap(); let mut data = writer.write_to_vec_with_table(table).unwrap(); let file = File::from_bytes(Cow::Owned(data.clone())).unwrap(); // We change the parent offset to be bigger than the item size in the hash table. // 'test' will always end up being item 2. // The parent field is at +4. 
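// (A `HashItem` is 24 bytes on disk: u32 hash value, u32 parent index,
// u32 key start, u16 key size, u8 item type, u8 unused, and an 8-byte
// value pointer, so the parent index sits 4 bytes into the item.)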
let hash_item_size = size_of::(); let start = file.hash_table().unwrap().header.items_offset() + hash_item_size * 2; let parent_field = start + 4; data[parent_field..parent_field + size_of::()] .copy_from_slice(10u32.to_le().as_bytes()); println!("{:?}", File::from_bytes(Cow::Owned(data.clone())).unwrap()); let file = File::from_bytes(Cow::Owned(data)).unwrap(); let err = file .hash_table() .unwrap() .keys() .collect::, _>>() .unwrap_err(); assert_matches!(err, Error::Data(_)); assert!(format!("{}", err).contains("Parent with invalid offset")); assert!(format!("{}", err).contains("10")); } #[test] fn parent_loop() { let writer = FileWriter::new(); let mut table = HashTableBuilder::new(); table.insert_string("parent/test", "test").unwrap(); let mut data = writer.write_to_vec_with_table(table).unwrap(); let file = File::from_bytes(Cow::Owned(data.clone())).unwrap(); // We change the parent offset to be pointing to itself. // 'test' will always end up being item 2. // The parent field is at +4. let hash_item_size = size_of::(); let start = file.hash_table().unwrap().header.items_offset() + hash_item_size * 2; let parent_field = start + 4; data[parent_field..parent_field + size_of::()] .copy_from_slice(1u32.to_le().as_bytes()); println!("{:?}", File::from_bytes(Cow::Owned(data.clone())).unwrap()); let file = File::from_bytes(Cow::Owned(data)).unwrap(); let err = file .hash_table() .unwrap() .keys() .collect::, _>>() .unwrap_err(); assert_matches!(err, Error::Data(_)); assert!(format!("{}", err).contains("loop")); } #[test] fn iter() { let writer = FileWriter::new(); let mut table = HashTableBuilder::new(); table.insert_string("iter/test", "test").unwrap(); table.insert("iter/test2", "test2").unwrap(); let data = writer.write_to_vec_with_table(table).unwrap(); let file = File::from_bytes(Cow::Owned(data.clone())).unwrap(); let table = file.hash_table().unwrap(); let values = table.values().collect::, _>>().unwrap(); assert_eq!(String::try_from(&values[0]).unwrap(), "test"); assert_eq!(String::try_from(&values[1]).unwrap(), "test2"); } #[test] fn test_dereference_offset1() { // Pointer start > EOF let file = create_minimal_file(); let res = file.data.dereference(&Pointer::new(40, 42), 2); assert_matches!(res, Err(Error::DataOffset)); println!("{}", res.unwrap_err()); } #[test] fn test_dereference_offset2() { // Pointer start > end let file = create_minimal_file(); let res = file.data.dereference(&Pointer::new(10, 0), 2); assert_matches!(res, Err(Error::DataOffset)); println!("{}", res.unwrap_err()); } #[test] fn test_dereference_offset3() { // Pointer end > EOF let file = create_minimal_file(); let res = file.data.dereference(&Pointer::new(10, 0), 2); assert_matches!(res, Err(Error::DataOffset)); println!("{}", res.unwrap_err()); } #[test] fn test_dereference_alignment() { // Pointer end > EOF let file = create_minimal_file(); let res = file.data.dereference(&Pointer::new(1, 2), 2); assert_matches!(res, Err(Error::DataAlignment)); println!("{}", res.unwrap_err()); } #[test] fn test_nested_dict() { // test file 2 has a nested dictionary let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); // A table isn't a value let table_res = table.get_value("table"); assert_matches!(table_res, Err(Error::Data(_))); } #[test] fn test_nested_dict_fail() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let res = table.get_hash_table("string"); assert_matches!(res, Err(Error::Data(_))); } #[test] fn test_from_file_lifetime() { // 
Ensure the lifetime of the file is not bound by the filename let filename = TEST_FILE_2.clone(); let file = File::from_file(&filename).unwrap(); drop(filename); // Ensure the hash table only borrows the file immutably let table = file.hash_table().unwrap(); let table2 = file.hash_table().unwrap(); table2.keys().collect::, _>>().unwrap(); table.keys().collect::, _>>().unwrap(); } } gvdb-0.7.1/src/read/hash.rs000064400000000000000000000651601046102023000135560ustar 00000000000000use crate::read::error::{Error, Result}; use crate::read::hash_item::HashItem; use crate::util::djb_hash; use serde::Deserialize; use std::fmt::{Debug, Formatter}; use std::mem::size_of; use zerocopy::byteorder::little_endian::U32 as u32le; use zerocopy::{AsBytes, FromBytes, FromZeroes}; use zvariant::Type; use super::{File, HashItemType}; #[cfg(unix)] type GVariantDeserializer<'de, 'sig, 'f> = zvariant::gvariant::Deserializer<'de, 'sig, 'f, zvariant::Fd<'f>>; #[cfg(not(unix))] type GVariantDeserializer<'de, 'sig, 'f> = zvariant::gvariant::Deserializer<'de, 'sig, 'f, ()>; /// The header of a GVDB hash table. /// /// ```text /// +-------+-----------------------+ /// | Bytes | Field | /// +-------+-----------------------+ /// | 4 | number of bloom words | /// +-------+-----------------------+ /// | 4 | number of buckets | /// +-------+-----------------------+ /// ``` #[repr(C)] #[derive(Copy, Clone, PartialEq, Eq, FromBytes, FromZeroes, AsBytes)] pub struct HashHeader { n_bloom_words: u32, n_buckets: u32, } impl HashHeader { /// Create a new [`HashHeader`]` using the provided `bloom_shift`, `n_bloom_words` and /// `n_buckets` pub fn new(bloom_shift: u32, n_bloom_words: u32, n_buckets: u32) -> Self { assert!(n_bloom_words < (1 << 27)); let n_bloom_words = bloom_shift << 27 | n_bloom_words; Self { n_bloom_words: n_bloom_words.to_le(), n_buckets: n_buckets.to_le(), } } /// Read the hash table header from `data` pub fn try_from_bytes(data: &[u8]) -> Result<&Self> { HashHeader::ref_from_prefix(data) .ok_or(Error::Data("Invalid hash table header".to_string())) } /// Number of bloom words in the hash table header pub fn n_bloom_words(&self) -> u32 { u32::from_le(self.n_bloom_words) & ((1 << 27) - 1) } /// The start of the bloom words region pub fn bloom_words_offset(&self) -> usize { size_of::() } /// Size of the bloom words section in the header pub fn bloom_words_len(&self) -> usize { self.n_bloom_words() as usize * size_of::() } /// Read the bloom words from `data` fn read_bloom_words<'a>(&self, data: &'a [u8]) -> Result<&'a [u32le]> { // Bloom words come directly after header let offset = self.bloom_words_offset(); let len = self.bloom_words_len(); if len == 0 { Ok(&[]) } else { let words_data = data.get(offset..(offset + len)).ok_or_else(|| { Error::Data(format!( "Not enough bytes to fit hash table: Expected at least {} bytes, got {}", self.items_offset(), data.len() )) })?; u32le::slice_from(words_data).ok_or(Error::DataOffset) } } /// The offset of the hash buckets section pub fn buckets_offset(&self) -> usize { self.bloom_words_offset() + self.bloom_words_len() } /// Number of hash buckets in the hash table header pub fn n_buckets(&self) -> u32 { u32::from_le(self.n_buckets) } /// Length of the hash buckets section in the header pub fn buckets_len(&self) -> usize { self.n_buckets() as usize * size_of::() } /// Read the buckets as a little endian slice fn read_buckets<'a>(&self, data: &'a [u8]) -> Result<&'a [u32le]> { let offset = self.buckets_offset(); let len = self.buckets_len(); if len == 0 { Ok(&[]) } else { let 
buckets_data = data.get(offset..(offset + len)).ok_or_else(|| { Error::Data(format!( "Not enough bytes to fit hash table: Expected at least {} bytes, got {}", self.items_offset(), data.len() )) })?; u32le::slice_from(buckets_data).ok_or(Error::DataOffset) } } /// The start of the hash items region pub fn items_offset(&self) -> usize { self.buckets_offset() + self.buckets_len() } /// Read the items as a slice fn read_items<'a>(&self, data: &'a [u8]) -> Result<&'a [HashItem]> { let offset = self.items_offset(); let len = data.len().saturating_sub(offset); if len == 0 { // The hash table has no items. This is generally valid. Ok(&[]) } else if len % size_of::() != 0 { Err(Error::Data(format!( "Hash item size invalid: Expected a multiple of {}, got {}", size_of::(), data.len() ))) } else { let items_data = data.get(offset..(offset + len)).unwrap_or_default(); HashItem::slice_from(items_data).ok_or(Error::DataOffset) } } } impl Debug for HashHeader { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct("HashHeader") .field("n_bloom_words", &self.n_bloom_words()) .field("n_buckets", &self.n_buckets()) .field("data", &self.as_bytes()) .finish() } } /// A hash table inside a GVDB file /// /// ```text /// +--------+---------------------------+ /// | Bytes | Field | /// +--------+---------------------------+ /// | 4 | number of bloom words (b) | /// +--------+---------------------------+ /// | 4 | number of buckets (n) | /// +--------+---------------------------+ /// | b * 4 | bloom words | /// +--------+---------------------------+ /// | n * 4 | buckets | /// +--------+---------------------------+ /// | x * 24 | hash items | /// +--------+---------------------------+ /// ``` #[derive(Clone)] pub struct HashTable<'table, 'file> { pub(crate) file: &'table File<'file>, pub(crate) header: &'table HashHeader, bloom_words: &'table [u32le], buckets: &'table [u32le], items: &'table [HashItem], } impl<'table, 'file> HashTable<'table, 'file> { /// Interpret a chunk of bytes as a HashTable. The table_ptr should point to the hash table. /// Data has to be the complete GVDB file, as hash table items are stored somewhere else. pub(crate) fn for_bytes(data: &'table [u8], root: &'table File<'file>) -> Result { let header = HashHeader::try_from_bytes(data)?; let bloom_words = header.read_bloom_words(data)?; let buckets = header.read_buckets(data)?; let items = header.read_items(data)?; Ok(Self { file: root, header, bloom_words, buckets, items, }) } // TODO: Calculate proper bloom shift fn bloom_shift(&self) -> usize { 0 } /// Check whether the hash value corresponds to the bloom filter fn bloom_filter(&self, hash_value: u32) -> bool { if self.header.n_bloom_words() == 0 { return true; } let word = (hash_value / 32) % self.header.n_bloom_words(); let mut mask = 1 << (hash_value & 31); mask |= 1 << ((hash_value >> self.bloom_shift()) & 31); // We know this index is < n_bloom_words let bloom_word = self.bloom_words.get(word as usize).unwrap().get(); bloom_word & mask == mask } /// Get the hash item at hash item index fn get_hash_item_for_index(&self, index: usize) -> Option<&HashItem> { self.items.get(index) } /// Iterator over the keys contained in the hash table. /// /// Not all of these keys correspond to gvariant encoded values. Some keys may correspond to internal container /// types, or hash tables. pub fn keys<'iter>(&'iter self) -> Keys<'iter, 'table, 'file> { Keys { hash_table: self, pos: 0, } } /// Iterator over the gvariant encoded values contained in the hash table. 
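///
/// A minimal usage sketch, assuming a table that stores plain gvariant
/// values:
///
/// ```no_run
/// # let file = gvdb::read::File::from_file(std::path::Path::new("test-data/test1.gvdb")).unwrap();
/// let table = file.hash_table().unwrap();
/// for value in table.values() {
///     println!("{:?}", value.unwrap());
/// }
/// ```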
pub fn values<'iter>(&'iter self) -> Values<'iter, 'table, 'file> { let context = zvariant::serialized::Context::new_gvariant(self.file.endianness(), 0); Values { hash_table: self, pos: 0, context, } } /// Recurses through parents and check whether `item` has the specified full path name fn check_key(&self, item: &HashItem, key: &str) -> bool { let this_key = match self.key_for_item(item) { Ok(this_key) => this_key, Err(_) => return false, }; if !key.ends_with(&this_key) { return false; } if let Some(parent) = item.parent() { if let Some(parent_item) = self.get_hash_item_for_index(parent as usize) { let parent_key_len = key.len().saturating_sub(this_key.len()); self.check_key(parent_item, &key[0..parent_key_len]) } else { false } } else { key.len() == this_key.len() } } /// Return the string that corresponds to the key part of the [`HashItem`]. fn key_for_item(&self, item: &HashItem) -> Result<&str> { let data = self.file.dereference(&item.key_ptr(), 1)?; Ok(std::str::from_utf8(data)?) } /// Gets the item at key `key`. pub(crate) fn get_hash_item(&self, key: &str) -> Option { if self.buckets.is_empty() || self.items.is_empty() { return None; } let hash_value = djb_hash(key); if !self.bloom_filter(hash_value) { return None; } let bucket = (hash_value % self.buckets.len() as u32) as usize; let mut itemno = self.buckets[bucket as usize].get() as usize; let lastno = if let Some(item) = self.buckets.get(bucket + 1) { item.get() as usize } else { self.items.len() }; while itemno < lastno { let item = self.get_hash_item_for_index(itemno)?; if hash_value == item.hash_value() && self.check_key(item, key) { return Some(*item); } itemno += 1; } None } fn get_item_bytes(&self, item: &HashItem) -> Result<&'table [u8]> { let typ = item.typ()?; if typ == HashItemType::Value { Ok(self.file.dereference(item.value_ptr(), 8)?) } else { Err(Error::Data(format!( "Unable to parse item for key '{:?}' as GVariant: Expected type 'v', got type {}", self.key_for_item(item), typ ))) } } /// Get the bytes for the [`HashItem`] at `key`. fn get_bytes(&self, key: &str) -> Result<&'table [u8]> { let item = self .get_hash_item(key) .ok_or(Error::KeyNotFound(key.to_string()))?; self.get_item_bytes(&item) } /// Returns the nested [`HashTable`] at `key`, if one is found. pub fn get_hash_table(&self, key: &str) -> Result { let item = self .get_hash_item(key) .ok_or(Error::KeyNotFound(key.to_string()))?; let typ = item.typ()?; if typ == HashItemType::HashTable { self.file.read_hash_table(item.value_ptr()) } else { Err(Error::Data(format!( "Unable to parse item for key '{}' as hash table: Expected type 'H', got type '{}'", self.key_for_item(&item)?, typ ))) } } fn deserializer_for_bytes( context: zvariant::serialized::Context, data: &[u8], ) -> GVariantDeserializer { // On non-unix systems this function lacks the FD argument GVariantDeserializer::new( data, #[cfg(unix)] None::<&[zvariant::Fd]>, zvariant::Value::signature(), context, ) .expect("zvariant::Value::signature() must be a valid zvariant signature") } /// Create a zvariant deserializer for the specified key. fn deserializer_for_key(&self, key: &str) -> Result { let data = self.get_bytes(key)?; // Create a new zvariant context based on our endianess and the byteswapped property let context = zvariant::serialized::Context::new_gvariant(self.file.endianness(), 0); Ok(Self::deserializer_for_bytes(context, data)) } /// Returns the data for `key` as a [`enum@zvariant::Value`]. 
/// /// Unless you need to inspect the value at runtime, it is recommended to use [`HashTable::get`]. pub fn get_value(&self, key: &str) -> Result { let mut de = self.deserializer_for_key(key)?; zvariant::Value::deserialize(&mut de).map_err(|err| { Error::Data(format!( "Error deserializing value for key \"{}\" as gvariant type \"{}\": {}", key, zvariant::Value::signature(), err )) }) } /// Returns the data for `key` and try to deserialize a [`enum@zvariant::Value`]. /// /// Then try to extract an underlying `T`. pub fn get<'d, T>(&'d self, key: &str) -> Result where T: zvariant::Type + serde::Deserialize<'d> + 'd, { let mut de = self.deserializer_for_key(key)?; let value = zvariant::DeserializeValue::deserialize(&mut de).map_err(|err| { Error::Data(format!( "Error deserializing value for key \"{}\" as gvariant type \"{}\": {}", key, T::signature(), err )) })?; Ok(value.0) } #[cfg(feature = "glib")] /// Returns the data for `key` as a [`struct@glib::Variant`]. pub fn get_gvariant(&self, key: &str) -> Result { let data = self.get_bytes(key)?; let variant = glib::Variant::from_data_with_type(data, glib::VariantTy::VARIANT); if self.file.endianness == zvariant::Endian::native() { Ok(variant) } else { Ok(variant.byteswap()) } } } impl std::fmt::Debug for HashTable<'_, '_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct("HashTable") .field("header", &self.header) .field("bloom_words", &self.bloom_words) .field("buckets", &self.buckets) .field( "map", &self .keys() .map(|name| { name.into_iter() .map(|name| { let item = self.get_hash_item(&name); match item { Some(item) => { let value = match item.typ() { Ok(super::HashItemType::Container) => { Ok(Box::new(item) as Box) } Ok(super::HashItemType::HashTable) => { self.get_hash_table(&name).map(|table| { Box::new(table) as Box }) } Ok(super::HashItemType::Value) => { self.get_value(&name).map(|value| { Box::new(value) as Box }) } Err(err) => Err(err), }; (name.to_string(), Some((item, value))) } None => (name.to_string(), None), } }) .collect::>() }) .collect::>(), ) .finish() } } /// Iterator over all keys in a [`HashTable`] pub struct Keys<'a, 'table, 'file> { hash_table: &'a HashTable<'table, 'file>, pos: usize, } impl<'a, 'table, 'file> Iterator for Keys<'a, 'table, 'file> { type Item = Result; fn next(&mut self) -> Option { let mut item_count = self.hash_table.items.len() as isize; self.hash_table .get_hash_item_for_index(self.pos) .map(|mut item| { self.pos += 1; let mut key = self.hash_table.key_for_item(item)?.to_owned(); while let Some(parent) = item.parent() { if item_count < 0 { return Err(Error::Data( "Error finding all parent items. 
The file appears to have a loop" .to_string(), )); } item = if let Some(item) = self.hash_table.get_hash_item_for_index(parent as usize) { item } else { return Err(Error::Data(format!( "Parent with invalid offset encountered: {}", parent ))); }; let parent_key = self.hash_table.key_for_item(item)?; key.insert_str(0, parent_key); item_count -= 1; } Ok(key) }) } fn size_hint(&self) -> (usize, Option) { let size = self.hash_table.items.len().saturating_sub(self.pos); (size, Some(size)) } } impl<'a, 'table, 'file> ExactSizeIterator for Keys<'a, 'table, 'file> {} /// Iterator over all values in a [`HashTable`] pub struct Values<'a, 'table, 'file> { hash_table: &'a HashTable<'table, 'file>, context: zvariant::serialized::Context, pos: usize, } impl<'a, 'table, 'file> Iterator for Values<'a, 'table, 'file> { type Item = Result>; fn next(&mut self) -> Option { let item = loop { let Some(item) = self.hash_table.get_hash_item_for_index(self.pos) else { break None; }; self.pos += 1; if item.typ().is_ok_and(|t| t == HashItemType::Value) { break Some(item); } }; item.map(|item| { let bytes = self.hash_table.get_item_bytes(item)?; let mut de = HashTable::deserializer_for_bytes(self.context, bytes); Ok(zvariant::Value::deserialize(&mut de)?) }) } fn size_hint(&self) -> (usize, Option) { ( 0, Some(self.hash_table.items.len().saturating_sub(self.pos)), ) } } #[cfg(test)] pub(crate) mod test { use crate::read::{Error, File, HashHeader, HashItem, Pointer}; use crate::test::*; use crate::test::{assert_eq, assert_matches, assert_ne}; #[test] fn debug() { let header = HashHeader::new(0, 0, 0); let header2 = header; println!("{:?}", header2); let file = new_empty_file(); let table = file.hash_table().unwrap(); let table2 = table.clone(); println!("{:?}", table2); } #[test] fn get_header() { let file = new_empty_file(); let table = file.hash_table().unwrap(); let header = table.header; assert_eq!(header.n_buckets(), 0); let file = new_simple_file(false); let table = file.hash_table().unwrap(); let header = table.header; assert_eq!(header.n_buckets(), 1); println!("{:?}", table); } #[test] fn bloom_words() { let file = new_empty_file(); let table = file.hash_table().unwrap(); let header = table.header; assert_eq!(header.n_bloom_words(), 0); assert_eq!(header.bloom_words_len(), 0); assert!(table.bloom_words.is_empty()); } #[test] fn get_item() { let file = new_empty_file(); let table = file.hash_table().unwrap(); let res = table.get_hash_item("test"); assert_matches!(res, None); for endianess in [true, false] { let file = new_simple_file(endianess); let table = file.hash_table().unwrap(); let item = table.get_hash_item(SIMPLE_FILE_KEY).unwrap(); assert_ne!(item.value_ptr(), &Pointer::NULL); let bytes = table.get_item_bytes(&item); assert!(bytes.is_ok()); let value: u32 = table .get_value(SIMPLE_FILE_KEY) .unwrap() .try_into() .unwrap(); assert_eq!(value, SIMPLE_FILE_VALUE); let item_fail = table.get_hash_item("fail"); assert_matches!(item_fail, None); let res_item = table.get_hash_item("test_fail"); assert_matches!(res_item, None); } } #[test] fn broken_items() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let table = table.get_hash_table("table").unwrap(); let broken_item = HashItem::test_new_invalid_type(); assert_matches!(table.get_item_bytes(&broken_item), Err(Error::Data(_))); let null_item = HashItem::test_new_null(); assert_matches!(table.get_item_bytes(&null_item), Ok(&[])); let invalid_parent = HashItem::test_new_invalid_parent(); 
assert_matches!(table.get_item_bytes(&null_item), Ok(&[])); let parent = table.get_hash_item_for_index(invalid_parent.parent().unwrap() as usize); assert_matches!(parent, None); let broken_item = HashItem::test_new_invalid_key_ptr(); assert_matches!(table.key_for_item(&broken_item), Err(Error::DataOffset)); let broken_item = HashItem::test_new_invalid_value_ptr(); assert_matches!(table.get_item_bytes(&broken_item), Err(Error::DataOffset)); } #[test] fn get() { for endianess in [true, false] { let file = new_simple_file(endianess); let table = file.hash_table().unwrap(); let res: u32 = table.get::(SIMPLE_FILE_KEY).unwrap(); assert_eq!(res, SIMPLE_FILE_VALUE); let res = table.get::(SIMPLE_FILE_KEY); assert_matches!(res, Err(Error::Data(_))); } } #[test] fn get_bloom_word() { for endianess in [true, false] { let file = new_simple_file(endianess); let table = file.hash_table().unwrap(); let res = table.bloom_words.first(); assert_matches!(res, None); } } #[test] fn bloom_shift() { for endianess in [true, false] { let file = new_simple_file(endianess); let table = file.hash_table().unwrap(); let res = table.bloom_shift(); assert_eq!(res, 0); } } #[test] fn get_value() { for endianess in [true, false] { let file = new_simple_file(endianess); let table = file.hash_table().unwrap(); let res = table.get_value(SIMPLE_FILE_KEY).unwrap(); assert_eq!(&res, &zvariant::Value::from(SIMPLE_FILE_VALUE)); let fail = table.get_value("fail").unwrap_err(); assert_matches!(fail, Error::KeyNotFound(_)); } } #[test] fn get_hash_table() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let table = table.get_hash_table("table").unwrap(); let fail = table.get_hash_table("fail").unwrap_err(); assert_matches!(fail, Error::KeyNotFound(_)); } #[test] fn check_name_pass() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let item = table.get_hash_item("string").unwrap(); assert_eq!(table.check_key(&item, "string"), true); } #[test] fn check_name_invalid_name() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let item = table.get_hash_item("string").unwrap(); assert_eq!(table.check_key(&item, "fail"), false); } #[test] fn check_name_wrong_item() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let table = table.get_hash_table("table").unwrap(); // Get an item from the sub-hash table and call check_names on the root let item = table.get_hash_item("int").unwrap(); assert_eq!(table.check_key(&item, "table"), false); } #[test] fn check_name_broken_key_pointer() { let file = File::from_file(&TEST_FILE_2).unwrap(); let table = file.hash_table().unwrap(); let table = table.get_hash_table("table").unwrap(); // Break the key pointer let item = table.get_hash_item("int").unwrap(); let key_ptr = Pointer::new(500, 500); let broken_item = HashItem::new( item.hash_value(), None, key_ptr, item.typ().unwrap(), *item.value_ptr(), ); assert_eq!(table.check_key(&broken_item, "table"), false); } #[test] fn check_name_invalid_parent() { let file = File::from_file(&TEST_FILE_3).unwrap(); let table = file.hash_table().unwrap(); // Break the key pointer let item = table .get_hash_item("/gvdb/rs/test/online-symbolic.svg") .unwrap(); let broken_item = HashItem::new( item.hash_value(), Some(50), item.key_ptr(), item.typ().unwrap(), *item.value_ptr(), ); assert_eq!( table.check_key(&broken_item, "/gvdb/rs/test/online-symbolic.svg"), false ); } } #[cfg(all(feature = "glib", 
test))] mod test_glib { use crate::read::Error; use crate::test::{new_simple_file, SIMPLE_FILE_KEY, SIMPLE_FILE_VALUE}; use glib::prelude::*; use matches::assert_matches; #[test] fn get_gvariant() { for endianess in [true, false] { let file = new_simple_file(endianess); let table = file.hash_table().unwrap(); let res: glib::Variant = table.get_gvariant(SIMPLE_FILE_KEY).unwrap().get().unwrap(); assert_eq!(res, SIMPLE_FILE_VALUE.to_variant()); let fail = table.get_gvariant("fail").unwrap_err(); assert_matches!(fail, Error::KeyNotFound(_)); } } } gvdb-0.7.1/src/read/hash_item.rs000064400000000000000000000162561046102023000145760ustar 00000000000000use crate::read::error::{Error, Result}; use crate::read::pointer::Pointer; use std::fmt::{Display, Formatter}; use zerocopy::{AsBytes, FromBytes, FromZeroes}; #[derive(PartialEq, Eq, Debug)] pub enum HashItemType { Value, HashTable, Container, } impl From for u8 { fn from(item: HashItemType) -> Self { match item { HashItemType::Value => b'v', HashItemType::HashTable => b'H', HashItemType::Container => b'L', } } } impl TryFrom for HashItemType { type Error = Error; fn try_from(value: u8) -> Result { let chr = value as char; if chr == 'v' { Ok(HashItemType::Value) } else if chr == 'H' { Ok(HashItemType::HashTable) } else if chr == 'L' { Ok(HashItemType::Container) } else { Err(Error::Data(format!("Invalid HashItemType: '{}'", chr))) } } } impl Display for HashItemType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let text = match self { HashItemType::Value => "Value", HashItemType::HashTable => "HashTable", HashItemType::Container => "Child", }; write!(f, "{}", text) } } /// GVDB hash item. /// /// ```text /// +-------+----------------------+ /// | Bytes | Field | /// +-------+----------------------+ /// | 4 | djb2 hash value | /// +-------+----------------------+ /// | 4 | parent item index | /// +-------+----------------------+ /// | 4 | start address of key | /// +-------+----------------------+ /// | 2 | size of key | /// +-------+----------------------+ /// | 1 | hash item kind | /// +-------+------------------- --+ /// | 1 | unused | /// +-------+----------------------+ /// | 8 | value data pointer | /// +-------+----------------------+ /// ``` #[repr(C)] #[derive(Copy, Clone, FromZeroes, FromBytes, AsBytes)] pub struct HashItem { hash_value: u32, parent: u32, key_start: u32, key_size: u16, typ: u8, unused: u8, value: Pointer, } impl HashItem { pub fn new( hash_value: u32, parent: Option, key_ptr: Pointer, typ: HashItemType, value: Pointer, ) -> Self { let key_start = key_ptr.start().to_le(); let key_size = (key_ptr.size() as u16).to_le(); let parent = if let Some(parent) = parent { parent } else { u32::MAX }; Self { hash_value: hash_value.to_le(), parent: parent.to_le(), key_start, key_size, typ: typ.into(), unused: 0, value, } } /// djb hash value of the item data. pub fn hash_value(&self) -> u32 { u32::from_le(self.hash_value) } /// The item index of the parent hash item. /// /// 0xFFFFFFFF means this is a root item. pub fn parent(&self) -> Option { let parent = u32::from_le(self.parent); if parent == u32::MAX { None } else { Some(parent) } } /// Global start pointer of the key data pub fn key_start(&self) -> u32 { u32::from_le(self.key_start) } /// The size of the key data. pub fn key_size(&self) -> u16 { u16::from_le(self.key_size) } /// Convenience method to generate a proper GVDB pointer from key_start and key_size. 
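    ///
    /// The returned pointer spans `key_start .. key_start + key_size` in
    /// file-global coordinates.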
pub fn key_ptr(&self) -> Pointer { Pointer::new( self.key_start() as usize, self.key_start() as usize + self.key_size() as usize, ) } /// The kind of hash item. pub fn typ(&self) -> Result { self.typ.try_into() } /// A pointer to the underlying data. pub fn value_ptr(&self) -> &Pointer { &self.value } } #[cfg(test)] impl HashItem { pub(crate) fn test_new_null() -> Self { Self { hash_value: 0, parent: u32::MAX, key_start: 0, key_size: 0, typ: b'v', unused: 0, value: Pointer::NULL, } } pub(crate) fn test_new_invalid_type() -> Self { Self { hash_value: 0, parent: u32::MAX, key_start: 0, key_size: 0, typ: b'x', unused: 0, value: Pointer::NULL, } } pub(crate) fn test_new_invalid_parent() -> Self { Self { hash_value: 0, parent: u32::MAX - 1, key_start: 0, key_size: 0, typ: b'v', unused: 0, value: Pointer::NULL, } } pub(crate) fn test_new_invalid_key_ptr() -> Self { Self { hash_value: 0, parent: u32::MAX, key_start: u32::MAX, key_size: 100, typ: b'v', unused: 0, value: Pointer::NULL, } } pub(crate) fn test_new_invalid_value_ptr() -> Self { Self { hash_value: 0, parent: u32::MAX, key_start: 0, key_size: 0, typ: b'v', unused: 0, value: Pointer::new(usize::MAX, 100), } } } impl std::fmt::Debug for HashItem { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct("HashItem") .field("hash_value", &self.hash_value()) .field("parent", &self.parent()) .field("key_start", &self.key_start()) .field("key_size", &self.key_size()) .field("typ", &self.typ()) .field("unused", &self.unused) .field("value", &self.value_ptr()) .finish() } } #[cfg(test)] mod test { use crate::read::{Error, HashItem, HashItemType, Pointer}; use matches::assert_matches; #[test] fn derives() { let typ = HashItemType::Value; println!("{}, {:?}", typ, typ); let typ = HashItemType::HashTable; println!("{}, {:?}", typ, typ); let typ = HashItemType::Container; println!("{}, {:?}", typ, typ); let item = HashItem::new(0, None, Pointer::NULL, HashItemType::Value, Pointer::NULL); let item2 = item; println!("{:?}", item2); } #[test] fn type_try_from() { assert_matches!(HashItemType::try_from(b'v'), Ok(HashItemType::Value)); assert_matches!(HashItemType::try_from(b'H'), Ok(HashItemType::HashTable)); assert_matches!(HashItemType::try_from(b'L'), Ok(HashItemType::Container)); assert_matches!(HashItemType::try_from(b'x'), Err(Error::Data(_))); assert_matches!(HashItemType::try_from(b'?'), Err(Error::Data(_))); } #[test] fn item() { let item = HashItem::new( 0, Some(0), Pointer::NULL, HashItemType::Value, Pointer::NULL, ); assert_eq!(item.hash_value(), 0); assert_eq!(item.parent(), Some(0)); assert_eq!(item.key_ptr(), Pointer::NULL); assert_matches!(item.typ(), Ok(HashItemType::Value)); assert_eq!(item.value_ptr(), &Pointer::NULL); } } gvdb-0.7.1/src/read/header.rs000064400000000000000000000123171046102023000140570ustar 00000000000000use crate::read::error::{Error, Result}; use crate::read::pointer::Pointer; use zerocopy::{AsBytes, FromBytes, FromZeroes}; // This is just a string, but it is stored in the byteorder of the file // Default byteorder is little endian, but the format supports big endian as well // "GVar" const GVDB_SIGNATURE0: u32 = 1918981703; // "iant" const GVDB_SIGNATURE1: u32 = 1953390953; /// A GVDB file header. 
/// /// ```text /// +-------+--------------+ /// | Bytes | Field | /// +-------+--------------+ /// | 8 | signature | /// +-------+--------------+ /// | 4 | version | /// +-------+--------------+ /// | 4 | options | /// +-------+--------------+ /// | 8 | root pointer | /// +-------+--------------+ /// ``` /// /// ## Signature /// /// The signature will look like the ASCII string `GVariant` for little endian /// and `raVGtnai` for big endian files. /// /// This is what you get when reading two u32, swapping the endianness, and interpreting them as a string. /// /// ## Version /// /// Version is always 0. /// /// ## Options /// /// There are no known options, this u32 is always 0. /// /// ## Root pointer /// /// Points to the root hash table within the file. #[repr(C)] #[derive(Copy, Clone, PartialEq, Eq, Debug, FromZeroes, FromBytes, AsBytes)] pub struct Header { signature: [u32; 2], version: u32, options: u32, root: Pointer, } impl Header { /// Try to read the header, determine the endianness and validate that the header is valid. /// /// Returns [`Error::DataOffset`]` if the header doesn't fit, and [`Error::Data`] if the header /// is invalid. pub fn try_from_bytes(data: &[u8]) -> Result { let header = Header::read_from_prefix(data).ok_or(Error::Data("Invalid GVDB header".to_string()))?; if !header.header_valid() { return Err(Error::Data( "Invalid GVDB header. Is this a GVDB file?".to_string(), )); } if header.version() != 0 { return Err(Error::Data(format!( "Unknown GVDB file format version: {}", header.version() ))); } Ok(header) } /// Create a new GVDB header in little-endian #[cfg(test)] pub fn new_le(version: u32, root: Pointer) -> Self { #[cfg(target_endian = "little")] let byteswap = false; #[cfg(target_endian = "big")] let byteswap = true; Self::new(byteswap, version, root) } /// Create a new GVDB header in big-endian #[cfg(test)] pub fn new_be(version: u32, root: Pointer) -> Self { #[cfg(target_endian = "little")] let byteswap = true; #[cfg(target_endian = "big")] let byteswap = false; Self::new(byteswap, version, root) } /// Create a new GVDB header in target endianness pub fn new(byteswap: bool, version: u32, root: Pointer) -> Self { let signature = if !byteswap { [GVDB_SIGNATURE0, GVDB_SIGNATURE1] } else { [GVDB_SIGNATURE0.swap_bytes(), GVDB_SIGNATURE1.swap_bytes()] }; Self { signature, version: version.to_le(), options: 0, root, } } /// Returns: /// /// - `Ok(true)` if the file is *not* in target endianness (eg. BE on an LE machine) /// - `Ok(false)` if the file is in target endianness (eg. LE on an LE machine) /// - [`Err(Error::Data)`](crate::read::error::Error::Data) if the file signature is invalid pub fn is_byteswap(&self) -> Result { if self.signature[0] == GVDB_SIGNATURE0 && self.signature[1] == GVDB_SIGNATURE1 { Ok(false) } else if self.signature[0] == GVDB_SIGNATURE0.swap_bytes() && self.signature[1] == GVDB_SIGNATURE1.swap_bytes() { Ok(true) } else { Err(Error::Data(format!( "Invalid GVDB header signature: {:?}. Is this a GVariant database file?", self.signature ))) } } /// Returns true if the header indicates that this is a valid GVDB file. pub fn header_valid(&self) -> bool { self.is_byteswap().is_ok() } /// The version of the GVDB file. We only recognize version 0 of the format. pub fn version(&self) -> u32 { self.version } /// The pointer to the root hash table. 
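    ///
    /// Like all GVDB pointers, the root pointer is stored little-endian
    /// regardless of the file's byte order.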
pub fn root(&self) -> &Pointer { &self.root } } #[cfg(test)] mod test { use super::*; use zerocopy::AsBytes; #[test] fn derives() { let header = Header::new(false, 0, Pointer::NULL); let header2 = header; println!("{:?}", header2); } #[test] fn header_serialize() { let header = Header::new(false, 123, Pointer::NULL); assert!(!header.is_byteswap().unwrap()); let data = header.as_bytes(); let parsed_header = Header::ref_from(data).unwrap(); assert!(!parsed_header.is_byteswap().unwrap()); let header = Header::new(true, 0, Pointer::NULL); assert!(header.is_byteswap().unwrap()); let data = header.as_bytes(); let parsed_header = Header::ref_from(data).unwrap(); assert!(parsed_header.is_byteswap().unwrap()); } } gvdb-0.7.1/src/read/pointer.rs000064400000000000000000000037341046102023000143120ustar 00000000000000use zerocopy::{AsBytes, FromBytes, FromZeroes}; /// A pointer internal to the GVDB file. /// /// GVDB files use pointer structs with global start and end locations. Pointers /// are *always* little-endian, independant of the file endianess. /// /// It is possible to retrieve the bytes stored at this pointer by using /// [`File::dereference()`](crate::read::File::dereference). #[repr(C)] #[derive(Copy, Clone, PartialEq, Eq, AsBytes, FromBytes, FromZeroes)] pub struct Pointer { start: u32, end: u32, } impl Pointer { #[allow(unused)] pub(crate) const NULL: Self = Self { start: 0, end: 0 }; /// Create a new GVDB pointer. Pointers are always internally stored as little endian, /// so we convert the values here. pub fn new(start: usize, end: usize) -> Self { Self { start: (start as u32).to_le(), end: (end as u32).to_le(), } } /// Returns the start address of the pointer and convert them to target endianess. pub fn start(&self) -> u32 { u32::from_le(self.start) } /// Returns the end address of the pointer and convert them to target endianess. pub fn end(&self) -> u32 { u32::from_le(self.end) } /// Returns the number of bytes referenced by the pointer. pub fn size(&self) -> usize { self.end().saturating_sub(self.start()) as usize } } impl std::fmt::Debug for Pointer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Pointer") .field("start", &self.start()) .field("end", &self.end()) .finish() } } #[cfg(test)] mod test { use crate::read::Pointer; #[test] fn derives() { let pointer = Pointer::new(0, 2); let pointer2 = pointer; println!("{:?}", pointer2); } #[test] fn no_panic_invalid_size() { let invalid_ptr = Pointer::new(100, 0); let size = invalid_ptr.size(); assert_eq!(size, 0); } } gvdb-0.7.1/src/read.rs000064400000000000000000000021211046102023000126170ustar 00000000000000mod error; mod file; mod hash; mod hash_item; mod header; mod pointer; pub use error::{Error, Result}; pub use file::File; pub use hash::HashTable; pub(crate) use hash::HashHeader; pub(crate) use hash_item::{HashItem, HashItemType}; pub(crate) use header::Header; pub(crate) use pointer::Pointer; /// Deprecated type aliases mod deprecated { use super::*; /// Type has been renamed. Use [`File`] instead. #[deprecated = "Type has been renamed. Use gvdb::read::File instead."] pub type GvdbFile<'a> = File<'a>; /// Type has been renamed. Use [`HashTable`] instead. #[deprecated = "Type has been renamed. Use gvdb::read::HashTable instead."] pub type GvdbHashTable<'a, 'b> = HashTable<'a, 'b>; /// Type has been renamed. Use [`Error`] instead. #[deprecated = "Type has been renamed. Use gvdb::read::Error instead."] pub type GvdbReaderError = Error; /// Type has been renamed. Use [`Result`] instead. 
#[deprecated = "Type has been renamed. Use gvdb::read::Result instead."] pub type GvdbReaderResult = Result; } pub use deprecated::*; gvdb-0.7.1/src/test.rs000064400000000000000000000354231046102023000126760ustar 00000000000000#![allow(unused)] use crate::read::{File, HashItemType, HashTable}; use crate::write::{FileWriter, HashTableBuilder}; use glib::value::ToValue; use lazy_static::lazy_static; pub use matches::assert_matches; pub use pretty_assertions::{assert_eq, assert_ne, assert_str_eq}; use serde::Deserialize; use std::borrow::Cow; use std::cmp::{max, min}; use std::io::{Cursor, Read, Write}; use std::path::{Path, PathBuf}; use zvariant::DynamicType; lazy_static! { pub(crate) static ref TEST_FILE_DIR: PathBuf = PathBuf::from("test-data"); pub(crate) static ref TEST_FILE_1: PathBuf = TEST_FILE_DIR.join("test1.gvdb"); pub(crate) static ref TEST_FILE_2: PathBuf = TEST_FILE_DIR.join("test2.gvdb"); pub(crate) static ref TEST_FILE_3: PathBuf = TEST_FILE_DIR.join("test3.gresource"); pub(crate) static ref TEST_FILE_4: PathBuf = TEST_FILE_DIR.join("test4.gvdb"); pub(crate) static ref GRESOURCE_DIR: PathBuf = TEST_FILE_DIR.join("gresource"); pub(crate) static ref GRESOURCE_XML: PathBuf = GRESOURCE_DIR.join("test3.gresource.xml"); } pub(crate) const SIMPLE_FILE_KEY: &str = "test"; pub(crate) const SIMPLE_FILE_VALUE: u32 = 0xabca_bcab_u32; fn write_byte_row( f: &mut dyn std::io::Write, offset: usize, bytes_per_row: usize, bytes: &[u8], ) -> std::io::Result<()> { write!(f, "{:08X}", offset)?; for (index, byte) in bytes.iter().enumerate() { if index % 4 == 0 { write!(f, " ")?; } write!(f, " {:02X}", byte)?; } let bytes_per_row = max(bytes_per_row, bytes.len()); for index in bytes.len()..bytes_per_row { if index % 4 == 0 { write!(f, " ")?; } write!(f, " ")?; } write!(f, " ")?; for byte in bytes { if byte.is_ascii_alphanumeric() || byte.is_ascii_whitespace() || byte.is_ascii_punctuation() { write!(f, "{}", *byte as char)?; } else { write!(f, ".")?; } } writeln!(f) } fn write_byte_rows( f: &mut dyn std::io::Write, center_offset: usize, additional_rows_top: usize, additional_rows_bottom: usize, bytes_per_row: usize, bytes: &[u8], ) -> std::io::Result<()> { let center_row_num = center_offset / bytes_per_row; let start_row = center_row_num - min(center_row_num, additional_rows_top); // We add 1 because we can add partial rows at the end let last_row = min( additional_rows_bottom + center_row_num, bytes.len() / bytes_per_row + 1, ); let row_count = last_row - start_row; for row in 0..row_count { let offset_start = (start_row + row) * bytes_per_row; let offset_end = min(bytes.len(), offset_start + bytes_per_row); write_byte_row( f, offset_start, bytes_per_row, &bytes[offset_start..offset_end], )?; } Ok(()) } pub fn assert_gvariant_eq(a: &[u8], b: &[u8], context: &str) { // Decode gvariant using glib, and diff using print() let a_var = glib::Variant::from_data::(a); let b_var = glib::Variant::from_data::(b); let a_str = a_var.print(true); let b_str = b_var.print(true); if a_str != b_str { let mut bytes_a: Vec = Vec::new(); write_byte_rows(&mut bytes_a, 0, 0, usize::MAX, 16, a); let mut bytes_b: Vec = Vec::new(); write_byte_rows(&mut bytes_b, 0, 0, usize::MAX, 16, b); assert_eq!( format!( "{}\n{}", a_var.print(true).as_str(), std::str::from_utf8(&bytes_a).unwrap() ), format!( "{}\n{}", b_var.print(true).as_str(), std::str::from_utf8(&bytes_b).unwrap() ), "{}", context ); } } #[track_caller] pub fn assert_bytes_eq(a: &[u8], b: &[u8], context: &str) { const WIDTH: usize = 16; const EXTRA_ROWS_TOP: usize = 
8; const EXTRA_ROWS_BOTTOM: usize = 4; let max_len = max(a.len(), b.len()); for index in 0..max_len { let a_byte = a.get(index); let b_byte = b.get(index); if a_byte.is_none() || b_byte.is_none() || a_byte.unwrap() != b_byte.unwrap() { let mut a_bytes_buf = Vec::new(); write_byte_rows( &mut a_bytes_buf, index, EXTRA_ROWS_TOP, EXTRA_ROWS_BOTTOM, WIDTH, a, ) .unwrap(); let str_a = String::from_utf8(a_bytes_buf).unwrap(); let mut b_bytes_buf = Vec::new(); write_byte_rows( &mut b_bytes_buf, index, EXTRA_ROWS_TOP, EXTRA_ROWS_BOTTOM, WIDTH, b, ) .unwrap(); let str_b = String::from_utf8(b_bytes_buf).unwrap(); assert_str_eq!(str_a, str_b, "{}", context); } } } pub fn byte_compare_gvdb_file(a: &File, b: &File, context: &str) { assert_eq!(a.header, b.header); let a_hash = a.hash_table().unwrap(); let b_hash = b.hash_table().unwrap(); byte_compare_gvdb_hash_table(&a_hash, &b_hash, context); } fn byte_compare_file(file: &File, reference_path: &Path) { let mut reference_file = std::fs::File::open(reference_path).unwrap(); let mut reference_data = Vec::new(); reference_file.read_to_end(&mut reference_data).unwrap(); assert_bytes_eq( &reference_data, file.data.as_ref(), &format!("Byte comparing with file '{}'", reference_path.display()), ); } pub fn byte_compare_file_1(file: &File) { byte_compare_file(file, &TEST_FILE_1); } pub fn assert_is_file_1(file: &File) { let table = file.hash_table().unwrap(); let mut names = table.keys(); assert_eq!(names.len(), 1); assert_eq!(&names.next().unwrap().unwrap(), "root_key"); let value = table.get_value("root_key").unwrap(); assert_matches!(value, zvariant::Value::Structure(_)); assert_eq!(value.value_signature(), "(uus)"); let tuple = zvariant::Structure::try_from(value).unwrap(); let fields = tuple.into_fields(); assert_eq!(u32::try_from(&fields[0]), Ok(1234)); assert_eq!(u32::try_from(&fields[1]), Ok(98765)); assert_eq!(<&str>::try_from(&fields[2]), Ok("TEST_STRING_VALUE")); } pub fn byte_compare_file_2(file: &File) { byte_compare_file(file, &TEST_FILE_2); } pub fn assert_is_file_2(file: &File) { let table = file.hash_table().unwrap(); let names = table.keys().collect::, _>>().unwrap(); assert_eq!(names.len(), 2); assert_eq!(names[0], "string"); assert_eq!(names[1], "table"); let str_value = table.get_value("string").unwrap(); assert_matches!(str_value, zvariant::Value::Str(_)); assert_eq!(<&str>::try_from(&str_value), Ok("test string")); let sub_table = table.get_hash_table("table").unwrap(); let sub_table_names = sub_table.keys().collect::, _>>().unwrap(); assert_eq!(sub_table_names.len(), 1); assert_eq!(sub_table_names[0], "int"); let int_value = sub_table.get_value("int").unwrap(); assert_eq!(u32::try_from(int_value), Ok(42)); } pub fn byte_compare_file_3(file: &File) { let ref_root = File::from_file(&TEST_FILE_3).unwrap(); byte_compare_gvdb_file(&ref_root, file, "Comparing file 3"); } pub fn assert_is_file_3(file: &File) { let table = file.hash_table().unwrap(); let mut names = table.keys().collect::, _>>().unwrap(); names.sort(); let reference_names = vec![ "/", "/gvdb/", "/gvdb/rs/", "/gvdb/rs/test/", "/gvdb/rs/test/icons/", "/gvdb/rs/test/icons/scalable/", "/gvdb/rs/test/icons/scalable/actions/", "/gvdb/rs/test/icons/scalable/actions/send-symbolic.svg", "/gvdb/rs/test/json/", "/gvdb/rs/test/json/test.json", "/gvdb/rs/test/online-symbolic.svg", "/gvdb/rs/test/test.css", ]; assert_eq!(names, reference_names); #[derive(Clone, zvariant::Type, serde::Deserialize)] struct GResourceData { size: u32, flags: u32, content: Vec, } let svg1: GResourceData = table 
.get::("/gvdb/rs/test/online-symbolic.svg") .unwrap(); assert_eq!(svg1.size, 1390); assert_eq!(svg1.flags, 0); assert_eq!(svg1.size as usize, svg1.content.len() - 1); // Ensure the last byte is zero because of zero-padding defined in the format assert_eq!(svg1.content[svg1.content.len() - 1], 0); let svg1_str = std::str::from_utf8(&svg1.content[0..svg1.content.len() - 1]).unwrap(); assert!(svg1_str.starts_with( &(r#""#.to_string() + "\n\n" + r#" = >::try_from(svg2_fields[2].try_clone().unwrap()).unwrap(); assert_eq!(svg2_size, 345); assert_eq!(svg2_flags, 1); let mut decoder = flate2::read::ZlibDecoder::new(&*svg2_content); let mut svg2_data = Vec::new(); decoder.read_to_end(&mut svg2_data).unwrap(); // Ensure the last byte is *not* zero and len is not one bigger than specified because // compressed data is not zero-padded assert_ne!(svg2_data[svg2_data.len() - 1], 0); assert_eq!(svg2_size as usize, svg2_data.len()); let svg2_str = std::str::from_utf8(&svg2_data).unwrap(); let mut svg2_reference = String::new(); std::fs::File::open(GRESOURCE_DIR.join("icons/scalable/actions/send-symbolic.svg")) .unwrap() .read_to_string(&mut svg2_reference) .unwrap(); assert_str_eq!(svg2_str, svg2_reference); let json = zvariant::Structure::try_from(table.get_value("/gvdb/rs/test/json/test.json").unwrap()) .unwrap() .into_fields(); let json_size: u32 = (&json[0]).try_into().unwrap(); let json_flags: u32 = (&json[1]).try_into().unwrap(); let json_content: Vec = json[2].try_clone().unwrap().try_into().unwrap(); // Ensure the last byte is zero because of zero-padding defined in the format assert_eq!(json_content[json_content.len() - 1], 0); assert_eq!(json_size as usize, json_content.len() - 1); let json_str = std::str::from_utf8(&json_content[0..json_content.len() - 1]).unwrap(); assert_eq!(json_flags, 0); assert_str_eq!( json_str, r#"["test_string",42,{"bool":true}]"#.to_string() + "\n" ); } pub fn byte_compare_file_4(file: &File) { let ref_root = File::from_file(&TEST_FILE_4).unwrap(); byte_compare_gvdb_file(&ref_root, file, "Comparing file 4"); } pub(crate) fn new_empty_file() -> File<'static> { let writer = FileWriter::new(); let table_builder = HashTableBuilder::new(); let data = Vec::new(); let mut cursor = Cursor::new(data); writer.write_with_table(table_builder, &mut cursor).unwrap(); File::from_bytes(Cow::Owned(cursor.into_inner())).unwrap() } pub(crate) fn new_simple_file(big_endian: bool) -> File<'static> { let writer = if big_endian { FileWriter::for_big_endian() } else { FileWriter::new() }; let mut table_builder = HashTableBuilder::new(); table_builder .insert(SIMPLE_FILE_KEY, SIMPLE_FILE_VALUE) .unwrap(); let data = Vec::new(); let mut cursor = Cursor::new(data); writer.write_with_table(table_builder, &mut cursor).unwrap(); File::from_bytes(Cow::Owned(cursor.into_inner())).unwrap() } #[track_caller] pub(crate) fn byte_compare_gvdb_hash_table(a: &HashTable, b: &HashTable, context: &str) { assert_eq!(a.header, b.header); let mut keys_a = a.keys().collect::, _>>().unwrap(); let mut keys_b = b.keys().collect::, _>>().unwrap(); keys_a.sort(); keys_b.sort(); assert_eq!(keys_a, keys_b); for key in keys_a { let item_a = a.get_hash_item(&key).unwrap(); let item_b = b.get_hash_item(&key).unwrap(); let data_a = a.file.dereference(item_a.value_ptr(), 1).unwrap(); let data_b = b.file.dereference(item_b.value_ptr(), 1).unwrap(); match item_a.typ().unwrap() { HashItemType::Value => { assert_gvariant_eq( data_a, data_b, &format!("Comparing gvariant values with key '{}'", key), ); assert_bytes_eq( data_a, 
data_b, &format!("Comparing values with key '{}'", key), ); } HashItemType::HashTable => byte_compare_gvdb_hash_table( &a.get_hash_table(&key).expect(context), &b.get_hash_table(&key).expect(context), &format!("{context}: Comparing hash tables with key '{key}'"), ), HashItemType::Container => { // We don't compare containers, only their length if data_a.len() != data_b.len() { // The lengths should not be different. For context we will compare the data assert_bytes_eq( data_a, data_b, &format!("Containers with key '{}' have different lengths", key), ); } } } assert_eq!(item_a.hash_value(), item_b.hash_value()); assert_eq!(item_a.key_size(), item_b.key_size()); assert_eq!(item_a.typ().unwrap(), item_b.typ().unwrap()); assert_eq!(item_a.value_ptr().size(), item_b.value_ptr().size()); } } #[test] fn assert_bytes_eq1() { assert_bytes_eq(&[1, 2, 3], &[1, 2, 3], "test"); } #[test] fn assert_bytes_eq2() { // b is exactly 16 bytes long to test "b is too small" panic assert_bytes_eq( b"help i am stuck in a test case", b"help i am stuck in a test case", "test", ); } #[test] #[should_panic] fn assert_bytes_eq_fail1() { assert_bytes_eq(&[1, 2, 4], &[1, 2, 3], "test"); } #[test] #[should_panic] fn assert_bytes_eq_fail2() { assert_bytes_eq(&[1, 2, 3, 4], &[1, 2, 3], "test"); } #[test] #[should_panic] fn assert_bytes_eq_fail3() { assert_bytes_eq(&[1, 2, 3], &[1, 2, 3, 4], "test"); } #[test] #[should_panic] fn assert_bytes_eq_fail4() { // b is exactly 16 bytes long to test "b is too small" panic assert_bytes_eq( b"help i am stuck in a test case", b"help i am stuck in a test cas", "test", ); } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/src/util.rs������������������������������������������������������������������������������0000644�0000000�0000000�00000002720�10461020230�0012666�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������/// Perform the djb2 hash function pub fn djb_hash(key: &str) -> u32 { let mut hash_value: u32 = 5381; for char in key.bytes() { hash_value = hash_value.wrapping_mul(33).wrapping_add(char as u32); } hash_value } /// Align an arbitrary offset to a multiple of 2 /// The result is undefined for alignments that are not a multiple of 2 pub fn align_offset(offset: usize, alignment: usize) -> usize { //(alignment - (offset % alignment)) % alignment (offset + alignment - 1) & !(alignment - 1) } #[cfg(test)] mod test { use super::align_offset; #[test] fn align() { assert_eq!(align_offset(17, 16), 32); assert_eq!(align_offset(13, 8), 16); assert_eq!(align_offset(1, 8), 8); assert_eq!(align_offset(2, 8), 8); assert_eq!(align_offset(3, 8), 8); assert_eq!(align_offset(4, 8), 8); assert_eq!(align_offset(5, 8), 8); assert_eq!(align_offset(6, 8), 8); assert_eq!(align_offset(7, 8), 8); assert_eq!(align_offset(8, 8), 8); assert_eq!(align_offset(1, 4), 4); assert_eq!(align_offset(2, 4), 4); assert_eq!(align_offset(3, 4), 4); assert_eq!(align_offset(4, 4), 4); assert_eq!(align_offset(0, 2), 0); assert_eq!(align_offset(1, 2), 2); assert_eq!(align_offset(2, 2), 2); assert_eq!(align_offset(3, 2), 4); 
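        // Added worked example (not in the original tests): the implementation
        // uses the bit trick `(offset + alignment - 1) & !(alignment - 1)`,
        // which is why `alignment` must be a power of two. For 17 aligned to
        // 16: (17 + 15) & !15 == 32.
        assert_eq!((17 + 16 - 1) & !(16 - 1), 32);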
        assert_eq!(align_offset(0, 1), 0);
        assert_eq!(align_offset(1, 1), 1);
    }
}
gvdb-0.7.1/src/write/error.rs
use std::fmt::{Debug, Display, Formatter};
use std::path::PathBuf;

/// Error type for [`FileWriter`][crate::write::FileWriter]
#[non_exhaustive]
pub enum Error {
    /// Generic I/O error. Path contains an optional filename if applicable
    Io(std::io::Error, Option<PathBuf>),
    /// An internal inconsistency was found
    Consistency(String),
    /// An error occurred when serializing variant data with zvariant
    ZVariant(zvariant::Error),
}

impl std::error::Error for Error {}

impl From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Self {
        Self::Io(err, None)
    }
}

impl From<zvariant::Error> for Error {
    fn from(err: zvariant::Error) -> Self {
        Self::ZVariant(err)
    }
}

impl Display for Error {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::Io(err, path) => {
                if let Some(path) = path {
                    write!(f, "I/O error for file '{}': {}", path.display(), err)
                } else {
                    write!(f, "I/O error: {}", err)
                }
            }
            Error::Consistency(context) => {
                write!(f, "Internal inconsistency: {}", context)
            }
            Error::ZVariant(err) => {
                write!(f, "Error writing ZVariant data: {}", err)
            }
        }
    }
}

impl Debug for Error {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        Display::fmt(self, f)
    }
}

/// The Result type for [`Error`]
pub type Result<T> = std::result::Result<T, Error>;

#[cfg(test)]
mod test {
    use super::Error;
    use matches::assert_matches;
    use std::path::PathBuf;

    #[test]
    fn from() {
        let err = Error::from(zvariant::Error::Message("Test".to_string()));
        assert_matches!(err, Error::ZVariant(_));
        assert!(format!("{}", err).contains("ZVariant"));

        let err = Error::Io(
            std::io::Error::from(std::io::ErrorKind::NotFound),
            Some(PathBuf::from("test_path")),
        );
        assert_matches!(err, Error::Io(..));
        assert!(format!("{}", err).contains("test_path"));
    }
}
gvdb-0.7.1/src/write/file.rs
use crate::read::HashHeader;
use crate::read::HashItem;
use crate::read::Header;
use crate::read::Pointer;
use crate::util::align_offset;
use crate::write::error::{Error, Result};
use crate::write::hash::SimpleHashTable;
use crate::write::item::HashValue;
use std::collections::{HashMap, VecDeque};
use std::io::Write;
use std::mem::size_of;
use zerocopy::AsBytes;

/// Create hash tables for use in GVDB files
///
/// # Example
///
/// ```
/// use glib::prelude::*;
/// use gvdb::write::{FileWriter, HashTableBuilder};
///
/// let file_writer = FileWriter::new();
/// let mut table_builder = HashTableBuilder::new();
/// table_builder
///     .insert_string("string", "test string")
///     .unwrap();
/// let gvdb_data = file_writer.write_to_vec_with_table(table_builder).unwrap();
/// ```
#[derive(Debug)]
pub struct HashTableBuilder<'a> {
    items: HashMap<String, HashValue<'a>>,
    path_separator: Option<String>,
}

impl<'a> HashTableBuilder<'a> {
    /// Create a new empty HashTableBuilder with the default path separator `/`
    ///
    /// ```
    /// # use gvdb::write::HashTableBuilder;
    /// let mut table_builder = HashTableBuilder::new();
    /// ```
    pub fn new() -> Self {
        Self::with_path_separator(Some("/"))
    }

    /// Create a new empty HashTableBuilder with a different path separator than `/` or none at all
    ///
    /// ```
    /// # use gvdb::write::HashTableBuilder;
    /// let mut table_builder = HashTableBuilder::with_path_separator(Some(":"));
    /// ```
    pub fn with_path_separator(sep: Option<&str>) -> Self {
        Self {
            items: Default::default(),
            path_separator: sep.map(|s| s.to_string()),
        }
    }

    /// Insert the provided [`HashValue`] for the key.
    fn insert_item_value(
        &mut self,
        key: &(impl ToString + ?Sized),
        item: HashValue<'a>,
    ) -> Result<()> {
        let key = key.to_string();

        if let Some(sep) = &self.path_separator {
            let mut this_key = "".to_string();
            let mut last_key: Option<String> = None;

            for segment in key.split(sep) {
                this_key += segment;
                if this_key != key {
                    this_key += sep;
                }

                if let Some(last_key) = last_key {
                    if let Some(last_item) = self.items.get_mut(&last_key) {
                        if let HashValue::Container(ref mut container) = last_item {
                            if !container.contains(&this_key) {
                                container.push(this_key.clone());
                            }
                        } else {
                            return Err(Error::Consistency(format!(
                                "Parent item with key '{}' is not of type container",
                                this_key
                            )));
                        }
                    } else {
                        let parent_item = HashValue::Container(vec![this_key.clone()]);
                        self.items.insert(last_key.to_string(), parent_item);
                    }
                }

                if key == this_key {
                    // The item we actually want to insert
                    self.items.insert(key.to_string(), item);
                    break;
                }

                last_key = Some(this_key.clone());
            }
        } else {
            self.items.insert(key, item);
        }

        Ok(())
    }

    /// Insert Value `item` for `key`
    ///
    /// ```
    /// use zvariant::Value;
    /// let mut table_builder = gvdb::write::HashTableBuilder::new();
    /// let variant = Value::new(123u32);
    /// table_builder.insert_value("variant_123", variant);
    /// ```
    pub fn insert_value(
        &mut self,
        key: &(impl ToString + ?Sized),
        value: zvariant::Value<'a>,
    ) -> Result<()> {
        let item = HashValue::Value(value);
        self.insert_item_value(key, item)
    }

    /// Insert `item` for `key` where item needs to be `Into<zvariant::Value>`
    ///
    /// ```
    /// use zvariant::Value;
    /// let mut table_builder = gvdb::write::HashTableBuilder::new();
    /// let value = 123u32;
    /// table_builder.insert("variant_123", value);
    /// ```
    pub fn insert<T>(&mut self, key: &(impl ToString + ?Sized), value: T) -> Result<()>
    where
        T: Into<zvariant::Value<'a>>,
    {
        let item = HashValue::Value(value.into());
        self.insert_item_value(key, item)
    }

    /// Insert GVariant `item` for `key`
    ///
    /// ```
    /// # #[cfg(feature = "glib")]
    /// # use glib::prelude::*;
    /// #
    /// let mut table_builder = gvdb::write::HashTableBuilder::new();
    /// let variant = 123u32.to_variant();
    /// table_builder.insert_gvariant("variant_123", variant);
    /// ```
    #[cfg(feature = "glib")]
    pub fn insert_gvariant(
        &mut self,
        key: &(impl ToString + ?Sized),
        variant: glib::Variant,
    ) -> Result<()> {
        let item = HashValue::GVariant(variant);
        self.insert_item_value(key, item)
    }

    /// Convenience method to create a string type GVariant for `value` and insert it at `key`
    ///
    /// ```
    /// # let mut table_builder = gvdb::write::HashTableBuilder::new();
    /// table_builder.insert_string("string_key", "string_data");
    /// ```
    pub fn insert_string(
        &mut self,
        key: &(impl ToString + ?Sized),
        string: &(impl ToString + ?Sized),
    ) -> Result<()> {
        let variant = zvariant::Value::new(string.to_string());
        self.insert_value(key, variant)
    }

    /// Convenience method to create a byte type GVariant for `value` and insert it at `key`
    ///
    /// ```
    /// # let mut table_builder = gvdb::write::HashTableBuilder::new();
    /// table_builder.insert_bytes("bytes", &[1, 2, 3, 4, 5]);
    /// ```
    pub fn insert_bytes(&mut self, key: &(impl ToString + ?Sized), bytes: &'a [u8]) -> Result<()> {
        let value = zvariant::Value::new(bytes);
        self.insert_value(key, value)
    }

    /// Insert an entire hash table at `key`.
    ///
    /// ```
    /// # use zvariant::Value;
    /// # use gvdb::write::HashTableBuilder;
    /// let mut table_builder = HashTableBuilder::new();
    /// let mut table_builder_2 = HashTableBuilder::new();
    /// table_builder_2
    ///     .insert_value("int", Value::new(42u32))
    ///     .unwrap();
    ///
    /// table_builder
    ///     .insert_table("table", table_builder_2)
    ///     .unwrap();
    /// ```
    pub fn insert_table(
        &mut self,
        key: &(impl ToString + ?Sized),
        table_builder: HashTableBuilder<'a>,
    ) -> Result<()> {
        let item = HashValue::TableBuilder(table_builder);
        self.insert_item_value(key, item)
    }

    /// The number of items contained in the hash table builder
    pub fn len(&self) -> usize {
        self.items.len()
    }

    /// Whether the hash table builder contains no items
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }

    pub(crate) fn build(mut self) -> Result<SimpleHashTable<'a>> {
        let mut hash_table = SimpleHashTable::with_n_buckets(self.items.len());

        let mut keys: Vec<String> = self.items.keys().cloned().collect();
        keys.sort();

        for key in keys {
            let value = self.items.remove(&key).unwrap();
            hash_table.insert(&key, value);
        }

        for (key, item) in hash_table.iter() {
            if let HashValue::Container(container) = &*item.value_ref() {
                for child in container {
                    let child_item = hash_table.get(child);
                    if let Some(child_item) = child_item {
                        child_item.parent().replace(Some(item.clone()));
                    } else {
                        return Err(Error::Consistency(format!(
                            "Tried to set parent for child '{}' to '{}' but the child was not found.",
                            child, key
                        )));
                    }
                }
            }
        }

        Ok(hash_table)
    }
}

impl<'a> Default for HashTableBuilder<'a> {
    fn default() -> Self {
        Self::new()
    }
}
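// Illustrative sketch (added; not part of the original sources): with the
// default "/" separator, inserting a nested key implicitly synthesizes
// `HashValue::Container` parent items for every path segment, which is how
// `insert_item_value` above builds the directory-like structure.
#[cfg(test)]
mod path_separator_sketch {
    use super::*;

    #[test]
    fn implicit_parent_containers() {
        let mut builder = HashTableBuilder::new();
        builder.insert_string("a/b/c", "leaf").unwrap();
        // "a/" and "a/b/" are created as container items alongside the leaf.
        assert_eq!(builder.len(), 3);
    }
}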
#[derive(Debug)]
struct Chunk {
    // The pointer that points to the data where the chunk will be in memory in the finished file
    pointer: Pointer,

    // We use a boxed slice because this guarantees that the size is not changed afterwards
    data: Box<[u8]>,
}

impl Chunk {
    pub fn new(data: Box<[u8]>, pointer: Pointer) -> Self {
        Self { pointer, data }
    }

    pub fn data_mut(&mut self) -> &mut [u8] {
        &mut self.data
    }

    pub fn into_data(self) -> Box<[u8]> {
        self.data
    }

    pub fn pointer(&self) -> Pointer {
        self.pointer
    }
}

/// Create GVDB files
///
/// # Example
/// ```
/// use glib::prelude::*;
/// use gvdb::write::{FileWriter, HashTableBuilder};
///
/// fn create_gvdb_file() {
///     let mut file_writer = FileWriter::new();
///     let mut table_builder = HashTableBuilder::new();
///     table_builder
///         .insert_string("string", "test string")
///         .unwrap();
///     let file_data = file_writer.write_to_vec_with_table(table_builder).unwrap();
/// }
/// ```
pub struct FileWriter {
    offset: usize,
    chunks: VecDeque<Chunk>,
    byteswap: bool,
}

impl FileWriter {
    /// Create a new instance configured for writing little endian data (preferred endianness)
    /// ```
    /// let file_writer = gvdb::write::FileWriter::new();
    /// ```
    pub fn new() -> Self {
        #[cfg(target_endian = "little")]
        let byteswap = false;
        #[cfg(target_endian = "big")]
        let byteswap = true;

        Self::with_byteswap(byteswap)
    }

    /// Create a new instance configured for writing big endian data
    /// (not recommended for most use cases)
    /// ```
    /// let file_writer = gvdb::write::FileWriter::for_big_endian();
    /// ```
    pub fn for_big_endian() -> Self {
        #[cfg(target_endian = "little")]
        let byteswap = true;
        #[cfg(target_endian = "big")]
        let byteswap = false;

        Self::with_byteswap(byteswap)
    }

    /// Specify manually whether you want to swap the endianness of the file. The default is to
    /// always create a little-endian file
    fn with_byteswap(byteswap: bool) -> Self {
        let mut this = Self {
            offset: 0,
            chunks: Default::default(),
            byteswap,
        };

        this.allocate_empty_chunk(size_of::<Header>(), 1);
        this
    }

    /// Allocate a chunk
    fn allocate_chunk_with_data(
        &mut self,
        data: Box<[u8]>,
        alignment: usize,
    ) -> (usize, &mut Chunk) {
        // Align the data
        self.offset = align_offset(self.offset, alignment);

        // Calculate the pointer
        let offset_start = self.offset;
        let offset_end = offset_start + data.len();
        let pointer = Pointer::new(offset_start, offset_end);

        // Update the offset to the end of the chunk
        self.offset = offset_end;

        let chunk = Chunk::new(data, pointer);
        self.chunks.push_back(chunk);
        let index = self.chunks.len() - 1;
        (index, &mut self.chunks[index])
    }

    fn allocate_empty_chunk(&mut self, size: usize, alignment: usize) -> (usize, &mut Chunk) {
        let data = vec![0; size].into_boxed_slice();
        self.allocate_chunk_with_data(data, alignment)
    }

    fn add_value(&mut self, value: &zvariant::Value) -> Result<(usize, &mut Chunk)> {
        #[cfg(target_endian = "little")]
        let le = true;
        #[cfg(target_endian = "big")]
        let le = false;

        let data: Box<[u8]> = if le && !self.byteswap || !le && self.byteswap {
            let context = zvariant::serialized::Context::new_gvariant(zvariant::LE, 0);
            Box::from(&*zvariant::to_bytes(context, value)?)
        } else {
            let context = zvariant::serialized::Context::new_gvariant(zvariant::BE, 0);
            Box::from(&*zvariant::to_bytes(context, value)?)
        };

        Ok(self.allocate_chunk_with_data(data, 8))
    }

    #[cfg(feature = "glib")]
    fn add_gvariant(&mut self, variant: &glib::Variant) -> (usize, &mut Chunk) {
        let value = if self.byteswap {
            glib::Variant::from_variant(&variant.byteswap())
        } else {
            glib::Variant::from_variant(variant)
        };

        let normal = value.normal_form();
        let data = normal.data();
        self.allocate_chunk_with_data(data.to_vec().into_boxed_slice(), 8)
    }

    fn add_string(&mut self, string: &str) -> (usize, &mut Chunk) {
        let data = string.to_string().into_boxed_str().into_boxed_bytes();
        self.allocate_chunk_with_data(data, 1)
    }
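    // Added layout note (derived from the function below): a serialized hash
    // table chunk is laid out as
    //
    //   [HashHeader][bloom filter words][bucket start indices: u32 x n_buckets][HashItem x n_items]
    //
    // Each bucket entry stores the index of that bucket's first HashItem, and
    // the items of one bucket are written contiguously, so a reader can walk
    // bucket i from items[buckets[i]] up to the start index of the next bucket.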
    fn add_simple_hash_table(&mut self, table: SimpleHashTable) -> Result<(usize, &mut Chunk)> {
        for (index, (_bucket, item)) in table.iter().enumerate() {
            item.set_assigned_index(index as u32);
        }

        let header = HashHeader::new(5, 0, table.n_buckets() as u32);
        let items_len = table.n_items() * size_of::<HashItem>();
        let size = size_of::<HashHeader>()
            + header.bloom_words_len()
            + header.buckets_len()
            + items_len;

        let hash_buckets_offset = size_of::<HashHeader>() + header.bloom_words_len();
        let hash_items_offset = hash_buckets_offset + header.buckets_len();

        let (hash_table_chunk_index, hash_table_chunk) = self.allocate_empty_chunk(size, 4);
        let header = header.as_bytes();
        hash_table_chunk.data_mut()[0..header.len()].copy_from_slice(header);

        let mut n_item = 0;

        for bucket in 0..table.n_buckets() {
            let hash_bucket_start = hash_buckets_offset + bucket * size_of::<u32>();
            let hash_bucket_end = hash_bucket_start + size_of::<u32>();

            self.chunks[hash_table_chunk_index].data[hash_bucket_start..hash_bucket_end]
                .copy_from_slice(u32::to_le_bytes(n_item as u32).as_slice());

            for current_item in table.iter_bucket(bucket) {
                let parent = current_item
                    .parent_ref()
                    .as_ref()
                    .map(|p| p.assigned_index());

                let key = if let Some(parent) = &*current_item.parent_ref() {
                    current_item.key().strip_prefix(parent.key()).unwrap_or("")
                } else {
                    current_item.key()
                };

                if key.is_empty() {
                    return Err(Error::Consistency(format!(
                        "Item '{}' already exists in hash map or key is empty",
                        current_item.key()
                    )));
                }

                let key_ptr = self.add_string(key).1.pointer();
                let typ = current_item.value_ref().typ();

                let value_ptr = match current_item.value().take() {
                    HashValue::Value(value) => self.add_value(&value)?.1.pointer(),
                    #[cfg(feature = "glib")]
                    HashValue::GVariant(variant) => self.add_gvariant(&variant).1.pointer(),
                    HashValue::TableBuilder(tb) => self.add_table_builder(tb)?.1.pointer(),
                    HashValue::Container(children) => {
                        let size = children.len() * size_of::<u32>();
                        let chunk = self.allocate_empty_chunk(size, 4).1;

                        let mut offset = 0;
                        for child in children {
                            let child_item = table.get(&child);
                            if let Some(child_item) = child_item {
                                child_item.parent().replace(Some(current_item.clone()));

                                chunk.data_mut()[offset..offset + size_of::<u32>()]
                                    .copy_from_slice(&u32::to_le_bytes(
                                        child_item.assigned_index(),
                                    ));
                                offset += size_of::<u32>();
                            } else {
                                return Err(Error::Consistency(format!(
                                    "Child item '{}' not found for parent: '{}'",
                                    child, key
                                )));
                            }
                        }

                        chunk.pointer()
                    }
                };

                let hash_item =
                    HashItem::new(current_item.hash(), parent, key_ptr, typ, value_ptr);

                let hash_item_start = hash_items_offset + n_item * size_of::<HashItem>();
                let hash_item_end = hash_item_start + size_of::<HashItem>();

                self.chunks[hash_table_chunk_index].data[hash_item_start..hash_item_end]
                    .copy_from_slice(hash_item.as_bytes());

                n_item += 1;
            }
        }

        Ok((
            hash_table_chunk_index,
            &mut self.chunks[hash_table_chunk_index],
        ))
    }

    fn add_table_builder(
        &mut self,
        table_builder: HashTableBuilder,
    ) -> Result<(usize, &mut Chunk)> {
        self.add_simple_hash_table(table_builder.build()?)
    }

    fn file_size(&self) -> usize {
        self.chunks[self.chunks.len() - 1].pointer().end() as usize
    }

    fn serialize(mut self, root_chunk_index: usize, writer: &mut dyn Write) -> Result<usize> {
        let root_ptr = self
            .chunks
            .get(root_chunk_index)
            .ok_or_else(|| {
                Error::Consistency(format!("Root chunk with id {} not found", root_chunk_index))
            })?
            .pointer();

        let header = Header::new(self.byteswap, 0, root_ptr);
        self.chunks[0].data_mut()[0..size_of::<Header>()].copy_from_slice(header.as_bytes());

        let mut size = 0;
        for chunk in self.chunks.into_iter() {
            // Align
            if size < chunk.pointer().start() as usize {
                let padding = chunk.pointer().start() as usize - size;
                size += padding;
                writer.write_all(&vec![0; padding])?;
            }

            size += chunk.pointer().size();
            writer.write_all(&chunk.into_data())?;
        }

        Ok(size)
    }

    fn serialize_to_vec(self, root_chunk_index: usize) -> Result<Vec<u8>> {
        let mut vec = Vec::with_capacity(self.file_size());
        self.serialize(root_chunk_index, &mut vec)?;
        Ok(vec)
    }

    /// Write the GVDB file into the provided [`std::io::Write`]
    pub fn write_with_table(
        mut self,
        table_builder: HashTableBuilder,
        writer: &mut dyn Write,
    ) -> Result<usize> {
        let index = self.add_table_builder(table_builder)?.0;
        self.serialize(index, writer)
    }

    /// Create a [`Vec<u8>`] with the GVDB file data
    pub fn write_to_vec_with_table(mut self, table_builder: HashTableBuilder) -> Result<Vec<u8>> {
        let index = self.add_table_builder(table_builder)?.0;
        self.serialize_to_vec(index)
    }
}

impl Default for FileWriter {
    fn default() -> Self {
        Self::new()
    }
}
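// Illustrative round-trip sketch (added; not part of the original sources,
// though it mirrors the crate's own doc examples): data written through
// `write_to_vec_with_table` can be parsed back with this crate's reader.
#[cfg(test)]
mod round_trip_sketch {
    use super::*;
    use crate::read::File;
    use std::borrow::Cow;

    #[test]
    fn write_then_read() {
        let mut table = HashTableBuilder::new();
        table.insert("answer", 42u32).unwrap();

        let data = FileWriter::new().write_to_vec_with_table(table).unwrap();
        let file = File::from_bytes(Cow::Owned(data)).unwrap();

        let value = file.hash_table().unwrap().get_value("answer").unwrap();
        assert_eq!(u32::try_from(value), Ok(42));
    }
}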
#[cfg(test)]
mod test {
    use super::*;
    use crate::{
        read::{File, HashItemType},
        test::byte_compare_file_4,
    };
    use matches::assert_matches;
    use std::borrow::Cow;
    use std::io::Cursor;

    use crate::test::{
        assert_bytes_eq, assert_is_file_1, assert_is_file_2, byte_compare_file_1,
        byte_compare_file_2,
    };

    #[allow(unused_imports)]
    use pretty_assertions::{assert_eq, assert_ne, assert_str_eq};

    #[test]
    fn derives() {
        let ht_builder = HashTableBuilder::default();
        println!("{:?}", ht_builder);

        let chunk = Chunk::new(Box::new([0; 0]), Pointer::NULL);
        assert!(format!("{:?}", chunk).contains("Chunk"));
    }

    #[test]
    fn hash_table_builder1() {
        let mut builder = HashTableBuilder::new();
        assert!(builder.is_empty());

        builder.insert_string("string", "Test").unwrap();
        builder
            .insert_value("123", zvariant::Value::new(123u32))
            .unwrap();
        assert!(!builder.is_empty());
        assert_eq!(builder.len(), 2);

        let mut builder2 = HashTableBuilder::new();
        builder2.insert_bytes("bytes", &[1, 2, 3, 4]).unwrap();
        builder.insert_table("table", builder2).unwrap();

        let table = builder.build().unwrap();
        assert_eq!(
            table.get("string").unwrap().value_ref().value().unwrap(),
            &zvariant::Value::new("Test")
        );
        assert_eq!(
            table.get("123").unwrap().value_ref().value().unwrap(),
            &zvariant::Value::new(123u32)
        );

        let item = table.get("table").unwrap();
        assert_matches!(item.value_ref().table_builder(), Some(_));
        let val = item.value().take();
        assert_matches!(val, HashValue::TableBuilder(..));
        let HashValue::TableBuilder(tb) = val else {
            panic!("Invalid value");
        };

        let table2 = tb.build().unwrap();
        let data: &[u8] = &[1, 2, 3, 4];
        assert_eq!(
            table2.get("bytes").unwrap().value_ref().value().unwrap(),
            &zvariant::Value::new(data)
        );
    }

    #[test]
    fn hash_table_builder2() {
        let mut builder = HashTableBuilder::new();

        // invalid path
        builder.insert_string("string/", "collision").unwrap();
        let err = builder.insert_string("string/test", "test").unwrap_err();
        assert_matches!(err, Error::Consistency(_));

        let mut builder = HashTableBuilder::with_path_separator(None);

        // invalid path but this isn't important as path handling is turned off
        builder.insert_string("string/", "collision").unwrap();
        builder.insert_string("string/test", "test").unwrap();
    }

    #[test]
    fn file_builder_file_1() {
        let mut file_builder = FileWriter::new();
        let mut table_builder = HashTableBuilder::new();

        let value1 = 1234u32;
        let value2 = 98765u32;
        let value3 = "TEST_STRING_VALUE";
        let tuple_data = (value1, value2, value3);
        let variant = zvariant::Value::new(tuple_data);
        table_builder.insert_value("root_key", variant).unwrap();

        let root_index = file_builder.add_table_builder(table_builder).unwrap().0;
        let bytes = file_builder.serialize_to_vec(root_index).unwrap();

        let root = File::from_bytes(Cow::Owned(bytes)).unwrap();
        println!("{:?}", root);

        assert_is_file_1(&root);
        byte_compare_file_1(&root);
    }

    #[test]
    fn file_builder_file_2() {
        let mut file_builder = FileWriter::for_big_endian();
        let mut table_builder = HashTableBuilder::new();

        table_builder
            .insert_string("string", "test string")
            .unwrap();

        let mut table_builder_2 = HashTableBuilder::new();
        table_builder_2.insert("int", 42u32).unwrap();

        table_builder
            .insert_table("table", table_builder_2)
            .unwrap();

        let root_index = file_builder.add_table_builder(table_builder).unwrap().0;
        let bytes = file_builder.serialize_to_vec(root_index).unwrap();

        let root = File::from_bytes(Cow::Owned(bytes)).unwrap();
        println!("{:?}", root);

        assert_is_file_2(&root);
        byte_compare_file_2(&root);
    }

    #[test]
    fn file_builder_file_4() {
        let mut writer = FileWriter::new();
        let mut table_builder = HashTableBuilder::new();

        let mut dict = HashMap::<&str, zvariant::Value>::new();
        dict.insert("key1", "value1".into());
        dict.insert("key2", 2u32.into());
        let value = ("arg0", dict);
        table_builder.insert("struct", value).unwrap();

        let root_index = writer.add_table_builder(table_builder).unwrap().0;
        let bytes = writer.serialize_to_vec(root_index).unwrap();
        let root = File::from_bytes(Cow::Owned(bytes)).unwrap();
        println!("{:?}", root);

        byte_compare_file_4(&root);
    }

    #[test]
    fn reproducible_build() {
        let mut last_data: Option<Vec<u8>> = None;

        for _ in 0..100 {
            let file_builder = FileWriter::new();
            let mut table_builder = HashTableBuilder::new();

            for num in 0..200 {
                let str = format!("{}", num);
                table_builder.insert_string(&str, &str).unwrap();
            }

            let data = file_builder.write_to_vec_with_table(table_builder).unwrap();

            if let Some(last_data) = last_data {
                assert_bytes_eq(&last_data, &data, "Reproducible builds");
            }

            last_data = Some(data);
        }
    }

    #[test]
    fn big_endian() {
        let mut file_builder = FileWriter::for_big_endian();
        let mut table_builder = HashTableBuilder::new();

        let value1 = 1234u32;
        let value2 = 98765u32;
        let value3 = "TEST_STRING_VALUE";
        let tuple_data = (value1, value2, value3);
        let variant = zvariant::Value::new(tuple_data);
        table_builder.insert_value("root_key", variant).unwrap();

        let root_index = file_builder.add_table_builder(table_builder).unwrap().0;
        let bytes = file_builder.serialize_to_vec(root_index).unwrap();

        // "GVariant" byteswapped at 32 bit boundaries is the header for big-endian GVariant files
        assert_eq!("raVGtnai", std::str::from_utf8(&bytes[0..8]).unwrap());

        let root = File::from_bytes(Cow::Owned(bytes)).unwrap();
        println!("{:?}", root);

        assert_is_file_1(&root);
    }

    #[test]
    fn container() {
        let mut file_builder = FileWriter::new();
        let mut table_builder = HashTableBuilder::new();

        table_builder
            .insert_string("contained/string", "str")
            .unwrap();

        let root_index = file_builder.add_table_builder(table_builder).unwrap().0;
        let bytes = file_builder.serialize_to_vec(root_index).unwrap();
        let root = File::from_bytes(Cow::Owned(bytes)).unwrap();

        let container_item = root
            .hash_table()
            .unwrap()
            .get_hash_item("contained/")
            .unwrap();
        assert_eq!(container_item.typ().unwrap(), HashItemType::Container);

        println!("{:?}", root);
    }

    #[test]
    fn missing_root() {
        let file = FileWriter::new();
        assert_matches!(file.serialize_to_vec(1), Err(Error::Consistency(_)));
    }

    #[test]
    fn missing_child() {
        let mut table = HashTableBuilder::new();
        let item = HashValue::Container(vec!["missing".to_string()]);
        table.insert_item_value("test", item).unwrap();
        assert_matches!(table.build(), Err(Error::Consistency(_)));
    }

    #[test]
    fn empty_key() {
        let mut table = HashTableBuilder::new();
        table.insert_string("", "test").unwrap();
        let file = FileWriter::new();
        let err = file.write_to_vec_with_table(table).unwrap_err();
        assert_matches!(err, Error::Consistency(_))
    }

    #[test]
    fn remove_child() {
        let mut table_builder = HashTableBuilder::new();
        table_builder.insert_string("test/test", "test").unwrap();
        table_builder.items.remove("test/test");

        let file = FileWriter::new();
        let err = file.write_to_vec_with_table(table_builder).unwrap_err();
        assert_matches!(err, Error::Consistency(_))
    }

    #[test]
    fn remove_child2() {
        let mut table_builder = HashTableBuilder::new();
        table_builder.insert_string("test/test", "test").unwrap();
        let mut table = table_builder.build().unwrap();
        table.remove("test/test");

        let mut file = FileWriter::new();
        let err = file.add_simple_hash_table(table).unwrap_err();
        assert_matches!(err, Error::Consistency(_))
    }

    #[test]
    fn io_error() {
        let file = FileWriter::default();

        // This buffer is intentionally too small to result in I/O error
        let buffer = [0u8; 10];
        let mut cursor = Cursor::new(buffer);

        let mut table = HashTableBuilder::new();
        table.insert("test", "test").unwrap();
        let err = file.write_with_table(table, &mut cursor).unwrap_err();
        assert_matches!(err, Error::Io(_, _));
        assert!(format!("{}", err).contains("I/O error"));
        assert!(format!("{:?}", err).contains("I/O error"));
    }
}
#[cfg(all(feature = "glib", test))]
mod test_glib {
    use crate::read::File;
    use crate::test::{assert_gvariant_eq, byte_compare_file_4};
    use crate::write::hash::SimpleHashTable;
    use crate::write::item::HashValue;
    use crate::write::{FileWriter, HashTableBuilder};
    use glib::prelude::*;
    use std::borrow::Cow;

    #[test]
    fn simple_hash_table() {
        let mut table: SimpleHashTable = SimpleHashTable::with_n_buckets(10);
        let item = HashValue::GVariant("test".to_variant());
        table.insert("test", item);
        assert_eq!(table.n_items(), 1);
        assert_eq!(
            table.get("test").unwrap().value_ref().gvariant().unwrap(),
            &"test".to_variant()
        );
    }

    #[test]
    fn hash_table_builder() {
        let mut table = HashTableBuilder::new();
        table.insert_gvariant("test", "test".to_variant()).unwrap();
        let simple_ht = table.build().unwrap();
        assert_eq!(
            simple_ht
                .get("test")
                .unwrap()
                .value_ref()
                .gvariant()
                .unwrap(),
            &"test".to_variant()
        );
    }

    #[test]
    fn file_writer() {
        for byteswap in [true, false] {
            let mut table = HashTableBuilder::default();
            table.insert_gvariant("test", "test".to_variant()).unwrap();
            let writer = FileWriter::with_byteswap(byteswap);
            let _ = writer.write_to_vec_with_table(table).unwrap();
        }
    }

    #[test]
    fn file_builder_file_4_glib() {
        let mut writer = FileWriter::new();
        let mut table_builder = HashTableBuilder::new();

        let map = glib::VariantDict::new(None);
        map.insert("key1", "value1");
        map.insert("key2", 2u32);
        let value = ("arg0", map).to_variant();
        table_builder.insert_gvariant("struct", value).unwrap();

        let root_index = writer.add_table_builder(table_builder).unwrap().0;
        let bytes = writer.serialize_to_vec(root_index).unwrap();
        let root = File::from_bytes(Cow::Owned(bytes)).unwrap();
        println!("{:?}", root);

        byte_compare_file_4(&root);
    }

    #[test]
    /// Regression test for https://github.com/dbus2/zbus/issues/868
    fn gvariant_vs_zvariant() {
        let mut map_glib = std::collections::HashMap::<&str, &str>::new();
        map_glib.insert("k", "v");
        let variant_glib = glib::Variant::from_variant(&map_glib.to_variant()).normal_form();
        let data_glib = variant_glib.data();

        let mut map_zvariant = std::collections::HashMap::<&str, &str>::new();
        map_zvariant.insert("k", "v");
        let ctxt = zvariant::serialized::Context::new_gvariant(zvariant::LE, 0);
        let data_zvariant = zvariant::to_bytes(ctxt, &zvariant::Value::new(map_zvariant)).unwrap();

        assert_gvariant_eq(data_glib, &data_zvariant, "gvariant vs zvariant");
    }
}
gvdb-0.7.1/src/write/hash.rs
use crate::util::djb_hash;
use crate::write::item::{HashItemBuilder, HashValue};
use std::rc::Rc;

/// A hash table with a fixed number of buckets.
///
/// This is used as an intermediate representation before serializing
/// hash table data in a GVDB file.
#[derive(Debug)]
pub struct SimpleHashTable<'a> {
    buckets: Vec<Option<Rc<HashItemBuilder<'a>>>>,
    n_items: usize,
}

impl<'a> SimpleHashTable<'a> {
    /// Create a hash table with a number of buckets.
    pub fn with_n_buckets(n_buckets: usize) -> Self {
        let mut buckets = Vec::with_capacity(n_buckets);
        buckets.resize_with(n_buckets, || None);

        Self {
            buckets,
            n_items: 0,
        }
    }

    /// The number of buckets of the hash table. This number is fixed and does not change.
    pub fn n_buckets(&self) -> usize {
        self.buckets.len()
    }

    /// How many items are contained in the hash table.
    pub fn n_items(&self) -> usize {
        self.n_items
    }

    /// Retrieve the hash bucket for the provided [`u32`] hash value
    fn hash_bucket(&self, hash_value: u32) -> usize {
        (hash_value % self.buckets.len() as u32) as usize
    }

    /// Insert a new item into the hash table.
    ///
    /// Returns the created hash item.
    pub fn insert(&mut self, key: &str, item: HashValue<'a>) -> Rc<HashItemBuilder<'a>> {
        let hash_value = djb_hash(key);
        let bucket = self.hash_bucket(hash_value);

        let item = Rc::new(HashItemBuilder::new(key, hash_value, item));
        let replaced_item = std::mem::replace(&mut self.buckets[bucket], Some(item.clone()));
        if let Some(replaced_item) = replaced_item {
            if replaced_item.key() == key {
                // Replace
                self.buckets[bucket]
                    .as_ref()
                    .unwrap()
                    .next()
                    .replace(replaced_item.next().take());
            } else {
                // Insert
                self.buckets[bucket]
                    .as_ref()
                    .unwrap()
                    .next()
                    .replace(Some(replaced_item));
                self.n_items += 1;
            }
        } else {
            // Insert to empty bucket
            self.n_items += 1;
        }

        item
    }

    #[allow(dead_code)]
    /// Remove the item with the specified key
    pub fn remove(&mut self, key: &str) -> bool {
        let hash_value = djb_hash(key);
        let bucket = self.hash_bucket(hash_value);

        // Remove the item if it already exists
        if let Some((previous, item)) = self.get_from_bucket(key, bucket) {
            if let Some(previous) = previous {
                previous.next().replace(item.next().take());
            } else {
                self.buckets[bucket] = item.next().take();
            }

            self.n_items -= 1;
            true
        } else {
            false
        }
    }
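    // Added worked example (not in the original sources): djb_hash("test")
    // evaluates to 2090756197, so with 10 buckets the key "test" lands in
    // bucket 2090756197 % 10 == 7. Colliding keys are chained through the
    // `HashItemBuilder::next` pointers, with the newest entry at the head.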
    /// Retrieve an item with the specified key from the specified bucket.
    fn get_from_bucket(
        &self,
        key: &str,
        bucket: usize,
    ) -> Option<(Option<Rc<HashItemBuilder<'a>>>, Rc<HashItemBuilder<'a>>)> {
        let mut item = self.buckets.get(bucket)?.clone();
        let mut previous = None;

        while let Some(current_item) = item {
            if current_item.key() == key {
                return Some((previous, current_item));
            } else {
                previous = Some(current_item.clone());
                item = current_item.next().borrow().clone();
            }
        }

        None
    }

    /// Returns an item corresponding to the key.
    pub fn get(&self, key: &str) -> Option<Rc<HashItemBuilder<'a>>> {
        let hash_value = djb_hash(key);
        let bucket = self.hash_bucket(hash_value);
        self.get_from_bucket(key, bucket).map(|r| r.1)
    }

    /// Iterator over the hash table items.
    pub fn iter(&self) -> SimpleHashTableIter<'_, 'a> {
        SimpleHashTableIter {
            hash_table: self,
            bucket: 0,
            last_item: None,
        }
    }

    /// Iterator over the items in the specified bucket.
    pub fn iter_bucket(&self, bucket: usize) -> SimpleHashTableBucketIter<'_, 'a> {
        SimpleHashTableBucketIter {
            hash_table: self,
            bucket,
            last_item: None,
        }
    }
}

/// Iterator over the items in a specific bucket of a [`SimpleHashTable`].
pub struct SimpleHashTableBucketIter<'it, 'h> {
    hash_table: &'it SimpleHashTable<'h>,
    bucket: usize,
    last_item: Option<Rc<HashItemBuilder<'h>>>,
}

impl<'it, 'h> Iterator for SimpleHashTableBucketIter<'it, 'h> {
    type Item = Rc<HashItemBuilder<'h>>;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(last_item) = self.last_item.clone() {
            // First check if there are more items in this bucket
            if let Some(next_item) = &*last_item.next().borrow() {
                // Next item in the same bucket
                self.last_item = Some(next_item.clone());
                Some(next_item.clone())
            } else {
                // Last item in the bucket, return
                None
            }
        } else if let Some(Some(item)) = self.hash_table.buckets.get(self.bucket).cloned() {
            // We found something: Bucket exists and is not empty
            self.last_item = Some(item.clone());
            Some(item.clone())
        } else {
            None
        }
    }
}

/// Iterator over the items of a [`SimpleHashTable`].
pub struct SimpleHashTableIter<'it, 'h> {
    hash_table: &'it SimpleHashTable<'h>,
    bucket: usize,
    last_item: Option<Rc<HashItemBuilder<'h>>>,
}

impl<'it, 'h> Iterator for SimpleHashTableIter<'it, 'h> {
    type Item = (usize, Rc<HashItemBuilder<'h>>);

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(last_item) = self.last_item.clone() {
            // First check if there are more items in this bucket
            if let Some(next_item) = &*last_item.next().borrow() {
                // Next item in the same bucket
                self.last_item = Some(next_item.clone());
                return Some((self.bucket, next_item.clone()));
            } else {
                // Last item in the bucket, check the next bucket
                self.bucket += 1;
            }
        }

        while let Some(bucket_item) = self.hash_table.buckets.get(self.bucket) {
            self.last_item = None;

            // This bucket might be empty
            if let Some(item) = bucket_item {
                // We found something
                self.last_item = Some(item.clone());
                return Some((self.bucket, item.clone()));
            } else {
                // Empty bucket, continue with next bucket
                self.bucket += 1;
            }
        }

        // Nothing left
        None
    }
}

#[cfg(test)]
mod test {
    use std::collections::HashSet;

    use matches::assert_matches;

    use crate::write::hash::SimpleHashTable;
    use crate::write::item::HashValue;

    #[test]
    fn derives() {
        let table = SimpleHashTable::with_n_buckets(1);
        assert!(format!("{:?}", table).contains("SimpleHashTable"));
    }

    #[test]
    fn simple_hash_table() {
        let mut table: SimpleHashTable = SimpleHashTable::with_n_buckets(10);
        let item = HashValue::Value(zvariant::Value::new("test_overwrite"));
        table.insert("test", item);
        assert_eq!(table.n_items(), 1);

        let item2 = HashValue::Value(zvariant::Value::new("test"));
        table.insert("test", item2);
        assert_eq!(table.n_items(), 1);

        assert_eq!(
            table.get("test").unwrap().value_ref().value().unwrap(),
            &"test".into()
        );
    }

    #[test]
    fn simple_hash_table_2() {
        let mut table: SimpleHashTable = SimpleHashTable::with_n_buckets(10);
        for index in 0..20 {
            table.insert(&format!("{}", index), zvariant::Value::new(index).into());
        }

        assert_eq!(table.n_items(), 20);

        for index in 0..20 {
            assert_eq!(
                zvariant::Value::new(index),
                *table
                    .get(&format!("{}", index))
                    .unwrap()
                    .value_ref()
                    .value()
                    .unwrap()
            );
        }

        for index in 0..10 {
            let index = index * 2;
            assert!(table.remove(&format!("{}", index)));
        }

        for index in 0..20 {
            let item = table.get(&format!("{}", index));
            assert_eq!(index % 2 == 1, item.is_some());
        }

        assert!(!table.remove("50"));
    }

    #[test]
    fn simple_hash_table_iter() {
        let mut table: SimpleHashTable = SimpleHashTable::with_n_buckets(10);
        for index in 0..20 {
            table.insert(&format!("{}", index), zvariant::Value::new(index).into());
        }

        let mut iter = table.iter();
        for _ in 0..20 {
            let value: i32 = iter
                .next()
                .unwrap()
                .1
                .value()
                .borrow()
                .value()
                .unwrap()
                .try_into()
                .unwrap();
            assert_matches!(value, 0..=19);
        }
    }

    #[test]
    fn simple_hash_table_bucket_iter() {
        let mut table: SimpleHashTable = SimpleHashTable::with_n_buckets(10);
        for index in 0..20 {
            table.insert(&format!("{}", index), zvariant::Value::new(index).into());
        }

        let mut values: HashSet<i32> = (0..20).collect();
        for bucket in 0..table.n_buckets() {
            let iter = table.iter_bucket(bucket);
            for next in iter {
                let num: i32 = next.value().borrow().value().unwrap().try_into().unwrap();
                assert!(values.remove(&num));
            }
        }
    }
}
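// Illustrative sketch (added; not part of the original crate sources):
// two keys that fall into the same bucket are chained, and `insert` puts the
// newest item at the head of the chain. djb_hash("0") == 177621 and
// djb_hash("00") == 5861541 both end in 1, so with 10 buckets they collide.
#[cfg(test)]
mod bucket_chain_sketch {
    use crate::write::hash::SimpleHashTable;
    use crate::write::item::HashValue;

    #[test]
    fn head_insertion_chaining() {
        let mut table: SimpleHashTable = SimpleHashTable::with_n_buckets(10);
        table.insert("0", HashValue::Value(zvariant::Value::new(0u32)));
        table.insert("00", HashValue::Value(zvariant::Value::new(1u32)));
        assert_eq!(table.n_items(), 2);

        // The bucket iterator yields the chain head first: "00" was inserted
        // last, so it is returned before "0".
        let keys: Vec<String> = table
            .iter_bucket(1)
            .map(|item| item.key().to_string())
            .collect();
        assert_eq!(keys, ["00", "0"]);
    }
}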
gvdb-0.7.1/src/write/item.rs
use crate::read::HashItemType;
use crate::write::file::HashTableBuilder;
use std::cell::{Cell, Ref, RefCell};
use std::rc::Rc;

/// Holds the value of a GVDB hash table
#[derive(Debug)]
pub enum HashValue<'a> {
    /// A zvariant::Value
    Value(zvariant::Value<'a>),

    /// A glib::Variant
    #[cfg(feature = "glib")]
    GVariant(glib::Variant),

    /// A nested hash table builder
    TableBuilder(HashTableBuilder<'a>),

    /// A child container with no additional value
    Container(Vec<String>),
}

impl<'a> Default for HashValue<'a> {
    fn default() -> Self {
        Self::Container(Vec::new())
    }
}

#[allow(dead_code)]
impl<'a> HashValue<'a> {
    pub fn typ(&self) -> HashItemType {
        match self {
            HashValue::Value(_) => HashItemType::Value,
            #[cfg(feature = "glib")]
            HashValue::GVariant(_) => HashItemType::Value,
            HashValue::TableBuilder(_) => HashItemType::HashTable,
            HashValue::Container(_) => HashItemType::Container,
        }
    }

    pub fn value(&self) -> Option<&zvariant::Value> {
        match self {
            HashValue::Value(value) => Some(value),
            _ => None,
        }
    }

    #[cfg(feature = "glib")]
    pub fn gvariant(&self) -> Option<&glib::Variant> {
        match self {
            HashValue::GVariant(variant) => Some(variant),
            _ => None,
        }
    }

    #[allow(dead_code)]
    pub fn table_builder(&self) -> Option<&HashTableBuilder> {
        match self {
            HashValue::TableBuilder(tb) => Some(tb),
            _ => None,
        }
    }

    pub fn container(&self) -> Option<&Vec<String>> {
        match self {
            HashValue::Container(children) => Some(children),
            _ => None,
        }
    }
}

impl<'a> From<zvariant::Value<'a>> for HashValue<'a> {
    fn from(var: zvariant::Value<'a>) -> Self {
        HashValue::Value(var)
    }
}

#[cfg(feature = "glib")]
impl<'a> From<glib::Variant> for HashValue<'a> {
    fn from(var: glib::Variant) -> Self {
        HashValue::GVariant(var)
    }
}

impl<'a> From<HashTableBuilder<'a>> for HashValue<'a> {
    fn from(tb: HashTableBuilder<'a>) -> Self {
        HashValue::TableBuilder(tb)
    }
}

#[derive(Debug)]
pub struct HashItemBuilder<'a> {
    /// The key string of the item
    key: String,

    /// The djb hash
    hash: u32,

    /// An arbitrary data container
    value: RefCell<HashValue<'a>>,

    /// The assigned index for the gvdb file
    assigned_index: Cell<u32>,

    /// The parent item of this builder item
    parent: RefCell<Option<Rc<HashItemBuilder<'a>>>>,

    /// The next item in the hash bucket
    next: RefCell<Option<Rc<HashItemBuilder<'a>>>>,
}

impl<'a> HashItemBuilder<'a> {
    pub fn new(key: &str, hash: u32, value: HashValue<'a>) -> Self {
        let key = key.to_string();
        Self {
            key,
            hash,
            value: RefCell::new(value),
            assigned_index: Cell::new(u32::MAX),
            parent: Default::default(),
            next: Default::default(),
        }
    }

    pub fn key(&self) -> &str {
        &self.key
    }

    pub fn hash(&self) -> u32 {
        self.hash
    }

    pub fn next(&self) -> &RefCell<Option<Rc<HashItemBuilder<'a>>>> {
        &self.next
    }

    pub fn value(&self) -> &RefCell<HashValue<'a>> {
        &self.value
    }

    pub fn value_ref(&self) -> Ref<HashValue<'a>> {
        self.value.borrow()
    }

    pub fn parent(&self) -> &RefCell<Option<Rc<HashItemBuilder<'a>>>> {
        &self.parent
    }

    pub fn parent_ref(&self) -> Ref<Option<Rc<HashItemBuilder<'a>>>> {
        self.parent.borrow()
    }

    pub fn assigned_index(&self) -> u32 {
        self.assigned_index.get()
    }

    pub fn set_assigned_index(&self, index: u32) {
        self.assigned_index.set(index);
    }
}

#[cfg(test)]
mod test {
    use crate::read::HashItemType;
    use crate::write::item::{HashItemBuilder, HashValue};
    use crate::write::HashTableBuilder;
    use matches::assert_matches;

    #[test]
    fn derives() {
        let value1: zvariant::Value = "test".into();
        let item1 = HashValue::Value(value1);
        println!("{:?}", item1);
    }

    #[test]
    fn item_value() {
        let value1: zvariant::Value = "test".into();
        let item1 = HashValue::Value(
            value1
                .try_clone()
                .expect("Value to not contain a file descriptor"),
        );
        assert_eq!(item1.typ(), HashItemType::Value);
        assert_eq!(item1.value().unwrap(), &value1);
        #[cfg(feature = "glib")]
        assert_matches!(item1.gvariant(), None);

        let value2 = HashTableBuilder::new();
        let item2 = HashValue::from(value2);
        assert_eq!(item2.typ(), HashItemType::HashTable);
        assert!(item2.table_builder().is_some());
        assert_matches!(item2.container(), None);

        let value3 = vec!["test".to_string(), "test2".to_string()];
        let item3 = HashValue::Container(value3.clone());
        assert_eq!(item3.typ(), HashItemType::Container);
        assert_eq!(item3.container().unwrap(), &value3);
        assert_matches!(item3.table_builder(), None);
    }

    #[test]
    fn builder_item() {
        let value1: zvariant::Value = "test".into();
        let item1 = HashValue::Value(value1);
        let item = HashItemBuilder::new("test", 0, item1);
        println!("{:?}", item);
        assert_eq!(item.key(), "test");
        assert_matches!(&*item.value().borrow(), HashValue::Value(_));
    }
}

#[cfg(all(feature = "glib", test))]
mod test_glib {
    use crate::read::HashItemType;
    use crate::write::item::HashValue;
    use glib::prelude::*;
    use matches::assert_matches;

    #[test]
    fn item_value() {
        let value1 = "test".to_variant();
        let item1 = HashValue::from(value1.clone());
        assert_eq!(item1.typ(), HashItemType::Value);
        assert_eq!(item1.gvariant().unwrap(), &value1);
        assert_matches!(item1.value(), None);
    }
}
gvdb-0.7.1/src/write.rs
mod error;
mod file;
mod hash;
mod item;

pub use error::{Error, Result};
pub use file::{FileWriter, HashTableBuilder};

/// Deprecated type aliases
mod deprecated {
    use super::*;

    /// Type has been renamed. Use [`FileWriter`] instead.
    #[deprecated = "Type has been renamed. Use gvdb::write::FileWriter instead."]
    pub type GvdbFileWriter = FileWriter;

    /// Type has been renamed. Use [`HashTableBuilder`] instead.
    #[deprecated = "Type has been renamed. Use gvdb::write::HashTableBuilder instead."]
    pub type GvdbHashTableBuilder<'a> = HashTableBuilder<'a>;

    /// Type has been renamed. Use [`Error`] instead.
    #[deprecated = "Type has been renamed. Use gvdb::write::Error instead."]
    pub type GvdbWriterError = Error;

    /// Type has been renamed. Use [`Result`] instead.
Use gvdb::write::Result instead."] pub type GvdbBuilderResult = Result; } pub use deprecated::*; ���������������������������������������������������������gvdb-0.7.1/test-data/gresource/icons/scalable/actions/online-symbolic.svg���������������������������0000644�0000000�0000000�00000002556�10461020230�0024614�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������ ��������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/test-data/gresource/icons/scalable/actions/send-symbolic.svg�����������������������������0000644�0000000�0000000�00000000531�10461020230�0024250�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������ �����������������������������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/test-data/gresource/json/test.json�������������������������������������������������������0000644�0000000�0000000�00000000063�10461020230�0017261�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������[ "test_string", 42, { "bool": true } ]�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/test-data/gresource/test.css�������������������������������������������������������������0000644�0000000�0000000�00000000047�10461020230�0016131�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������.test { background-color: black; } 
�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/test-data/gresource/test3.gresource.xml��������������������������������������������������0000644�0000000�0000000�00000001002�10461020230�0020211�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������ icons/scalable/actions/online-symbolic.svg icons/scalable/actions/send-symbolic.svg json/test.json test.css ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/test-data/test1.gvdb���������������������������������������������������������������������0000644�0000000�0000000�00000000150�10461020230�0014341�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������GVariant�����������<������(�������ÑÚÿÿÿÿ<����v�H���h���root_key����Ò��Í�TEST_STRING_VALUE��(uus)������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������gvdb-0.7.1/test-data/test2.gvdb���������������������������������������������������������������������0000644�0000000�0000000�00000000246�10461020230�0014350�0����������������������������������������������������������������������������������������������������ustar �����������������������������������������������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������raVGtnai�����������X������(����������ü¯“ÿÿÿÿX����v�`���n���úhÿÿÿÿn����H�t���˜���string��test string��stable����(�������0€ˆ ÿÿÿÿ˜����v� 
gvdb-0.7.1/test-data/test3.gresource
[compiled GResource bundle in gvdb format ("GVariant" signature); the four manifest entries are stored under the /gvdb/rs/test prefix as "(uuay)" records (size, flags, data); zlib streams are visible for test.css and send-symbolic.svg, while test.json is stored verbatim as ["test_string",42,{"bool":true}]]
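Reading any of these fixtures goes through gvdb::read. A minimal sketch, again with assumed 0.7 names (File::from_file, hash_table, get_value); the real signatures may differ, see the crate docs:

    use gvdb::read::File;
    use std::path::PathBuf;

    fn read_test1() {
        // Open the database; the optional "mmap" feature adds a
        // memory-mapped alternative to reading the whole file.
        let path = PathBuf::from("test-data/test1.gvdb");
        let file = File::from_file(&path).unwrap();

        // Every gvdb file is a hash table from string keys to
        // GVariant values; test1.gvdb stores "root_key" as "(uus)".
        let table = file.hash_table().unwrap();
        let value = table.get_value("root_key").unwrap();
        println!("{value:?}");
    }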
gvdb-0.7.1/test-data/test4.gvdb
[binary gvdb database, little-endian "GVariant" signature; a single entry "struct" holding a "(sa{sv})" value: the string "arg0" plus a dictionary mapping "key1" to the string "value1" and "key2" to a u (u32) value]
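The "(sa{sv})" signature above is plain GVariant, which the zvariant dependency (with its "gvariant" feature, as this crate enables) can produce directly. A sketch of encoding a comparable value — the serialization calls follow the zvariant 4 docs, but verify them before relying on this:

    use std::collections::HashMap;
    use zvariant::{serialized::Context, to_bytes, Value, LE};

    fn encode_struct() {
        // A (sa{sv}) value like the one in test4.gvdb: a string plus
        // a string -> variant dictionary. The u32 payload for "key2"
        // is an arbitrary stand-in; the fixture's actual number is
        // not recoverable from the dump.
        let mut dict: HashMap<&str, Value> = HashMap::new();
        dict.insert("key1", Value::from("value1")); // "s" entry
        dict.insert("key2", Value::from(7u32));     // "u" entry
        let value = ("arg0", dict);

        // gvdb stores values in the GVariant wire format.
        let ctxt = Context::new_gvariant(LE, 0);
        let encoded = to_bytes(ctxt, &value).unwrap();
        println!("{} bytes", encoded.len());
    }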