cpp_build-0.5.9/.cargo_vcs_info.json0000644000000001470000000000100130120ustar
{
  "git": {
    "sha1": "9eda55bdc2922fa50c36cee80990a96bdf5e65c2"
  },
  "path_in_vcs": "cpp_build"
}
cpp_build-0.5.9/Cargo.toml0000644000000025720000000000100110140ustar
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
name = "cpp_build"
version = "0.5.9"
authors = [
    "Nika Layzell ",
    "Olivier Goffart ",
]
description = "Cargo build script for the `cpp` crate"
documentation = "https://docs.rs/cpp_build"
readme = "README.md"
keywords = [
    "c",
    "cxx",
    "ffi",
    "compiler",
    "build-dependencies",
]
categories = ["development-tools::ffi"]
license = "MIT/Apache-2.0"
repository = "https://github.com/mystor/rust-cpp"

[package.metadata.docs.rs]
features = ["docs-only"]

[dependencies.cc]
version = "1.0.38"

[dependencies.cpp_common]
version = "=0.5.9"

[dependencies.lazy_static]
version = "1.0"

[dependencies.proc-macro2]
version = "1.0"

[dependencies.regex]
version = "1"

[dependencies.syn]
version = "2.0"
features = [
    "full",
    "visit",
]

[dependencies.unicode-xid]
version = "0.2"

[features]
docs-only = []
parallel = ["cc/parallel"]
cpp_build-0.5.9/Cargo.toml.orig000064400000000000000000000016451046102023000144730ustar 00000000000000
[package]
name = "cpp_build"
version = "0.5.9"
authors = ["Nika Layzell ", "Olivier Goffart "]
edition = "2018"
description = "Cargo build script for the `cpp` crate"
readme = "../README.md"
license = "MIT/Apache-2.0"
keywords = ["c", "cxx", "ffi", "compiler", "build-dependencies"]
categories = ["development-tools::ffi"]
repository = "https://github.com/mystor/rust-cpp"
documentation = "https://docs.rs/cpp_build"

[features]
# Ignore compilation errors of the C++ code when building the documentation, as the docs.rs server
# might not have the required libraries
docs-only = []
parallel = ["cc/parallel"]

[dependencies]
lazy_static = "1.0"
cc = "1.0.38"
cpp_common = { path = "../cpp_common", version = "=0.5.9" }
syn = { version = "2.0", features=["full", "visit"] }
proc-macro2 = "1.0"
regex = "1"
unicode-xid = "0.2"

[package.metadata.docs.rs]
features = [ "docs-only" ]
cpp_build-0.5.9/README.md000064400000000000000000000032751046102023000130660ustar 00000000000000
# rust-cpp - Embed C++ code directly in Rust

[![Documentation](https://docs.rs/cpp/badge.svg)](https://docs.rs/cpp/)

## Overview

`rust-cpp` is a build tool & macro which enables you to write C++ code inline in your Rust code.

```rust
let name = std::ffi::CString::new("World").unwrap();
let name_ptr = name.as_ptr();
let r = unsafe {
    cpp!([name_ptr as "const char *"] -> u32 as "int32_t" {
        std::cout << "Hello, " << name_ptr << std::endl;
        return 42;
    })
};
assert_eq!(r, 42)
```

The crate also helps expose C++ classes to Rust by automatically implementing traits such as Drop
and Clone (if the C++ type can be copied), among others.

```rust
cpp_class!{
    #[derive(PartialEq)]
    unsafe struct MyClass as "std::unique_ptr"
}
```

## Usage

For usage information and in-depth documentation, see the
[`cpp` crate module level documentation](https://docs.rs/cpp).
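
As a quick, minimal sketch of the typical setup (the linked documentation is authoritative):
`cpp_build` is added as a build dependency and invoked from `build.rs` with the path to the crate
root, so it can locate the `cpp!` / `cpp_class!` invocations and compile the embedded C++ into a
static library that cargo links in.

```rust
// build.rs -- minimal sketch
fn main() {
    // Parses the crate starting from its root file, extracts the C++ snippets
    // and compiles them with the system C++ compiler.
    cpp_build::build("src/lib.rs");
}
```
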
## Differences with the [`cxx`](https://cxx.rs) crate

This crate allows you to write C++ code "inline" within your Rust functions, while with the
[`cxx`](https://cxx.rs) crate, you have to write a bit of boilerplate to have calls to functions
declared in a different `.cpp` file.

Having C++ code inline can be helpful when calling into a C++ library for which one wants to make
many calls to small snippets; it would otherwise be tedious to write and maintain the boilerplate
for many small functions in different places.

These crates can also be used together. The `cxx` crate offers some useful types such as
`CxxString` that can also be used with this crate.

The `cxx` bridge does more type checking, which can avoid some classes of errors, while this crate
can only check that size and alignment match.
cpp_build-0.5.9/src/lib.rs000064400000000000000000000526751046102023000135100ustar 00000000000000
//! This crate is the `cpp` cargo build script implementation. It is useless
//! without the companion crates `cpp` and `cpp_macros`.
//!
//! For more information, see the
//! [`cpp` crate module level documentation](https://docs.rs/cpp).
#![allow(clippy::write_with_newline)]

mod strnom;

use cpp_common::*;
use lazy_static::lazy_static;
use std::collections::hash_map::{Entry, HashMap};
use std::env;
use std::fs::{create_dir, remove_dir_all, File};
use std::io::prelude::*;
use std::path::{Path, PathBuf};

mod parser;

fn warnln_impl(a: &str) {
    for s in a.lines() {
        println!("cargo:warning={}", s);
    }
}

macro_rules! warnln {
    ($($all:tt)*) => {
        $crate::warnln_impl(&format!($($all)*));
    }
}

// Like the write! macro, but adds the #line directive (pointing to this file).
// Note: the string literal must be on the same line as the macro
macro_rules! write_add_line {
    ($o:expr, $($e:tt)*) => {
        (|| {
            writeln!($o, "#line {} \"{}\"", line!(), file!().replace('\\', "\\\\"))?;
            write!($o, $($e)*)
        })()
    };
}

const INTERNAL_CPP_STRUCTS: &str = r#"
/* THIS FILE IS GENERATED BY rust-cpp. DO NOT EDIT */

#include "stdint.h" // For {u}intN_t
#include <new> // For placement new
#include <cstdlib> // For abort
#include <type_traits>
#include <utility>

namespace rustcpp {

// We can't just pass or return any type from extern "C" Rust functions (because the call
// convention may differ between the C++ type, and the Rust type).
// So we make sure to pass a trivial structure that only contains a pointer to the object we want to
// pass. The constructor of these helper classes contains a 'container' of the right size which will
// be allocated on the stack.
template struct return_helper { struct container { #if defined (_MSC_VER) && (_MSC_VER + 0 < 1900) char memory[sizeof(T)]; ~container() { reinterpret_cast(this)->~T(); } #else // The fact that it is in an union means it is properly sized and aligned, but we have // to call the destructor and constructor manually union { T memory; }; ~container() { memory.~T(); } #endif container() {} }; const container* data; return_helper(int, const container &c = container()) : data(&c) { } }; template struct argument_helper { using type = const T&; }; template struct argument_helper { T &ref; argument_helper(T &x) : ref(x) {} using type = argument_helper const&; }; template typename std::enable_if::value>::type copy_helper(const void *src, void *dest) { new (dest) T (*static_cast(src)); } template typename std::enable_if::value>::type copy_helper(const void *, void *) { std::abort(); } template typename std::enable_if::value>::type default_helper(void *dest) { new (dest) T(); } template typename std::enable_if::value>::type default_helper(void *) { std::abort(); } template int compare_helper(const T &a, const T&b, int cmp) { switch (cmp) { using namespace std::rel_ops; case 0: if (a < b) return -1; if (b < a) return 1; return 0; case -2: return a < b; case 2: return a > b; case -1: return a <= b; case 1: return a >= b; } std::abort(); } } #define RUST_CPP_CLASS_HELPER(HASH, ...) \ extern "C" { \ void __cpp_destructor_##HASH(void *ptr) { typedef __VA_ARGS__ T; static_cast(ptr)->~T(); } \ void __cpp_copy_##HASH(const void *src, void *dest) { rustcpp::copy_helper<__VA_ARGS__>(src, dest); } \ void __cpp_default_##HASH(void *dest) { rustcpp::default_helper<__VA_ARGS__>(dest); } \ } "#; lazy_static! { static ref CPP_DIR: PathBuf = OUT_DIR.join("rust_cpp"); static ref CARGO_MANIFEST_DIR: PathBuf = PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect( r#" -- rust-cpp fatal error -- The CARGO_MANIFEST_DIR environment variable was not set. NOTE: rust-cpp's build function must be run in a build script."# )); } fn gen_cpp_lib(visitor: &parser::Parser) -> PathBuf { let result_path = CPP_DIR.join("cpp_closures.cpp"); let mut output = File::create(&result_path).expect("Unable to generate temporary C++ file"); write!(output, "{}", INTERNAL_CPP_STRUCTS).unwrap(); if visitor.callbacks_count > 0 { #[rustfmt::skip] write_add_line!(output, r#" extern "C" {{ void (*rust_cpp_callbacks{file_hash}[{callbacks_count}])() = {{}}; }} "#, file_hash = *FILE_HASH, callbacks_count = visitor.callbacks_count ).unwrap(); } write!(output, "{}\n\n", &visitor.snippets).unwrap(); let mut hashmap = HashMap::new(); let mut sizealign = vec![]; for Closure { body_str, sig, callback_offset, .. } in &visitor.closures { let ClosureSig { captures, cpp, .. } = sig; let hash = sig.name_hash(); let name = sig.extern_name(); match hashmap.entry(hash) { Entry::Occupied(e) => { if *e.get() != sig { // Let the compiler do a compilation error. FIXME: report a better error warnln!("Hash collision detected."); } else { continue; } } Entry::Vacant(e) => { e.insert(sig); } } let is_void = cpp == "void"; // Generate the sizes array with the sizes of each of the argument types if is_void { sizealign.push(format!( "{{{hash}ull, 0, 1, {callback_offset}ull << 32}}", hash = hash, callback_offset = callback_offset )); } else { sizealign.push(format!("{{ {hash}ull, sizeof({type}), rustcpp::AlignOf<{type}>::value, rustcpp::Flags<{type}>::value | {callback_offset}ull << 32 }}", hash=hash, type=cpp, callback_offset = callback_offset)); } for Capture { cpp, .. 
} in captures { sizealign.push(format!("{{ {hash}ull, sizeof({type}), rustcpp::AlignOf<{type}>::value, rustcpp::Flags<{type}>::value }}", hash=hash, type=cpp)); } // Generate the parameters and function declaration let params = captures .iter() .map(|&Capture { mutable, ref name, ref cpp }| { if mutable { format!("{} & {}", cpp, name) } else { format!("{} const& {}", cpp, name) } }) .collect::>() .join(", "); if is_void { #[rustfmt::skip] write_add_line!(output, r#" extern "C" {{ void {name}({params}) {{ {body} }} }} "#, name = &name, params = params, body = body_str ).unwrap(); } else { let comma = if params.is_empty() { "" } else { "," }; let args = captures .iter() .map(|Capture { name, .. }| name.to_string()) .collect::>() .join(", "); #[rustfmt::skip] write_add_line!(output, r#" static inline {ty} {name}_impl({params}) {{ {body} }} extern "C" {{ void {name}({params}{comma} void* __result) {{ ::new(__result) ({ty})({name}_impl({args})); }} }} "#, name = &name, params = params, comma = comma, ty = cpp, args = args, body = body_str ).unwrap(); } } for class in &visitor.classes { let hash = class.name_hash(); // Generate the sizes array sizealign.push(format!("{{ {hash}ull, sizeof({type}), rustcpp::AlignOf<{type}>::value, rustcpp::Flags<{type}>::value }}", hash=hash, type=class.cpp)); // Generate helper function. // (this is done in a macro, which right after a #line directing pointing to the location of // the cpp_class! macro in order to give right line information in the possible errors) write!( output, "{line}RUST_CPP_CLASS_HELPER({hash}, {cpp_name})\n", line = class.line, hash = hash, cpp_name = class.cpp ) .unwrap(); if class.derives("PartialEq") { write!(output, "{line}extern \"C\" bool __cpp_equal_{hash}(const {name} *a, const {name} *b) {{ return *a == *b; }}\n", line = class.line, hash = hash, name = class.cpp).unwrap(); } if class.derives("PartialOrd") { write!(output, "{line}extern \"C\" bool __cpp_compare_{hash}(const {name} *a, const {name} *b, int cmp) {{ return rustcpp::compare_helper(*a, *b, cmp); }}\n", line = class.line, hash = hash, name = class.cpp).unwrap(); } } let mut magic = vec![]; for mag in STRUCT_METADATA_MAGIC.iter() { magic.push(format!("{}", mag)); } #[rustfmt::skip] write_add_line!(output, r#" namespace rustcpp {{ template struct AlignOf {{ struct Inner {{ char a; T b; }}; static const uintptr_t value = sizeof(Inner) - sizeof(T); }}; template struct Flags {{ static const uintptr_t value = (std::is_copy_constructible::value << {flag_is_copy_constructible}) | (std::is_default_constructible::value << {flag_is_default_constructible}) | #if !defined(__GNUC__) || (__GNUC__ + 0 >= 5) || defined(__clang__) (std::is_trivially_destructible::value << {flag_is_trivially_destructible}) | (std::is_trivially_copyable::value << {flag_is_trivially_copyable}) | (std::is_trivially_default_constructible::value << {flag_is_trivially_default_constructible}) | #endif 0; }}; struct SizeAlign {{ uint64_t hash; uint64_t size; uint64_t align; uint64_t flags; }}; struct MetaData {{ uint8_t magic[128]; uint8_t version[16]; uint64_t endianness_check; uint64_t length; SizeAlign data[{length}]; }}; MetaData metadata_{hash} = {{ {{ {magic} }}, "{version}", 0xffef, {length}, {{ {data} }} }}; }} // namespace rustcpp "#, hash = *FILE_HASH, data = sizealign.join(", "), length = sizealign.len(), magic = magic.join(", "), version = VERSION, flag_is_copy_constructible = flags::IS_COPY_CONSTRUCTIBLE, flag_is_default_constructible = flags::IS_DEFAULT_CONSTRUCTIBLE, flag_is_trivially_destructible = 
flags::IS_TRIVIALLY_DESTRUCTIBLE, flag_is_trivially_copyable = flags::IS_TRIVIALLY_COPYABLE, flag_is_trivially_default_constructible = flags::IS_TRIVIALLY_DEFAULT_CONSTRUCTIBLE, ).unwrap(); result_path } fn clean_artifacts() { if CPP_DIR.is_dir() { remove_dir_all(&*CPP_DIR).expect( r#" -- rust-cpp fatal error -- Failed to remove existing build artifacts from output directory."#, ); } create_dir(&*CPP_DIR).expect( r#" -- rust-cpp fatal error -- Failed to create output object directory."#, ); } /// This struct is for advanced users of the build script. It allows providing /// configuration options to `cpp` and the compiler when it is used to build. /// /// ## API Note /// /// Internally, `cpp` uses the `cc` crate to build the compilation artifact, /// and many of the methods defined on this type directly proxy to an internal /// `cc::Build` object. pub struct Config { cc: cc::Build, std_flag_set: bool, // true if the -std flag was specified } impl Default for Config { fn default() -> Self { Config::new() } } impl Config { /// Create a new `Config` object. This object will hold the configuration /// options which control the build. If you don't need to make any changes, /// `cpp_build::build` is a wrapper function around this interface. pub fn new() -> Config { let mut cc = cc::Build::new(); cc.cpp(true).include(&*CARGO_MANIFEST_DIR); Config { cc, std_flag_set: false } } /// Add a directory to the `-I` or include path for headers pub fn include>(&mut self, dir: P) -> &mut Self { self.cc.include(dir); self } /// Specify a `-D` variable with an optional value pub fn define(&mut self, var: &str, val: Option<&str>) -> &mut Self { self.cc.define(var, val); self } // XXX: Make sure that this works with sizes logic /// Add an arbitrary object file to link in pub fn object>(&mut self, obj: P) -> &mut Self { self.cc.object(obj); self } /// Add an arbitrary flag to the invocation of the compiler pub fn flag(&mut self, flag: &str) -> &mut Self { if flag.starts_with("-std=") { self.std_flag_set = true; } self.cc.flag(flag); self } /// Add an arbitrary flag to the invocation of the compiler if it supports it pub fn flag_if_supported(&mut self, flag: &str) -> &mut Self { if flag.starts_with("-std=") { self.std_flag_set = true; } self.cc.flag_if_supported(flag); self } // XXX: Make sure this works with sizes logic /// Add a file which will be compiled pub fn file>(&mut self, p: P) -> &mut Self { self.cc.file(p); self } /// Set the standard library to link against when compiling with C++ /// support. /// /// The default value of this property depends on the current target: On /// OS X `Some("c++")` is used, when compiling for a Visual Studio based /// target `None` is used and for other targets `Some("stdc++")` is used. /// /// A value of `None` indicates that no automatic linking should happen, /// otherwise cargo will link against the specified library. /// /// The given library name must not contain the `lib` prefix. pub fn cpp_link_stdlib(&mut self, cpp_link_stdlib: Option<&str>) -> &mut Self { self.cc.cpp_link_stdlib(cpp_link_stdlib); self } /// Force the C++ compiler to use the specified standard library. /// /// Setting this option will automatically set `cpp_link_stdlib` to the same /// value. /// /// The default value of this option is always `None`. /// /// This option has no effect when compiling for a Visual Studio based /// target. /// /// This option sets the `-stdlib` flag, which is only supported by some /// compilers (clang, icc) but not by others (gcc). 
The library will not
    /// detect which compiler is used; it is the responsibility of the
    /// caller to ensure that this option is only used in conjunction with a
    /// compiler which supports the `-stdlib` flag.
    ///
    /// A value of `None` indicates that no specific C++ standard library should
    /// be used, otherwise `-stdlib` is added to the compile invocation.
    ///
    /// The given library name must not contain the `lib` prefix.
    pub fn cpp_set_stdlib(&mut self, cpp_set_stdlib: Option<&str>) -> &mut Self {
        self.cc.cpp_set_stdlib(cpp_set_stdlib);
        self
    }

    // XXX: Add support for custom targets
    //
    // /// Configures the target this configuration will be compiling for.
    // ///
    // /// This option is automatically scraped from the `TARGET` environment
    // /// variable by build scripts, so it's not required to call this function.
    // pub fn target(&mut self, target: &str) -> &mut Self {
    //     self.cc.target(target);
    //     self
    // }

    /// Configures the host assumed by this configuration.
    ///
    /// This option is automatically scraped from the `HOST` environment
    /// variable by build scripts, so it's not required to call this function.
    pub fn host(&mut self, host: &str) -> &mut Self {
        self.cc.host(host);
        self
    }

    /// Configures the optimization level of the generated object files.
    ///
    /// This option is automatically scraped from the `OPT_LEVEL` environment
    /// variable by build scripts, so it's not required to call this function.
    pub fn opt_level(&mut self, opt_level: u32) -> &mut Self {
        self.cc.opt_level(opt_level);
        self
    }

    /// Configures the optimization level of the generated object files.
    ///
    /// This option is automatically scraped from the `OPT_LEVEL` environment
    /// variable by build scripts, so it's not required to call this function.
    pub fn opt_level_str(&mut self, opt_level: &str) -> &mut Self {
        self.cc.opt_level_str(opt_level);
        self
    }

    /// Configures whether the compiler will emit debug information when
    /// generating object files.
    ///
    /// This option is automatically scraped from the `PROFILE` environment
    /// variable by build scripts (only enabled when the profile is "debug"), so
    /// it's not required to call this function.
    pub fn debug(&mut self, debug: bool) -> &mut Self {
        self.cc.debug(debug);
        self
    }

    // XXX: Add support for custom out_dir
    //
    // /// Configures the output directory where all object files and static
    // /// libraries will be located.
    // ///
    // /// This option is automatically scraped from the `OUT_DIR` environment
    // /// variable by build scripts, so it's not required to call this function.
    // pub fn out_dir<P: AsRef<Path>>(&mut self, out_dir: P) -> &mut Self {
    //     self.cc.out_dir(out_dir);
    //     self
    // }

    /// Configures the compiler to be used to produce output.
    ///
    /// This option is automatically determined from the target platform or a
    /// number of environment variables, so it's not required to call this
    /// function.
    pub fn compiler<P: AsRef<Path>>(&mut self, compiler: P) -> &mut Self {
        self.cc.compiler(compiler);
        self
    }

    /// Configures the tool used to assemble archives.
    ///
    /// This option is automatically determined from the target platform or a
    /// number of environment variables, so it's not required to call this
    /// function.
    pub fn archiver<P: AsRef<Path>>(&mut self, archiver: P) -> &mut Self {
        self.cc.archiver(archiver);
        self
    }

    /// Define whether metadata should be emitted for cargo allowing it to
    /// automatically link the binary. Defaults to `true`.
    pub fn cargo_metadata(&mut self, cargo_metadata: bool) -> &mut Self {
        // XXX: Use this to control the cargo metadata which rust-cpp produces
        self.cc.cargo_metadata(cargo_metadata);
        self
    }

    /// Configures whether the compiler will emit position independent code.
    ///
    /// This option defaults to `false` for `i686` and `windows-gnu` targets and
    /// to `true` for all other targets.
    pub fn pic(&mut self, pic: bool) -> &mut Self {
        self.cc.pic(pic);
        self
    }

    /// Extracts `cpp` declarations from the passed-in crate root, and builds
    /// the associated static library to be linked into the final binary.
    ///
    /// This method does not perform Rust codegen - that is performed by `cpp`
    /// and `cpp_macros`, which perform the actual procedural macro expansion.
    ///
    /// This method may technically be called more than once for ergonomic
    /// reasons, but that usually won't do what you want. Use a different
    /// `Config` object each time you want to build a crate.
    pub fn build<P: AsRef<Path>>(&mut self, crate_root: P) {
        assert_eq!(
            env!("CARGO_PKG_VERSION"),
            VERSION,
            "Internal Error: mismatched cpp_common and cpp_build versions"
        );

        // Clean up any leftover artifacts
        clean_artifacts();

        // Parse the crate
        let mut visitor = parser::Parser::default();
        if let Err(err) = visitor.parse_crate(crate_root.as_ref().to_owned()) {
            warnln!(
                r#"-- rust-cpp parse error --
There was an error parsing the crate for the rust-cpp build script:
{}
In order to provide a better error message, the build script will exit successfully, such that
rustc can provide an error message."#,
                err
            );
            return;
        }

        // Generate the C++ library code
        let filename = gen_cpp_lib(&visitor);

        // Ensure C++11 mode is enabled. We rely on some C++11 constructs, so we
        // must enable C++11 by default.
        // MSVC, GCC >= 5 and Clang >= 6 default to C++14, but since we want to
        // support older compilers which default to C++98, we need to
        // explicitly set the "-std" flag.
        // Ideally should be done by https://github.com/alexcrichton/cc-rs/issues/191
        if !self.std_flag_set {
            self.cc.flag_if_supported("-std=c++11");
        }

        // Build the C++ library
        if let Err(e) = self.cc.file(filename).try_compile(LIB_NAME) {
            let _ = writeln!(std::io::stderr(), "\n\nerror occurred: {}\n\n", e);
            #[cfg(not(feature = "docs-only"))]
            std::process::exit(1);
        }
    }
}

/// Run the `cpp` build process on the crate with a root at the given path.
/// Intended to be used within `build.rs` files.
pub fn build>(path: P) { Config::new().build(path) } cpp_build-0.5.9/src/parser.rs000064400000000000000000000521431046102023000142360ustar 00000000000000use cpp_common::{Class, Closure, Macro, RustInvocation}; use lazy_static::lazy_static; use regex::Regex; use std::fmt; use std::fs::File; use std::io::Read; use std::mem::swap; use std::path::{Path, PathBuf}; use syn::visit::Visit; #[allow(clippy::enum_variant_names)] #[derive(Debug)] pub enum Error { ParseCannotOpenFile { src_path: String }, ParseSyntaxError { src_path: String, error: syn::parse::Error }, LexError { src_path: String, line: u32 }, } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::ParseCannotOpenFile { ref src_path } => { write!(f, "Parsing crate: cannot open file `{}`.", src_path) } Error::ParseSyntaxError { ref src_path, ref error } => { write!(f, "Parsing file : `{}`:\n{}", src_path, error) } Error::LexError { ref src_path, ref line } => { write!(f, "{}:{}: Lexing error", src_path, line + 1) } } } } #[derive(Debug)] struct LineError(u32, String); impl LineError { fn add_line(self, a: u32) -> LineError { LineError(self.0 + a, self.1) } } impl fmt::Display for LineError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}:{}", self.0 + 1, self.1) } } impl From for LineError { fn from(e: LexError) -> Self { LineError(e.line, "Lexing error".into()) } } enum ExpandSubMacroType<'a> { Lit, Closure(&'a mut u32), // the offset } // Given a string containing some C++ code with a rust! macro, // this functions expand the rust! macro to a call to an extern // function fn expand_sub_rust_macro(input: String, mut t: ExpandSubMacroType) -> Result { let mut result = input; let mut extra_decl = String::new(); let mut search_index = 0; loop { let (begin, end, line) = { let mut begin = 0; let mut cursor = new_cursor(&result); cursor.advance(search_index); while !cursor.is_empty() { cursor = skip_whitespace(cursor); let r = skip_literal(cursor)?; cursor = r.0; if r.1 { continue; } if cursor.is_empty() { break; } if let Ok((cur, ident)) = symbol(cursor) { begin = cursor.off as usize; cursor = cur; if ident != "rust" { continue; } } else { cursor = cursor.advance(1); continue; } cursor = skip_whitespace(cursor); if !cursor.starts_with("!") { continue; } break; } if cursor.is_empty() { return Ok(extra_decl + &result); } let end = find_delimited((find_delimited(cursor, "(")?.0).advance(1), ")")?.0; (begin, end.off as usize + 1, cursor.line) }; let input: ::proc_macro2::TokenStream = result[begin..end] .parse() .map_err(|_| LineError(line, "TokenStream parse error".into()))?; let rust_invocation = ::syn::parse2::(input).map_err(|e| LineError(line, e.to_string()))?; let fn_name = match t { ExpandSubMacroType::Lit => { extra_decl.push_str(&format!("extern \"C\" void {}();\n", rust_invocation.id)); rust_invocation.id.clone().to_string() } ExpandSubMacroType::Closure(ref mut offset) => { use cpp_common::FILE_HASH; **offset += 1; format!( "rust_cpp_callbacks{file_hash}[{offset}]", file_hash = *FILE_HASH, offset = **offset - 1 ) } }; let mut decl_types = rust_invocation .arguments .iter() .map(|(_, val)| format!("rustcpp::argument_helper<{}>::type", val)) .collect::>(); let mut call_args = rust_invocation.arguments.iter().map(|(val, _)| val.to_string()).collect::>(); let fn_call = match rust_invocation.return_type { None => format!( "reinterpret_cast({f})({args})", f = fn_name, types = decl_types.join(", "), args = call_args.join(", ") ), Some(rty) => { 
decl_types.push(format!("rustcpp::return_helper<{rty}>", rty = rty)); call_args.push("0".to_string()); format!( "std::move(*reinterpret_cast<{rty}*(*)({types})>({f})({args}))", rty = rty, f = fn_name, types = decl_types.join(", "), args = call_args.join(", ") ) } }; let fn_call = { // remove the rust! macro from the C++ snippet let orig = result.drain(begin..end); // add \ņ to the invocation in order to keep the same amount of line numbers // so errors point to the right line. orig.filter(|x| *x == '\n').fold(fn_call, |mut res, _| { res.push('\n'); res }) }; // add the invocation of call where the rust! macro used to be. result.insert_str(begin, &fn_call); search_index = begin + fn_call.len(); } } #[test] fn test_expand_sub_rust_macro() { let x = expand_sub_rust_macro("{ rust!(xxx [] { 1 }); }".to_owned(), ExpandSubMacroType::Lit); assert_eq!(x.unwrap(), "extern \"C\" void xxx();\n{ reinterpret_cast(xxx)(); }"); let x = expand_sub_rust_macro( "{ hello( rust!(xxx [] { 1 }), rust!(yyy [] { 2 }); ) }".to_owned(), ExpandSubMacroType::Lit, ); assert_eq!(x.unwrap(), "extern \"C\" void xxx();\nextern \"C\" void yyy();\n{ hello( reinterpret_cast(xxx)(), reinterpret_cast(yyy)(); ) }"); let s = "{ /* rust! */ /* rust!(xxx [] { 1 }) */ }".to_owned(); assert_eq!(expand_sub_rust_macro(s.clone(), ExpandSubMacroType::Lit).unwrap(), s); } #[path = "strnom.rs"] mod strnom; use crate::strnom::*; fn skip_literal(mut input: Cursor) -> PResult { //input = whitespace(input)?.0; if input.starts_with("\"") { input = cooked_string(input.advance(1))?.0; debug_assert!(input.starts_with("\"")); return Ok((input.advance(1), true)); } if input.starts_with("b\"") { input = cooked_byte_string(input.advance(2))?.0; debug_assert!(input.starts_with("\"")); return Ok((input.advance(1), true)); } if input.starts_with("\'") { input = input.advance(1); let cur = cooked_char(input)?.0; if !cur.starts_with("\'") { return Ok((symbol(input)?.0, true)); } return Ok((cur.advance(1), true)); } if input.starts_with("b\'") { input = cooked_byte(input.advance(2))?.0; if !input.starts_with("\'") { return Err(LexError { line: input.line }); } return Ok((input.advance(1), true)); } lazy_static! 
{ static ref RAW: Regex = Regex::new(r##"^b?r#*""##).unwrap(); } if RAW.is_match(input.rest) { let q = input.rest.find('r').unwrap(); input = input.advance(q + 1); return raw_string(input).map(|x| (x.0, true)); } Ok((input, false)) } fn new_cursor(s: &str) -> Cursor { Cursor { rest: s, off: 0, line: 0, column: 0 } } #[test] fn test_skip_literal() -> Result<(), LexError> { assert!((skip_literal(new_cursor(r#""fofofo"ok xx"#))?.0).starts_with("ok")); assert!((skip_literal(new_cursor(r#""kk\"kdk"ok xx"#))?.0).starts_with("ok")); assert!((skip_literal(new_cursor("r###\"foo \" bar \\\" \"###ok xx"))?.0).starts_with("ok")); assert!( (skip_literal(new_cursor("br###\"foo 'jjk' \" bar \\\" \"###ok xx"))?.0).starts_with("ok") ); assert!((skip_literal(new_cursor("'4'ok xx"))?.0).starts_with("ok")); assert!((skip_literal(new_cursor("'\''ok xx"))?.0).starts_with("ok")); assert!((skip_literal(new_cursor("b'\''ok xx"))?.0).starts_with("ok")); assert!((skip_literal(new_cursor("'abc ok xx"))?.0).starts_with(" ok")); assert!((skip_literal(new_cursor("'a ok xx"))?.0).starts_with(" ok")); assert!((skip_whitespace(new_cursor("ok xx"))).starts_with("ok")); assert!((skip_whitespace(new_cursor(" ok xx"))).starts_with("ok")); assert!((skip_whitespace(new_cursor(" \n /* /*dd \n // */ */ // foo \n ok xx/* */"))) .starts_with("ok")); Ok(()) } // advance the cursor until it finds the needle. fn find_delimited<'a>(mut input: Cursor<'a>, needle: &str) -> PResult<'a, ()> { let mut stack: Vec<&'static str> = vec![]; while !input.is_empty() { input = skip_whitespace(input); input = skip_literal(input)?.0; if input.is_empty() { break; } if stack.is_empty() && input.starts_with(needle) { return Ok((input, ())); } else if stack.last().map_or(false, |x| input.starts_with(x)) { stack.pop(); } else if input.starts_with("(") { stack.push(")"); } else if input.starts_with("[") { stack.push("]"); } else if input.starts_with("{") { stack.push("}"); } else if input.starts_with(")") || input.starts_with("]") || input.starts_with("}") { return Err(LexError { line: input.line }); } input = input.advance(1); } Err(LexError { line: input.line }) } #[test] fn test_find_delimited() -> Result<(), LexError> { assert!((find_delimited(new_cursor(" x f ok"), "f")?.0).starts_with("f ok")); assert!((find_delimited(new_cursor(" {f} f ok"), "f")?.0).starts_with("f ok")); assert!((find_delimited(new_cursor(" (f\")\" { ( ) } /* ) */ f ) f ok"), "f")?.0) .starts_with("f ok")); Ok(()) } #[test] fn test_cursor_advance() -> Result<(), LexError> { assert_eq!(new_cursor("\n\n\n").advance(2).line, 2); assert_eq!(new_cursor("\n \n\n").advance(2).line, 1); assert_eq!(new_cursor("\n\n\n").advance(2).column, 0); assert_eq!(new_cursor("\n \n\n").advance(2).column, 1); assert_eq!((find_delimited(new_cursor("\n/*\n \n */ ( \n ) /* */ f"), "f")?.0).line, 4); assert_eq!((find_delimited(new_cursor("\n/*\n \n */ ( \n ) /* */ f"), "f")?.0).column, 9); Ok(()) } fn line_directive(path: &Path, cur: Cursor) -> String { let mut line = format!("#line {} \"{}\"\n", cur.line + 1, path.to_string_lossy().replace('\\', "\\\\")); for _ in 0..cur.column { line.push(' '); } line } #[derive(Default)] pub struct Parser { pub closures: Vec, pub classes: Vec, pub snippets: String, pub callbacks_count: u32, current_path: PathBuf, // The current file being parsed mod_dir: PathBuf, mod_error: Option, // An error occuring while visiting the modules } impl Parser { pub fn parse_crate(&mut self, crate_root: PathBuf) -> Result<(), Error> { let parent = crate_root.parent().map(|x| 
x.to_owned()).unwrap_or_default(); self.parse_mod(crate_root, parent) } fn parse_mod(&mut self, mod_path: PathBuf, submod_dir: PathBuf) -> Result<(), Error> { let mut s = String::new(); let mut f = File::open(&mod_path).map_err(|_| Error::ParseCannotOpenFile { src_path: mod_path.to_str().unwrap().to_owned(), })?; f.read_to_string(&mut s).map_err(|_| Error::ParseCannotOpenFile { src_path: mod_path.to_str().unwrap().to_owned(), })?; let fi = syn::parse_file(&s).map_err(|x| Error::ParseSyntaxError { src_path: mod_path.to_str().unwrap().to_owned(), error: x, })?; let mut current_path = mod_path; let mut mod_dir = submod_dir; swap(&mut self.current_path, &mut current_path); swap(&mut self.mod_dir, &mut mod_dir); self.find_cpp_macros(&s)?; self.visit_file(&fi); if let Some(err) = self.mod_error.take() { return Err(err); } swap(&mut self.current_path, &mut current_path); swap(&mut self.mod_dir, &mut mod_dir); Ok(()) } /* fn parse_macro(&mut self, tts: TokenStream) { let mut last_ident: Option = None; let mut is_macro = false; for t in tts.into_iter() { match t { TokenTree::Punct(ref p) if p.as_char() == '!' => is_macro = true, TokenTree::Ident(i) => { is_macro = false; last_ident = Some(i); } TokenTree::Group(d) => { if is_macro && last_ident.as_ref().map_or(false, |i| i == "cpp") { self.handle_cpp(&d.stream()) } else if is_macro && last_ident.as_ref().map_or(false, |i| i == "cpp_class") { self.handle_cpp_class(&d.stream()) } else { self.parse_macro(d.stream()) } is_macro = false; last_ident = None; } _ => { is_macro = false; last_ident = None; } } } } */ fn find_cpp_macros(&mut self, source: &str) -> Result<(), Error> { let mut cursor = new_cursor(source); while !cursor.is_empty() { cursor = skip_whitespace(cursor); let r = skip_literal(cursor).map_err(|e| self.lex_error(e))?; cursor = r.0; if r.1 { continue; } if let Ok((cur, ident)) = symbol(cursor) { cursor = cur; if ident != "cpp" && ident != "cpp_class" { continue; } cursor = skip_whitespace(cursor); if !cursor.starts_with("!") { continue; } cursor = skip_whitespace(cursor.advance(1)); let delim = if cursor.starts_with("(") { ")" } else if cursor.starts_with("[") { "]" } else if cursor.starts_with("{") { "}" } else { continue; }; cursor = cursor.advance(1); let mut macro_cur = cursor; cursor = find_delimited(cursor, delim).map_err(|e| self.lex_error(e))?.0; let size = (cursor.off - macro_cur.off) as usize; macro_cur.rest = ¯o_cur.rest[..size]; if ident == "cpp" { self.handle_cpp(macro_cur).unwrap_or_else(|e| { panic!("Error while parsing cpp! macro:\n{:?}:{}", self.current_path, e) }); } else { debug_assert_eq!(ident, "cpp_class"); self.handle_cpp_class(macro_cur).unwrap_or_else(|e| { panic!( "Error while parsing cpp_class! macro:\n{:?}:{}", self.current_path, e ) }); } continue; } if cursor.is_empty() { break; } cursor = cursor.advance(1); // Not perfect, but should work } Ok(()) } fn lex_error(&self, e: LexError) -> Error { Error::LexError { src_path: self.current_path.clone().to_str().unwrap().to_owned(), line: e.line, } } fn handle_cpp(&mut self, x: Cursor) -> Result<(), LineError> { // Since syn don't give the exact string, we extract manually let begin = (find_delimited(x, "{")?.0).advance(1); let end = find_delimited(begin, "}")?.0; let extracted = &begin.rest[..(end.off - begin.off) as usize]; let input: ::proc_macro2::TokenStream = x.rest.parse().map_err(|_| LineError(x.line, "TokenStream parse error".into()))?; match ::syn::parse2::(input).map_err(|e| LineError(x.line, e.to_string()))? 
{ Macro::Closure(mut c) => { c.callback_offset = self.callbacks_count; c.body_str = line_directive(&self.current_path, begin) + &expand_sub_rust_macro( extracted.to_string(), ExpandSubMacroType::Closure(&mut self.callbacks_count), ) .map_err(|e| e.add_line(begin.line))?; self.closures.push(c); } Macro::Lit(_l) => { self.snippets.push('\n'); let snip = expand_sub_rust_macro( line_directive(&self.current_path, begin) + extracted, ExpandSubMacroType::Lit, ) .map_err(|e| e.add_line(begin.line))?; self.snippets.push_str(&snip); } } Ok(()) } fn handle_cpp_class(&mut self, x: Cursor) -> Result<(), LineError> { let input: ::proc_macro2::TokenStream = x.rest.parse().map_err(|_| LineError(x.line, "TokenStream parse error".into()))?; let mut class = ::syn::parse2::(input).map_err(|e| LineError(x.line, e.to_string()))?; class.line = line_directive(&self.current_path, x); self.classes.push(class); Ok(()) } } impl<'ast> Visit<'ast> for Parser { /* This is currently commented out because proc_macro2 don't allow us to get the text verbatim (https://github.com/alexcrichton/proc-macro2/issues/110#issuecomment-411959999) fn visit_macro(&mut self, mac: &syn::Macro) { if mac.path.segments.len() != 1 { return; } if mac.path.segments[0].ident == "cpp" { self.handle_cpp(&mac.tts); } else if mac.path.segments[0].ident == "cpp_class" { self.handle_cpp_class(&mac.tts); } else { self.parse_macro(mac.tts.clone()); } }*/ fn visit_item_mod(&mut self, item: &'ast syn::ItemMod) { if self.mod_error.is_some() { return; } if item.content.is_some() { let mut parent = self.mod_dir.join(item.ident.to_string()); swap(&mut self.mod_dir, &mut parent); syn::visit::visit_item_mod(self, item); swap(&mut self.mod_dir, &mut parent); return; } let mut cfg_disabled = false; // Determine the path of the inner module's file for attr in &item.attrs { match &attr.meta { // parse #[path = "foo.rs"]: read module from the specified path syn::Meta::NameValue(syn::MetaNameValue { path, value: syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(s), .. }), .. }) if path.is_ident("path") => { let mod_path = self.mod_dir.join(s.value()); let parent = self.mod_dir.parent().map(|x| x.to_owned()).unwrap_or_default(); return self .parse_mod(mod_path, parent) .unwrap_or_else(|err| self.mod_error = Some(err)); } // parse #[cfg(feature = "feature")]: don't follow modules not enabled by current features syn::Meta::List(list @ syn::MetaList { path, .. }) if path.is_ident("cfg") => { drop(list.parse_nested_meta(|meta| { if meta.path.is_ident("feature") { let feature: syn::LitStr = meta.value()?.parse()?; let feature_env_var = "CARGO_FEATURE_".to_owned() + &feature.value().to_uppercase().replace('-', "_"); if std::env::var_os(feature_env_var).is_none() { cfg_disabled = true; } } Ok(()) })) } _ => {} } } if cfg_disabled { return; } let mod_name = item.ident.to_string(); let subdir = self.mod_dir.join(&mod_name); let subdir_mod = subdir.join("mod.rs"); if subdir_mod.is_file() { return self .parse_mod(subdir_mod, subdir) .unwrap_or_else(|err| self.mod_error = Some(err)); } let adjacent = self.mod_dir.join(format!("{}.rs", mod_name)); if adjacent.is_file() { return self .parse_mod(adjacent, subdir) .unwrap_or_else(|err| self.mod_error = Some(err)); } panic!( "No file with module definition for `mod {}` in file {:?}", mod_name, self.current_path ); } } cpp_build-0.5.9/src/strnom.rs000064400000000000000000000272351046102023000142700ustar 00000000000000//! Fork of the equivalent file from the proc-macro2 file. //! Modified to support line number counting in Cursor. //! 
Also contains some function from stable.rs of proc_macro2. #![allow(dead_code)] // Why is this needed ? use std::str::{Bytes, CharIndices, Chars}; use unicode_xid::UnicodeXID; #[derive(Debug)] pub struct LexError { pub line: u32, } #[derive(Copy, Clone, Eq, PartialEq)] pub struct Cursor<'a> { pub rest: &'a str, pub off: u32, pub line: u32, pub column: u32, } impl<'a> Cursor<'a> { #[allow(clippy::suspicious_map)] pub fn advance(&self, amt: usize) -> Cursor<'a> { let mut column_start: Option = None; Cursor { rest: &self.rest[amt..], off: self.off + (amt as u32), line: self.line + self.rest[..amt] .char_indices() .filter(|(_, x)| *x == '\n') .map(|(i, _)| { column_start = Some(i); }) .count() as u32, column: match column_start { None => self.column + (amt as u32), Some(i) => (amt - i) as u32 - 1, }, } } pub fn find(&self, p: char) -> Option { self.rest.find(p) } pub fn starts_with(&self, s: &str) -> bool { self.rest.starts_with(s) } pub fn is_empty(&self) -> bool { self.rest.is_empty() } pub fn len(&self) -> usize { self.rest.len() } pub fn as_bytes(&self) -> &'a [u8] { self.rest.as_bytes() } pub fn bytes(&self) -> Bytes<'a> { self.rest.bytes() } pub fn chars(&self) -> Chars<'a> { self.rest.chars() } pub fn char_indices(&self) -> CharIndices<'a> { self.rest.char_indices() } } pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>; pub fn whitespace(input: Cursor) -> PResult<()> { if input.is_empty() { return Err(LexError { line: input.line }); } let bytes = input.as_bytes(); let mut i = 0; while i < bytes.len() { let s = input.advance(i); if bytes[i] == b'/' { if s.starts_with("//") // && (!s.starts_with("///") || s.starts_with("////")) // && !s.starts_with("//!") { if let Some(len) = s.find('\n') { i += len + 1; continue; } break; } else if s.starts_with("/**/") { i += 4; continue; } else if s.starts_with("/*") // && (!s.starts_with("/**") || s.starts_with("/***")) // && !s.starts_with("/*!") { let (_, com) = block_comment(s)?; i += com.len(); continue; } } match bytes[i] { b' ' | 0x09..=0x0d => { i += 1; continue; } b if b <= 0x7f => {} _ => { let ch = s.chars().next().unwrap(); if is_whitespace(ch) { i += ch.len_utf8(); continue; } } } return if i > 0 { Ok((s, ())) } else { Err(LexError { line: s.line }) }; } Ok((input.advance(input.len()), ())) } pub fn block_comment(input: Cursor) -> PResult<&str> { if !input.starts_with("/*") { return Err(LexError { line: input.line }); } let mut depth = 0; let bytes = input.as_bytes(); let mut i = 0; let upper = bytes.len() - 1; while i < upper { if bytes[i] == b'/' && bytes[i + 1] == b'*' { depth += 1; i += 1; // eat '*' } else if bytes[i] == b'*' && bytes[i + 1] == b'/' { depth -= 1; if depth == 0 { return Ok((input.advance(i + 2), &input.rest[..i + 2])); } i += 1; // eat '/' } i += 1; } Err(LexError { line: input.line }) } pub fn skip_whitespace(input: Cursor) -> Cursor { match whitespace(input) { Ok((rest, _)) => rest, Err(_) => input, } } fn is_whitespace(ch: char) -> bool { // Rust treats left-to-right mark and right-to-left mark as whitespace ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}' } // --- functions from stable.rs #[inline] fn is_ident_start(c: char) -> bool { c.is_ascii_alphabetic() || c == '_' || (c > '\x7f' && UnicodeXID::is_xid_start(c)) } #[inline] fn is_ident_continue(c: char) -> bool { c.is_ascii_alphanumeric() || c == '_' || (c > '\x7f' && UnicodeXID::is_xid_continue(c)) } pub fn symbol(input: Cursor) -> PResult<&str> { let mut chars = input.char_indices(); let raw = input.starts_with("r#"); if raw { 
chars.next(); chars.next(); } match chars.next() { Some((_, ch)) if is_ident_start(ch) => {} _ => return Err(LexError { line: input.line }), } let mut end = input.len(); for (i, ch) in chars { if !is_ident_continue(ch) { end = i; break; } } let a = &input.rest[..end]; if a == "r#_" { Err(LexError { line: input.line }) } else { let ident = if raw { &a[2..] } else { a }; Ok((input.advance(end), ident)) } } pub fn cooked_string(input: Cursor) -> PResult<()> { let mut chars = input.char_indices().peekable(); while let Some((byte_offset, ch)) = chars.next() { match ch { '"' => { return Ok((input.advance(byte_offset), ())); } '\r' => { if let Some((_, '\n')) = chars.next() { // ... } else { break; } } '\\' => match chars.next() { Some((_, 'x')) => { if !backslash_x_char(&mut chars) { break; } } Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\')) | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {} Some((_, 'u')) => { if !backslash_u(&mut chars) { break; } } Some((_, '\n')) | Some((_, '\r')) => { while let Some(&(_, ch)) = chars.peek() { if ch.is_whitespace() { chars.next(); } else { break; } } } _ => break, }, _ch => {} } } Err(LexError { line: input.line }) } pub fn cooked_byte_string(mut input: Cursor) -> PResult<()> { let mut bytes = input.bytes().enumerate(); 'outer: while let Some((offset, b)) = bytes.next() { match b { b'"' => { return Ok((input.advance(offset), ())); } b'\r' => { if let Some((_, b'\n')) = bytes.next() { // ... } else { break; } } b'\\' => match bytes.next() { Some((_, b'x')) => { if !backslash_x_byte(&mut bytes) { break; } } Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\')) | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {} Some((newline, b'\n')) | Some((newline, b'\r')) => { let rest = input.advance(newline + 1); for (offset, ch) in rest.char_indices() { if !ch.is_whitespace() { input = rest.advance(offset); bytes = input.bytes().enumerate(); continue 'outer; } } break; } _ => break, }, b if b < 0x80 => {} _ => break, } } Err(LexError { line: input.line }) } pub fn raw_string(input: Cursor) -> PResult<()> { let mut chars = input.char_indices(); let mut n = 0; #[allow(clippy::while_let_on_iterator)] //chars is used in the next loop while let Some((byte_offset, ch)) = chars.next() { match ch { '"' => { n = byte_offset; break; } '#' => {} _ => return Err(LexError { line: input.line }), } } for (byte_offset, ch) in chars { match ch { '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => { let rest = input.advance(byte_offset + 1 + n); return Ok((rest, ())); } '\r' => {} _ => {} } } Err(LexError { line: input.line }) } pub fn cooked_byte(input: Cursor) -> PResult<()> { let mut bytes = input.bytes().enumerate(); let ok = match bytes.next().map(|(_, b)| b) { Some(b'\\') => match bytes.next().map(|(_, b)| b) { Some(b'x') => backslash_x_byte(&mut bytes), Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'') | Some(b'"') => true, _ => false, }, b => b.is_some(), }; if ok { match bytes.next() { Some((offset, _)) => { if input.chars().as_str().is_char_boundary(offset) { Ok((input.advance(offset), ())) } else { Err(LexError { line: input.line }) } } None => Ok((input.advance(input.len()), ())), } } else { Err(LexError { line: input.line }) } } pub fn cooked_char(input: Cursor) -> PResult<()> { let mut chars = input.char_indices(); let ok = match chars.next().map(|(_, ch)| ch) { Some('\\') => match chars.next().map(|(_, ch)| ch) { Some('x') => backslash_x_char(&mut chars), Some('u') => 
backslash_u(&mut chars), Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => { true } _ => false, }, ch => ch.is_some(), }; if ok { match chars.next() { Some((idx, _)) => Ok((input.advance(idx), ())), None => Ok((input.advance(input.len()), ())), } } else { Err(LexError { line: input.line }) } } macro_rules! next_ch { ($chars:ident @ $pat:pat $(| $rest:pat)*) => { match $chars.next() { Some((_, ch)) => match ch { $pat $(| $rest)* => ch, _ => return false, }, None => return false } }; } fn backslash_x_char(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ '0'..='7'); next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); true } fn backslash_x_byte(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); true } fn backslash_u(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ '{'); next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); loop { let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}'); if c == '}' { return true; } } }
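
// A small test sketch (added for illustration, not part of the upstream proc-macro2 fork):
// it exercises the cursor helpers above, in particular that `cooked_char` stops right
// before the closing quote and that `skip_whitespace` eats comments while `Cursor::advance`
// keeps the line count in sync. The `cur` constructor below is a local convenience helper,
// not something taken from the original file.
#[cfg(test)]
mod strnom_tests {
    use super::*;

    fn cur(s: &str) -> Cursor {
        Cursor { rest: s, off: 0, line: 0, column: 0 }
    }

    #[test]
    fn cooked_char_stops_before_closing_quote() {
        // The caller (`skip_literal` in parser.rs) consumes the opening quote first.
        assert!(cooked_char(cur("a' rest")).unwrap().0.starts_with("'"));
        assert!(cooked_char(cur("\\n' rest")).unwrap().0.starts_with("'"));
        assert!(cooked_char(cur("\\u{1F600}' rest")).unwrap().0.starts_with("'"));
    }

    #[test]
    fn skip_whitespace_eats_comments_and_counts_lines() {
        let c = skip_whitespace(cur(" /* block */ // line\n ok"));
        assert!(c.starts_with("ok"));
        assert_eq!(c.line, 1);
    }
}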