proc-macro2-1.0.28/.cargo_vcs_info.json0000644000000001120000000000100132420ustar { "git": { "sha1": "93c4df24e2d5ef9ddff484a7086f2e0018a13507" } } proc-macro2-1.0.28/.clippy.toml000064400000000000000000000000200072674642500143330ustar 00000000000000msrv = "1.31.0" proc-macro2-1.0.28/.github/workflows/ci.yml000064400000000000000000000042020072674642500166010ustar 00000000000000name: CI on: push: pull_request: schedule: [cron: "40 1 * * *"] jobs: test: name: Rust ${{ matrix.rust }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: rust: [1.31.0, stable, beta] steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: toolchain: ${{ matrix.rust }} profile: minimal override: true - run: cargo test - run: cargo test --no-default-features - run: cargo test --features span-locations - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features nightly: name: Rust nightly runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: toolchain: nightly profile: minimal override: true - run: cargo test - run: cargo test --no-default-features - run: cargo test --no-default-features -- --ignored # run the ignored test to make sure the `proc-macro` feature is disabled - run: cargo test --features span-locations - run: cargo test --manifest-path tests/ui/Cargo.toml - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features - run: RUSTFLAGS='-Z allow-features=' cargo test - run: cargo update -Z minimal-versions && cargo build webassembly: name: WebAssembly runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: toolchain: nightly target: wasm32-unknown-unknown profile: minimal override: true - run: cargo test --target wasm32-unknown-unknown --no-run clippy: name: Clippy runs-on: ubuntu-latest steps: - uses: 
actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: toolchain: nightly profile: minimal override: true components: clippy - run: cargo clippy -- -Dclippy::all proc-macro2-1.0.28/.gitignore000064400000000000000000000000360072674642500140570ustar 00000000000000/target **/*.rs.bk Cargo.lock proc-macro2-1.0.28/Cargo.toml0000644000000026630000000000100112550ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "proc-macro2" version = "1.0.28" authors = ["Alex Crichton ", "David Tolnay "] description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n" documentation = "https://docs.rs/proc-macro2" readme = "README.md" keywords = ["macros"] categories = ["development-tools::procedural-macro-helpers"] license = "MIT OR Apache-2.0" repository = "https://github.com/alexcrichton/proc-macro2" [package.metadata.docs.rs] rustc-args = ["--cfg", "procmacro2_semver_exempt"] rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"] targets = ["x86_64-unknown-linux-gnu"] [package.metadata.playground] features = ["span-locations"] [dependencies.unicode-xid] version = "0.2" [dev-dependencies.quote] version = "1.0" default_features = false [features] default = ["proc-macro"] nightly = [] proc-macro = [] span-locations = [] proc-macro2-1.0.28/Cargo.toml.orig000064400000000000000000000037120072674642500147620ustar 00000000000000[package] name = "proc-macro2" version = "1.0.28" # remember to update html_root_url authors = ["Alex Crichton ", 
"David Tolnay "] license = "MIT OR Apache-2.0" readme = "README.md" keywords = ["macros"] repository = "https://github.com/alexcrichton/proc-macro2" documentation = "https://docs.rs/proc-macro2" categories = ["development-tools::procedural-macro-helpers"] edition = "2018" description = """ A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case. """ [package.metadata.docs.rs] rustc-args = ["--cfg", "procmacro2_semver_exempt"] rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"] targets = ["x86_64-unknown-linux-gnu"] [package.metadata.playground] features = ["span-locations"] [dependencies] unicode-xid = "0.2" [dev-dependencies] quote = { version = "1.0", default_features = false } [features] proc-macro = [] default = ["proc-macro"] # Expose methods Span::start and Span::end which give the line/column location # of a token. span-locations = [] # This feature no longer means anything. nightly = [] [workspace] members = ["benches/bench-libproc-macro", "tests/ui"] [patch.crates-io] # Our doc tests depend on quote which depends on proc-macro2. Without this line, # the proc-macro2 dependency of quote would be the released version of # proc-macro2. Quote would implement its traits for types from that proc-macro2, # meaning impls would be missing when tested against types from the local # proc-macro2. # # GitHub Actions builds that are in progress at the time that you publish may # spuriously fail. This is because they'll be building a local proc-macro2 which # carries the second-most-recent version number, pulling in quote which resolves # to a dependency on the just-published most recent version number. Thus the # patch will fail to apply because the version numbers are different. proc-macro2 = { path = "." 
} proc-macro2-1.0.28/LICENSE-APACHE000064400000000000000000000251370072674642500140240ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. proc-macro2-1.0.28/LICENSE-MIT000064400000000000000000000020410072674642500135210ustar 00000000000000Copyright (c) 2014 Alex Crichton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. proc-macro2-1.0.28/README.md000064400000000000000000000066140072674642500133560ustar 00000000000000# proc-macro2 [![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions) [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2) [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2) A wrapper around the procedural macro API of the compiler's `proc_macro` crate. This library serves two purposes: - **Bring proc-macro-like functionality to other contexts like build.rs and main.rs.** Types from `proc_macro` are entirely specific to procedural macros and cannot ever exist in code outside of a procedural macro. Meanwhile `proc_macro2` types may exist anywhere including non-macro code. By developing foundational libraries like [syn] and [quote] against `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem becomes easily applicable to many other use cases and we avoid reimplementing non-macro equivalents of those libraries. - **Make procedural macros unit testable.** As a consequence of being specific to procedural macros, nothing that uses `proc_macro` can be executed from a unit test. In order for helper libraries or components of a macro to be testable in isolation, they must be implemented using `proc_macro2`. 
[syn]: https://github.com/dtolnay/syn [quote]: https://github.com/dtolnay/quote ## Usage ```toml [dependencies] proc-macro2 = "1.0" ``` The skeleton of a typical procedural macro typically looks like this: ```rust extern crate proc_macro; #[proc_macro_derive(MyDerive)] pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = proc_macro2::TokenStream::from(input); let output: proc_macro2::TokenStream = { /* transform input */ }; proc_macro::TokenStream::from(output) } ``` If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate parse errors correctly back to the compiler when parsing fails. [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html ## Unstable features The default feature set of proc-macro2 tracks the most recent stable compiler API. Functionality in `proc_macro` that is not yet stable is not exposed by proc-macro2 by default. To opt into the additional APIs available in the most recent nightly compiler, the `procmacro2_semver_exempt` config flag must be passed to rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs that track the nightly compiler, minor versions of proc-macro2 may make breaking changes to them at any time. ``` RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build ``` Note that this must not only be done for your crate, but for any crate that depends on your crate. This infectious nature is intentional, as it serves as a reminder that you are outside of the normal semver guarantees. Semver exempt methods are marked as such in the proc-macro2 documentation.
#### License Licensed under either of Apache License, Version 2.0 or MIT license at your option.
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this crate by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. proc-macro2-1.0.28/build.rs000064400000000000000000000123740072674642500135440ustar 00000000000000// rustc-cfg emitted by the build script: // // "use_proc_macro" // Link to extern crate proc_macro. Available on any compiler and any target // except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is // enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg // is enabled. // // "wrap_proc_macro" // Wrap types from libproc_macro rather than polyfilling the whole API. // Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set, // because we can't emulate the unstable API without emulating everything // else. Also enabled unconditionally on nightly, in which case the // procmacro2_semver_exempt surface area is implemented by using the // nightly-only proc_macro API. // // "hygiene" // Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at // and Span::located_at. Enabled on Rust 1.45+. // // "proc_macro_span" // Enable non-dummy behavior of Span::start and Span::end methods which // requires an unstable compiler feature. Enabled when building with // nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable // features. // // "super_unstable" // Implement the semver exempt API in terms of the nightly-only proc_macro // API. Enabled when using procmacro2_semver_exempt on a nightly compiler. // // "span_locations" // Provide methods Span::start and Span::end which give the line/column // location of a token. Enabled by procmacro2_semver_exempt or the // "span-locations" Cargo cfg. This is behind a cfg because tracking // location inside spans is a performance hit. 
use std::env; use std::iter; use std::process::{self, Command}; use std::str; fn main() { println!("cargo:rerun-if-changed=build.rs"); let version = match rustc_version() { Some(version) => version, None => return, }; if version.minor < 31 { eprintln!("Minimum supported rustc version is 1.31"); process::exit(1); } let semver_exempt = cfg!(procmacro2_semver_exempt); if semver_exempt { // https://github.com/alexcrichton/proc-macro2/issues/147 println!("cargo:rustc-cfg=procmacro2_semver_exempt"); } if semver_exempt || cfg!(feature = "span-locations") { println!("cargo:rustc-cfg=span_locations"); } if version.minor < 32 { println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe"); } if version.minor < 39 { println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard"); } if version.minor >= 44 { println!("cargo:rustc-cfg=lexerror_display"); } if version.minor >= 45 { println!("cargo:rustc-cfg=hygiene"); } let target = env::var("TARGET").unwrap(); if !enable_use_proc_macro(&target) { return; } println!("cargo:rustc-cfg=use_proc_macro"); if version.nightly || !semver_exempt { println!("cargo:rustc-cfg=wrap_proc_macro"); } if version.nightly && feature_allowed("proc_macro_span") { println!("cargo:rustc-cfg=proc_macro_span"); } if semver_exempt && version.nightly { println!("cargo:rustc-cfg=super_unstable"); } } fn enable_use_proc_macro(target: &str) -> bool { // wasm targets don't have the `proc_macro` crate, disable this feature. if target.contains("wasm32") { return false; } // Otherwise, only enable it if our feature is actually enabled. 
cfg!(feature = "proc-macro") } struct RustcVersion { minor: u32, nightly: bool, } fn rustc_version() -> Option { let rustc = env::var_os("RUSTC")?; let output = Command::new(rustc).arg("--version").output().ok()?; let version = str::from_utf8(&output.stdout).ok()?; let nightly = version.contains("nightly") || version.contains("dev"); let mut pieces = version.split('.'); if pieces.next() != Some("rustc 1") { return None; } let minor = pieces.next()?.parse().ok()?; Some(RustcVersion { minor, nightly }) } fn feature_allowed(feature: &str) -> bool { // Recognized formats: // // -Z allow-features=feature1,feature2 // // -Zallow-features=feature1,feature2 let flags_var; let flags_var_string; let mut flags_var_split; let mut flags_none; let flags: &mut dyn Iterator = if let Some(encoded_rustflags) = env::var_os("CARGO_ENCODED_RUSTFLAGS") { flags_var = encoded_rustflags; flags_var_string = flags_var.to_string_lossy(); flags_var_split = flags_var_string.split('\x1f'); &mut flags_var_split } else if let Some(rustflags) = env::var_os("RUSTFLAGS") { flags_var = rustflags; flags_var_string = flags_var.to_string_lossy(); flags_var_split = flags_var_string.split(' '); &mut flags_var_split } else { flags_none = iter::empty(); &mut flags_none }; for mut flag in flags { if flag.starts_with("-Z") { flag = &flag["-Z".len()..]; } if flag.starts_with("allow-features=") { flag = &flag["allow-features=".len()..]; return flag.split(',').any(|allowed| allowed == feature); } } // No allow-features= flag, allowed by default. 
true } proc-macro2-1.0.28/src/detection.rs000064400000000000000000000046720072674642500152140ustar 00000000000000use std::panic::{self, PanicInfo}; use std::sync::atomic::*; use std::sync::Once; static WORKS: AtomicUsize = AtomicUsize::new(0); static INIT: Once = Once::new(); pub(crate) fn inside_proc_macro() -> bool { match WORKS.load(Ordering::SeqCst) { 1 => return false, 2 => return true, _ => {} } INIT.call_once(initialize); inside_proc_macro() } pub(crate) fn force_fallback() { WORKS.store(1, Ordering::SeqCst); } pub(crate) fn unforce_fallback() { initialize(); } // Swap in a null panic hook to avoid printing "thread panicked" to stderr, // then use catch_unwind to determine whether the compiler's proc_macro is // working. When proc-macro2 is used from outside of a procedural macro all // of the proc_macro crate's APIs currently panic. // // The Once is to prevent the possibility of this ordering: // // thread 1 calls take_hook, gets the user's original hook // thread 1 calls set_hook with the null hook // thread 2 calls take_hook, thinks null hook is the original hook // thread 2 calls set_hook with the null hook // thread 1 calls set_hook with the actual original hook // thread 2 calls set_hook with what it thinks is the original hook // // in which the user's hook has been lost. // // There is still a race condition where a panic in a different thread can // happen during the interval that the user's original panic hook is // unregistered such that their hook is incorrectly not called. This is // sufficiently unlikely and less bad than printing panic messages to stderr // on correct use of this crate. Maybe there is a libstd feature request // here. For now, if a user needs to guarantee that this failure mode does // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from // the main thread before launching any other threads. 
fn initialize() { type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static; let null_hook: Box = Box::new(|_panic_info| { /* ignore */ }); let sanity_check = &*null_hook as *const PanicHook; let original_hook = panic::take_hook(); panic::set_hook(null_hook); let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok(); WORKS.store(works as usize + 1, Ordering::SeqCst); let hopefully_null_hook = panic::take_hook(); panic::set_hook(original_hook); if sanity_check != &*hopefully_null_hook { panic!("observed race condition in proc_macro2::inside_proc_macro"); } } proc-macro2-1.0.28/src/fallback.rs000064400000000000000000000560070072674642500147740ustar 00000000000000use crate::parse::{self, Cursor}; use crate::{Delimiter, Spacing, TokenTree}; #[cfg(span_locations)] use std::cell::RefCell; #[cfg(span_locations)] use std::cmp; use std::fmt::{self, Debug, Display}; use std::iter::FromIterator; use std::mem; use std::ops::RangeBounds; #[cfg(procmacro2_semver_exempt)] use std::path::Path; use std::path::PathBuf; use std::str::FromStr; use std::vec; use unicode_xid::UnicodeXID; /// Force use of proc-macro2's fallback implementation of the API for now, even /// if the compiler's implementation is available. pub fn force() { #[cfg(wrap_proc_macro)] crate::detection::force_fallback(); } /// Resume using the compiler's implementation of the proc macro API if it is /// available. 
pub fn unforce() { #[cfg(wrap_proc_macro)] crate::detection::unforce_fallback(); } #[derive(Clone)] pub(crate) struct TokenStream { pub(crate) inner: Vec, } #[derive(Debug)] pub(crate) struct LexError { pub(crate) span: Span, } impl LexError { pub(crate) fn span(&self) -> Span { self.span } fn call_site() -> Self { LexError { span: Span::call_site(), } } } impl TokenStream { pub fn new() -> TokenStream { TokenStream { inner: Vec::new() } } pub fn is_empty(&self) -> bool { self.inner.len() == 0 } fn take_inner(&mut self) -> Vec { mem::replace(&mut self.inner, Vec::new()) } fn push_token(&mut self, token: TokenTree) { // https://github.com/alexcrichton/proc-macro2/issues/235 match token { #[cfg(not(no_bind_by_move_pattern_guard))] TokenTree::Literal(crate::Literal { #[cfg(wrap_proc_macro)] inner: crate::imp::Literal::Fallback(literal), #[cfg(not(wrap_proc_macro))] inner: literal, .. }) if literal.text.starts_with('-') => { push_negative_literal(self, literal); } #[cfg(no_bind_by_move_pattern_guard)] TokenTree::Literal(crate::Literal { #[cfg(wrap_proc_macro)] inner: crate::imp::Literal::Fallback(literal), #[cfg(not(wrap_proc_macro))] inner: literal, .. }) => { if literal.text.starts_with('-') { push_negative_literal(self, literal); } else { self.inner .push(TokenTree::Literal(crate::Literal::_new_stable(literal))); } } _ => self.inner.push(token), } #[cold] fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) { literal.text.remove(0); let mut punct = crate::Punct::new('-', Spacing::Alone); punct.set_span(crate::Span::_new_stable(literal.span)); stream.inner.push(TokenTree::Punct(punct)); stream .inner .push(TokenTree::Literal(crate::Literal::_new_stable(literal))); } } } // Nonrecursive to prevent stack overflow. 
impl Drop for TokenStream { fn drop(&mut self) { while let Some(token) = self.inner.pop() { let group = match token { TokenTree::Group(group) => group.inner, _ => continue, }; #[cfg(wrap_proc_macro)] let group = match group { crate::imp::Group::Fallback(group) => group, _ => continue, }; let mut group = group; self.inner.extend(group.stream.take_inner()); } } } #[cfg(span_locations)] fn get_cursor(src: &str) -> Cursor { // Create a dummy file & add it to the source map SOURCE_MAP.with(|cm| { let mut cm = cm.borrow_mut(); let name = format!("", cm.files.len()); let span = cm.add_file(&name, src); Cursor { rest: src, off: span.lo, } }) } #[cfg(not(span_locations))] fn get_cursor(src: &str) -> Cursor { Cursor { rest: src } } impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result { // Create a dummy file & add it to the source map let cursor = get_cursor(src); parse::token_stream(cursor) } } impl Display for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("cannot parse string into token stream") } } impl Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut joint = false; for (i, tt) in self.inner.iter().enumerate() { if i != 0 && !joint { write!(f, " ")?; } joint = false; match tt { TokenTree::Group(tt) => Display::fmt(tt, f), TokenTree::Ident(tt) => Display::fmt(tt, f), TokenTree::Punct(tt) => { joint = tt.spacing() == Spacing::Joint; Display::fmt(tt, f) } TokenTree::Literal(tt) => Display::fmt(tt, f), }? 
} Ok(()) } } impl Debug for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("TokenStream ")?; f.debug_list().entries(self.clone()).finish() } } #[cfg(use_proc_macro)] impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> TokenStream { inner .to_string() .parse() .expect("compiler token stream parse failed") } } #[cfg(use_proc_macro)] impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> proc_macro::TokenStream { inner .to_string() .parse() .expect("failed to parse to compiler tokens") } } impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { let mut stream = TokenStream::new(); stream.push_token(tree); stream } } impl FromIterator for TokenStream { fn from_iter>(tokens: I) -> Self { let mut stream = TokenStream::new(); stream.extend(tokens); stream } } impl FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { let mut v = Vec::new(); for mut stream in streams { v.extend(stream.take_inner()); } TokenStream { inner: v } } } impl Extend for TokenStream { fn extend>(&mut self, tokens: I) { tokens.into_iter().for_each(|token| self.push_token(token)); } } impl Extend for TokenStream { fn extend>(&mut self, streams: I) { self.inner.extend(streams.into_iter().flatten()); } } pub(crate) type TokenTreeIter = vec::IntoIter; impl IntoIterator for TokenStream { type Item = TokenTree; type IntoIter = TokenTreeIter; fn into_iter(mut self) -> TokenTreeIter { self.take_inner().into_iter() } } #[derive(Clone, PartialEq, Eq)] pub(crate) struct SourceFile { path: PathBuf, } impl SourceFile { /// Get the path to this source file as a string. pub fn path(&self) -> PathBuf { self.path.clone() } pub fn is_real(&self) -> bool { // XXX(nika): Support real files in the future? 
false } } impl Debug for SourceFile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SourceFile") .field("path", &self.path()) .field("is_real", &self.is_real()) .finish() } } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub(crate) struct LineColumn { pub line: usize, pub column: usize, } #[cfg(span_locations)] thread_local! { static SOURCE_MAP: RefCell = RefCell::new(SourceMap { // NOTE: We start with a single dummy file which all call_site() and // def_site() spans reference. files: vec![FileInfo { #[cfg(procmacro2_semver_exempt)] name: "".to_owned(), span: Span { lo: 0, hi: 0 }, lines: vec![0], }], }); } #[cfg(span_locations)] struct FileInfo { #[cfg(procmacro2_semver_exempt)] name: String, span: Span, lines: Vec, } #[cfg(span_locations)] impl FileInfo { fn offset_line_column(&self, offset: usize) -> LineColumn { assert!(self.span_within(Span { lo: offset as u32, hi: offset as u32 })); let offset = offset - self.span.lo as usize; match self.lines.binary_search(&offset) { Ok(found) => LineColumn { line: found + 1, column: 0, }, Err(idx) => LineColumn { line: idx, column: offset - self.lines[idx - 1], }, } } fn span_within(&self, span: Span) -> bool { span.lo >= self.span.lo && span.hi <= self.span.hi } } /// Computes the offsets of each line in the given source string /// and the total number of characters #[cfg(span_locations)] fn lines_offsets(s: &str) -> (usize, Vec) { let mut lines = vec![0]; let mut total = 0; for ch in s.chars() { total += 1; if ch == '\n' { lines.push(total); } } (total, lines) } #[cfg(span_locations)] struct SourceMap { files: Vec, } #[cfg(span_locations)] impl SourceMap { fn next_start_pos(&self) -> u32 { // Add 1 so there's always space between files. // // We'll always have at least 1 file, as we initialize our files list // with a dummy file. 
self.files.last().unwrap().span.hi + 1 } fn add_file(&mut self, name: &str, src: &str) -> Span { let (len, lines) = lines_offsets(src); let lo = self.next_start_pos(); // XXX(nika): Shouild we bother doing a checked cast or checked add here? let span = Span { lo, hi: lo + (len as u32), }; self.files.push(FileInfo { #[cfg(procmacro2_semver_exempt)] name: name.to_owned(), span, lines, }); #[cfg(not(procmacro2_semver_exempt))] let _ = name; span } fn fileinfo(&self, span: Span) -> &FileInfo { for file in &self.files { if file.span_within(span) { return file; } } panic!("Invalid span with no related FileInfo!"); } } #[derive(Clone, Copy, PartialEq, Eq)] pub(crate) struct Span { #[cfg(span_locations)] pub(crate) lo: u32, #[cfg(span_locations)] pub(crate) hi: u32, } impl Span { #[cfg(not(span_locations))] pub fn call_site() -> Span { Span {} } #[cfg(span_locations)] pub fn call_site() -> Span { Span { lo: 0, hi: 0 } } #[cfg(hygiene)] pub fn mixed_site() -> Span { Span::call_site() } #[cfg(procmacro2_semver_exempt)] pub fn def_site() -> Span { Span::call_site() } pub fn resolved_at(&self, _other: Span) -> Span { // Stable spans consist only of line/column information, so // `resolved_at` and `located_at` only select which span the // caller wants line/column information from. 
*self } pub fn located_at(&self, other: Span) -> Span { other } #[cfg(procmacro2_semver_exempt)] pub fn source_file(&self) -> SourceFile { SOURCE_MAP.with(|cm| { let cm = cm.borrow(); let fi = cm.fileinfo(*self); SourceFile { path: Path::new(&fi.name).to_owned(), } }) } #[cfg(span_locations)] pub fn start(&self) -> LineColumn { SOURCE_MAP.with(|cm| { let cm = cm.borrow(); let fi = cm.fileinfo(*self); fi.offset_line_column(self.lo as usize) }) } #[cfg(span_locations)] pub fn end(&self) -> LineColumn { SOURCE_MAP.with(|cm| { let cm = cm.borrow(); let fi = cm.fileinfo(*self); fi.offset_line_column(self.hi as usize) }) } #[cfg(not(span_locations))] pub fn join(&self, _other: Span) -> Option { Some(Span {}) } #[cfg(span_locations)] pub fn join(&self, other: Span) -> Option { SOURCE_MAP.with(|cm| { let cm = cm.borrow(); // If `other` is not within the same FileInfo as us, return None. if !cm.fileinfo(*self).span_within(other) { return None; } Some(Span { lo: cmp::min(self.lo, other.lo), hi: cmp::max(self.hi, other.hi), }) }) } #[cfg(not(span_locations))] fn first_byte(self) -> Self { self } #[cfg(span_locations)] fn first_byte(self) -> Self { Span { lo: self.lo, hi: cmp::min(self.lo.saturating_add(1), self.hi), } } #[cfg(not(span_locations))] fn last_byte(self) -> Self { self } #[cfg(span_locations)] fn last_byte(self) -> Self { Span { lo: cmp::max(self.hi.saturating_sub(1), self.lo), hi: self.hi, } } } impl Debug for Span { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { #[cfg(span_locations)] return write!(f, "bytes({}..{})", self.lo, self.hi); #[cfg(not(span_locations))] write!(f, "Span") } } pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { #[cfg(span_locations)] { if span.lo == 0 && span.hi == 0 { return; } } if cfg!(span_locations) { debug.field("span", &span); } } #[derive(Clone)] pub(crate) struct Group { delimiter: Delimiter, stream: TokenStream, span: Span, } impl Group { pub fn new(delimiter: Delimiter, stream: 
TokenStream) -> Group { Group { delimiter, stream, span: Span::call_site(), } } pub fn delimiter(&self) -> Delimiter { self.delimiter } pub fn stream(&self) -> TokenStream { self.stream.clone() } pub fn span(&self) -> Span { self.span } pub fn span_open(&self) -> Span { self.span.first_byte() } pub fn span_close(&self) -> Span { self.span.last_byte() } pub fn set_span(&mut self, span: Span) { self.span = span; } } impl Display for Group { // We attempt to match libproc_macro's formatting. // Empty parens: () // Nonempty parens: (...) // Empty brackets: [] // Nonempty brackets: [...] // Empty braces: { } // Nonempty braces: { ... } fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (open, close) = match self.delimiter { Delimiter::Parenthesis => ("(", ")"), Delimiter::Brace => ("{ ", "}"), Delimiter::Bracket => ("[", "]"), Delimiter::None => ("", ""), }; f.write_str(open)?; Display::fmt(&self.stream, f)?; if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() { f.write_str(" ")?; } f.write_str(close)?; Ok(()) } } impl Debug for Group { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let mut debug = fmt.debug_struct("Group"); debug.field("delimiter", &self.delimiter); debug.field("stream", &self.stream); debug_span_field_if_nontrivial(&mut debug, self.span); debug.finish() } } #[derive(Clone)] pub(crate) struct Ident { sym: String, span: Span, raw: bool, } impl Ident { fn _new(string: &str, raw: bool, span: Span) -> Ident { validate_ident(string); Ident { sym: string.to_owned(), span, raw, } } pub fn new(string: &str, span: Span) -> Ident { Ident::_new(string, false, span) } pub fn new_raw(string: &str, span: Span) -> Ident { Ident::_new(string, true, span) } pub fn span(&self) -> Span { self.span } pub fn set_span(&mut self, span: Span) { self.span = span; } } pub(crate) fn is_ident_start(c: char) -> bool { ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_' || (c > '\x7f' && UnicodeXID::is_xid_start(c)) } pub(crate) fn 
is_ident_continue(c: char) -> bool { ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_' || ('0' <= c && c <= '9') || (c > '\x7f' && UnicodeXID::is_xid_continue(c)) } fn validate_ident(string: &str) { let validate = string; if validate.is_empty() { panic!("Ident is not allowed to be empty; use Option"); } if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') { panic!("Ident cannot be a number; use Literal instead"); } fn ident_ok(string: &str) -> bool { let mut chars = string.chars(); let first = chars.next().unwrap(); if !is_ident_start(first) { return false; } for ch in chars { if !is_ident_continue(ch) { return false; } } true } if !ident_ok(validate) { panic!("{:?} is not a valid Ident", string); } } impl PartialEq for Ident { fn eq(&self, other: &Ident) -> bool { self.sym == other.sym && self.raw == other.raw } } impl PartialEq for Ident where T: ?Sized + AsRef, { fn eq(&self, other: &T) -> bool { let other = other.as_ref(); if self.raw { other.starts_with("r#") && self.sym == other[2..] } else { self.sym == other } } } impl Display for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.raw { f.write_str("r#")?; } Display::fmt(&self.sym, f) } } impl Debug for Ident { // Ident(proc_macro), Ident(r#union) #[cfg(not(span_locations))] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut debug = f.debug_tuple("Ident"); debug.field(&format_args!("{}", self)); debug.finish() } // Ident { // sym: proc_macro, // span: bytes(128..138) // } #[cfg(span_locations)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut debug = f.debug_struct("Ident"); debug.field("sym", &format_args!("{}", self)); debug_span_field_if_nontrivial(&mut debug, self.span); debug.finish() } } #[derive(Clone)] pub(crate) struct Literal { text: String, span: Span, } macro_rules! 
suffixed_numbers { ($($name:ident => $kind:ident,)*) => ($( pub fn $name(n: $kind) -> Literal { Literal::_new(format!(concat!("{}", stringify!($kind)), n)) } )*) } macro_rules! unsuffixed_numbers { ($($name:ident => $kind:ident,)*) => ($( pub fn $name(n: $kind) -> Literal { Literal::_new(n.to_string()) } )*) } impl Literal { pub(crate) fn _new(text: String) -> Literal { Literal { text, span: Span::call_site(), } } suffixed_numbers! { u8_suffixed => u8, u16_suffixed => u16, u32_suffixed => u32, u64_suffixed => u64, u128_suffixed => u128, usize_suffixed => usize, i8_suffixed => i8, i16_suffixed => i16, i32_suffixed => i32, i64_suffixed => i64, i128_suffixed => i128, isize_suffixed => isize, f32_suffixed => f32, f64_suffixed => f64, } unsuffixed_numbers! { u8_unsuffixed => u8, u16_unsuffixed => u16, u32_unsuffixed => u32, u64_unsuffixed => u64, u128_unsuffixed => u128, usize_unsuffixed => usize, i8_unsuffixed => i8, i16_unsuffixed => i16, i32_unsuffixed => i32, i64_unsuffixed => i64, i128_unsuffixed => i128, isize_unsuffixed => isize, } pub fn f32_unsuffixed(f: f32) -> Literal { let mut s = f.to_string(); if !s.contains('.') { s.push_str(".0"); } Literal::_new(s) } pub fn f64_unsuffixed(f: f64) -> Literal { let mut s = f.to_string(); if !s.contains('.') { s.push_str(".0"); } Literal::_new(s) } pub fn string(t: &str) -> Literal { let mut text = String::with_capacity(t.len() + 2); text.push('"'); for c in t.chars() { if c == '\'' { // escape_debug turns this into "\'" which is unnecessary. text.push(c); } else { text.extend(c.escape_debug()); } } text.push('"'); Literal::_new(text) } pub fn character(t: char) -> Literal { let mut text = String::new(); text.push('\''); if t == '"' { // escape_debug turns this into '\"' which is unnecessary. 
text.push(t); } else { text.extend(t.escape_debug()); } text.push('\''); Literal::_new(text) } pub fn byte_string(bytes: &[u8]) -> Literal { let mut escaped = "b\"".to_string(); for b in bytes { #[allow(clippy::match_overlapping_arm)] match *b { b'\0' => escaped.push_str(r"\0"), b'\t' => escaped.push_str(r"\t"), b'\n' => escaped.push_str(r"\n"), b'\r' => escaped.push_str(r"\r"), b'"' => escaped.push_str("\\\""), b'\\' => escaped.push_str("\\\\"), b'\x20'..=b'\x7E' => escaped.push(*b as char), _ => escaped.push_str(&format!("\\x{:02X}", b)), } } escaped.push('"'); Literal::_new(escaped) } pub fn span(&self) -> Span { self.span } pub fn set_span(&mut self, span: Span) { self.span = span; } pub fn subspan>(&self, _range: R) -> Option { None } } impl FromStr for Literal { type Err = LexError; fn from_str(repr: &str) -> Result { let cursor = get_cursor(repr); if let Ok((_rest, literal)) = parse::literal(cursor) { if literal.text.len() == repr.len() { return Ok(literal); } } Err(LexError::call_site()) } } impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.text, f) } } impl Debug for Literal { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let mut debug = fmt.debug_struct("Literal"); debug.field("lit", &format_args!("{}", self.text)); debug_span_field_if_nontrivial(&mut debug, self.span); debug.finish() } } proc-macro2-1.0.28/src/lib.rs000064400000000000000000001205350072674642500140010ustar 00000000000000//! A wrapper around the procedural macro API of the compiler's [`proc_macro`] //! crate. This library serves two purposes: //! //! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/ //! //! - **Bring proc-macro-like functionality to other contexts like build.rs and //! main.rs.** Types from `proc_macro` are entirely specific to procedural //! macros and cannot ever exist in code outside of a procedural macro. //! Meanwhile `proc_macro2` types may exist anywhere including non-macro code. //! 
By developing foundational libraries like [syn] and [quote] against //! `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem //! becomes easily applicable to many other use cases and we avoid //! reimplementing non-macro equivalents of those libraries. //! //! - **Make procedural macros unit testable.** As a consequence of being //! specific to procedural macros, nothing that uses `proc_macro` can be //! executed from a unit test. In order for helper libraries or components of //! a macro to be testable in isolation, they must be implemented using //! `proc_macro2`. //! //! [syn]: https://github.com/dtolnay/syn //! [quote]: https://github.com/dtolnay/quote //! //! # Usage //! //! The skeleton of a typical procedural macro typically looks like this: //! //! ``` //! extern crate proc_macro; //! //! # const IGNORE: &str = stringify! { //! #[proc_macro_derive(MyDerive)] //! # }; //! # #[cfg(wrap_proc_macro)] //! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { //! let input = proc_macro2::TokenStream::from(input); //! //! let output: proc_macro2::TokenStream = { //! /* transform input */ //! # input //! }; //! //! proc_macro::TokenStream::from(output) //! } //! ``` //! //! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to //! propagate parse errors correctly back to the compiler when parsing fails. //! //! [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html //! //! # Unstable features //! //! The default feature set of proc-macro2 tracks the most recent stable //! compiler API. Functionality in `proc_macro` that is not yet stable is not //! exposed by proc-macro2 by default. //! //! To opt into the additional APIs available in the most recent nightly //! compiler, the `procmacro2_semver_exempt` config flag must be passed to //! rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As //! 
these are unstable APIs that track the nightly compiler, minor versions of //! proc-macro2 may make breaking changes to them at any time. //! //! ```sh //! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build //! ``` //! //! Note that this must not only be done for your crate, but for any crate that //! depends on your crate. This infectious nature is intentional, as it serves //! as a reminder that you are outside of the normal semver guarantees. //! //! Semver exempt methods are marked as such in the proc-macro2 documentation. //! //! # Thread-Safety //! //! Most types in this crate are `!Sync` because the underlying compiler //! types make use of thread-local memory, meaning they cannot be accessed from //! a different thread. // Proc-macro2 types in rustdoc of other crates get linked to here. #![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.28")] #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))] #![cfg_attr(doc_cfg, feature(doc_cfg))] #![allow(clippy::needless_doctest_main, clippy::vec_init_then_push)] #[cfg(use_proc_macro)] extern crate proc_macro; mod marker; mod parse; #[cfg(wrap_proc_macro)] mod detection; // Public for proc_macro2::fallback::force() and unforce(), but those are quite // a niche use case so we omit it from rustdoc. #[doc(hidden)] pub mod fallback; #[cfg(not(wrap_proc_macro))] use crate::fallback as imp; #[path = "wrapper.rs"] #[cfg(wrap_proc_macro)] mod imp; use crate::marker::Marker; use std::cmp::Ordering; use std::error::Error; use std::fmt::{self, Debug, Display}; use std::hash::{Hash, Hasher}; use std::iter::FromIterator; use std::ops::RangeBounds; #[cfg(procmacro2_semver_exempt)] use std::path::PathBuf; use std::str::FromStr; /// An abstract stream of tokens, or more concretely a sequence of token trees. 
/// /// This type provides interfaces for iterating over token trees and for /// collecting token trees into one stream. /// /// Token stream is both the input and output of `#[proc_macro]`, /// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions. #[derive(Clone)] pub struct TokenStream { inner: imp::TokenStream, _marker: Marker, } /// Error returned from `TokenStream::from_str`. pub struct LexError { inner: imp::LexError, _marker: Marker, } impl TokenStream { fn _new(inner: imp::TokenStream) -> TokenStream { TokenStream { inner, _marker: Marker, } } fn _new_stable(inner: fallback::TokenStream) -> TokenStream { TokenStream { inner: inner.into(), _marker: Marker, } } /// Returns an empty `TokenStream` containing no token trees. pub fn new() -> TokenStream { TokenStream::_new(imp::TokenStream::new()) } /// Checks if this `TokenStream` is empty. pub fn is_empty(&self) -> bool { self.inner.is_empty() } } /// `TokenStream::default()` returns an empty stream, /// i.e. this is equivalent with `TokenStream::new()`. impl Default for TokenStream { fn default() -> Self { TokenStream::new() } } /// Attempts to break the string into tokens and parse those tokens into a token /// stream. /// /// May fail for a number of reasons, for example, if the string contains /// unbalanced delimiters or characters not existing in the language. /// /// NOTE: Some errors may cause panics instead of returning `LexError`. We /// reserve the right to change these errors into `LexError`s later. 
impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result { let e = src.parse().map_err(|e| LexError { inner: e, _marker: Marker, })?; Ok(TokenStream::_new(e)) } } #[cfg(use_proc_macro)] impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> TokenStream { TokenStream::_new(inner.into()) } } #[cfg(use_proc_macro)] impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> proc_macro::TokenStream { inner.inner.into() } } impl From for TokenStream { fn from(token: TokenTree) -> Self { TokenStream::_new(imp::TokenStream::from(token)) } } impl Extend for TokenStream { fn extend>(&mut self, streams: I) { self.inner.extend(streams) } } impl Extend for TokenStream { fn extend>(&mut self, streams: I) { self.inner .extend(streams.into_iter().map(|stream| stream.inner)) } } /// Collects a number of token trees into a single stream. impl FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { TokenStream::_new(streams.into_iter().collect()) } } impl FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { TokenStream::_new(streams.into_iter().map(|i| i.inner).collect()) } } /// Prints the token stream as a string that is supposed to be losslessly /// convertible back into the same token stream (modulo spans), except for /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative /// numeric literals. impl Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.inner, f) } } /// Prints token in a form convenient for debugging. 
impl Debug for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(&self.inner, f) } } impl LexError { pub fn span(&self) -> Span { Span::_new(self.inner.span()) } } impl Debug for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(&self.inner, f) } } impl Display for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.inner, f) } } impl Error for LexError {} /// The source file of a given `Span`. /// /// This type is semver exempt and not exposed by default. #[cfg(procmacro2_semver_exempt)] #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] #[derive(Clone, PartialEq, Eq)] pub struct SourceFile { inner: imp::SourceFile, _marker: Marker, } #[cfg(procmacro2_semver_exempt)] impl SourceFile { fn _new(inner: imp::SourceFile) -> Self { SourceFile { inner, _marker: Marker, } } /// Get the path to this source file. /// /// ### Note /// /// If the code span associated with this `SourceFile` was generated by an /// external macro, this may not be an actual path on the filesystem. Use /// [`is_real`] to check. /// /// Also note that even if `is_real` returns `true`, if /// `--remap-path-prefix` was passed on the command line, the path as given /// may not actually be valid. /// /// [`is_real`]: #method.is_real pub fn path(&self) -> PathBuf { self.inner.path() } /// Returns `true` if this source file is a real source file, and not /// generated by an external macro's expansion. pub fn is_real(&self) -> bool { self.inner.is_real() } } #[cfg(procmacro2_semver_exempt)] impl Debug for SourceFile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(&self.inner, f) } } /// A line-column pair representing the start or end of a `Span`. /// /// This type is semver exempt and not exposed by default. 
#[cfg(span_locations)] #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct LineColumn { /// The 1-indexed line in the source file on which the span starts or ends /// (inclusive). pub line: usize, /// The 0-indexed column (in UTF-8 characters) in the source file on which /// the span starts or ends (inclusive). pub column: usize, } #[cfg(span_locations)] impl Ord for LineColumn { fn cmp(&self, other: &Self) -> Ordering { self.line .cmp(&other.line) .then(self.column.cmp(&other.column)) } } #[cfg(span_locations)] impl PartialOrd for LineColumn { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } /// A region of source code, along with macro expansion information. #[derive(Copy, Clone)] pub struct Span { inner: imp::Span, _marker: Marker, } impl Span { fn _new(inner: imp::Span) -> Span { Span { inner, _marker: Marker, } } fn _new_stable(inner: fallback::Span) -> Span { Span { inner: inner.into(), _marker: Marker, } } /// The span of the invocation of the current procedural macro. /// /// Identifiers created with this span will be resolved as if they were /// written directly at the macro call location (call-site hygiene) and /// other code at the macro call site will be able to refer to them as well. pub fn call_site() -> Span { Span::_new(imp::Span::call_site()) } /// The span located at the invocation of the procedural macro, but with /// local variables, labels, and `$crate` resolved at the definition site /// of the macro. This is the same hygiene behavior as `macro_rules`. /// /// This function requires Rust 1.45 or later. #[cfg(hygiene)] pub fn mixed_site() -> Span { Span::_new(imp::Span::mixed_site()) } /// A span that resolves at the macro definition site. /// /// This method is semver exempt and not exposed by default. 
#[cfg(procmacro2_semver_exempt)] #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] pub fn def_site() -> Span { Span::_new(imp::Span::def_site()) } /// Creates a new span with the same line/column information as `self` but /// that resolves symbols as though it were at `other`. pub fn resolved_at(&self, other: Span) -> Span { Span::_new(self.inner.resolved_at(other.inner)) } /// Creates a new span with the same name resolution behavior as `self` but /// with the line/column information of `other`. pub fn located_at(&self, other: Span) -> Span { Span::_new(self.inner.located_at(other.inner)) } /// Convert `proc_macro2::Span` to `proc_macro::Span`. /// /// This method is available when building with a nightly compiler, or when /// building with rustc 1.29+ *without* semver exempt features. /// /// # Panics /// /// Panics if called from outside of a procedural macro. Unlike /// `proc_macro2::Span`, the `proc_macro::Span` type can only exist within /// the context of a procedural macro invocation. #[cfg(wrap_proc_macro)] pub fn unwrap(self) -> proc_macro::Span { self.inner.unwrap() } // Soft deprecated. Please use Span::unwrap. #[cfg(wrap_proc_macro)] #[doc(hidden)] pub fn unstable(self) -> proc_macro::Span { self.unwrap() } /// The original source file into which this span points. /// /// This method is semver exempt and not exposed by default. #[cfg(procmacro2_semver_exempt)] #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] pub fn source_file(&self) -> SourceFile { SourceFile::_new(self.inner.source_file()) } /// Get the starting line/column in the source file for this span. /// /// This method requires the `"span-locations"` feature to be enabled. /// /// When executing in a procedural macro context, the returned line/column /// are only meaningful if compiled with a nightly toolchain. The stable /// toolchain does not have this information available. 
When executing /// outside of a procedural macro, such as main.rs or build.rs, the /// line/column are always meaningful regardless of toolchain. #[cfg(span_locations)] #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] pub fn start(&self) -> LineColumn { let imp::LineColumn { line, column } = self.inner.start(); LineColumn { line, column } } /// Get the ending line/column in the source file for this span. /// /// This method requires the `"span-locations"` feature to be enabled. /// /// When executing in a procedural macro context, the returned line/column /// are only meaningful if compiled with a nightly toolchain. The stable /// toolchain does not have this information available. When executing /// outside of a procedural macro, such as main.rs or build.rs, the /// line/column are always meaningful regardless of toolchain. #[cfg(span_locations)] #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] pub fn end(&self) -> LineColumn { let imp::LineColumn { line, column } = self.inner.end(); LineColumn { line, column } } /// Create a new span encompassing `self` and `other`. /// /// Returns `None` if `self` and `other` are from different files. /// /// Warning: the underlying [`proc_macro::Span::join`] method is /// nightly-only. When called from within a procedural macro not using a /// nightly compiler, this method will always return `None`. /// /// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join pub fn join(&self, other: Span) -> Option { self.inner.join(other.inner).map(Span::_new) } /// Compares two spans to see if they're equal. /// /// This method is semver exempt and not exposed by default. #[cfg(procmacro2_semver_exempt)] #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] pub fn eq(&self, other: &Span) -> bool { self.inner.eq(&other.inner) } } /// Prints a span in a form convenient for debugging. 
impl Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.inner, f)
    }
}

/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[derive(Clone)]
pub enum TokenTree {
    /// A token stream surrounded by bracket delimiters.
    Group(Group),
    /// An identifier.
    Ident(Ident),
    /// A single punctuation character (`+`, `,`, `$`, etc.).
    Punct(Punct),
    /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
    Literal(Literal),
}

impl TokenTree {
    /// Returns the span of this tree, delegating to the `span` method of
    /// the contained token or a delimited stream.
    pub fn span(&self) -> Span {
        match self {
            TokenTree::Group(t) => t.span(),
            TokenTree::Ident(t) => t.span(),
            TokenTree::Punct(t) => t.span(),
            TokenTree::Literal(t) => t.span(),
        }
    }

    /// Configures the span for *only this token*.
    ///
    /// Note that if this token is a `Group` then this method will not configure
    /// the span of each of the internal tokens, this will simply delegate to
    /// the `set_span` method of each variant.
    pub fn set_span(&mut self, span: Span) {
        match self {
            TokenTree::Group(t) => t.set_span(span),
            TokenTree::Ident(t) => t.set_span(span),
            TokenTree::Punct(t) => t.set_span(span),
            TokenTree::Literal(t) => t.set_span(span),
        }
    }
}

impl From<Group> for TokenTree {
    fn from(g: Group) -> TokenTree {
        TokenTree::Group(g)
    }
}

impl From<Ident> for TokenTree {
    fn from(g: Ident) -> TokenTree {
        TokenTree::Ident(g)
    }
}

impl From<Punct> for TokenTree {
    fn from(g: Punct) -> TokenTree {
        TokenTree::Punct(g)
    }
}

impl From<Literal> for TokenTree {
    fn from(g: Literal) -> TokenTree {
        TokenTree::Literal(g)
    }
}

/// Prints the token tree as a string that is supposed to be losslessly
/// convertible back into the same token tree (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
impl Display for TokenTree {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenTree::Group(t) => Display::fmt(t, f),
            TokenTree::Ident(t) => Display::fmt(t, f),
            TokenTree::Punct(t) => Display::fmt(t, f),
            TokenTree::Literal(t) => Display::fmt(t, f),
        }
    }
}

/// Prints token tree in a form convenient for debugging.
impl Debug for TokenTree {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Each of these has the name in the struct type in the derived debug,
        // so don't bother with an extra layer of indirection
        match self {
            TokenTree::Group(t) => Debug::fmt(t, f),
            TokenTree::Ident(t) => {
                let mut debug = f.debug_struct("Ident");
                debug.field("sym", &format_args!("{}", t));
                imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
                debug.finish()
            }
            TokenTree::Punct(t) => Debug::fmt(t, f),
            TokenTree::Literal(t) => Debug::fmt(t, f),
        }
    }
}

/// A delimited token stream.
///
/// A `Group` internally contains a `TokenStream` which is surrounded by
/// `Delimiter`s.
#[derive(Clone)]
pub struct Group {
    inner: imp::Group,
}

/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// `Ø ... Ø`
    ///
    /// An implicit delimiter, that may, for example, appear around tokens
    /// coming from a "macro variable" `$var`. It is important to preserve
    /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
    /// Implicit delimiters may not survive roundtrip of a token stream through
    /// a string.
    None,
}

impl Group {
    fn _new(inner: imp::Group) -> Self {
        Group { inner }
    }

    fn _new_stable(inner: fallback::Group) -> Self {
        Group {
            inner: inner.into(),
        }
    }

    /// Creates a new `Group` with the given delimiter and token stream.
    ///
    /// This constructor will set the span for this group to
    /// `Span::call_site()`. To change the span you can use the `set_span`
    /// method below.
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
        Group {
            inner: imp::Group::new(delimiter, stream.inner),
        }
    }

    /// Returns the delimiter of this `Group`
    pub fn delimiter(&self) -> Delimiter {
        self.inner.delimiter()
    }

    /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
    ///
    /// Note that the returned token stream does not include the delimiter
    /// returned above.
    pub fn stream(&self) -> TokenStream {
        TokenStream::_new(self.inner.stream())
    }

    /// Returns the span for the delimiters of this token stream, spanning the
    /// entire `Group`.
    ///
    /// ```text
    /// pub fn span(&self) -> Span {
    ///            ^^^^^^^
    /// ```
    pub fn span(&self) -> Span {
        Span::_new(self.inner.span())
    }

    /// Returns the span pointing to the opening delimiter of this group.
    ///
    /// ```text
    /// pub fn span_open(&self) -> Span {
    ///                 ^
    /// ```
    pub fn span_open(&self) -> Span {
        Span::_new(self.inner.span_open())
    }

    /// Returns the span pointing to the closing delimiter of this group.
    ///
    /// ```text
    /// pub fn span_close(&self) -> Span {
    ///                        ^
    /// ```
    pub fn span_close(&self) -> Span {
        Span::_new(self.inner.span_close())
    }

    /// Configures the span for this `Group`'s delimiters, but not its internal
    /// tokens.
    ///
    /// This method will **not** set the span of all the internal tokens spanned
    /// by this group, but rather it will only set the span of the delimiter
    /// tokens at the level of the `Group`.
    pub fn set_span(&mut self, span: Span) {
        self.inner.set_span(span.inner)
    }
}

/// Prints the group as a string that should be losslessly convertible back
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
impl Display for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.inner, formatter)
    }
}

impl Debug for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.inner, formatter)
    }
}

/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
///
/// Multicharacter operators like `+=` are represented as two instances of
/// `Punct` with different forms of `Spacing` returned.
#[derive(Clone)]
pub struct Punct {
    ch: char,
    spacing: Spacing,
    span: Span,
}

/// Whether a `Punct` is followed immediately by another `Punct` or followed by
/// another token or whitespace.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Spacing {
    /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
    Alone,
    /// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`.
    ///
    /// Additionally, single quote `'` can join with identifiers to form
    /// lifetimes `'ident`.
    Joint,
}

impl Punct {
    /// Creates a new `Punct` from the given character and spacing.
    ///
    /// The `ch` argument must be a valid punctuation character permitted by the
    /// language, otherwise the function will panic.
    ///
    /// The returned `Punct` will have the default span of `Span::call_site()`
    /// which can be further configured with the `set_span` method below.
    pub fn new(ch: char, spacing: Spacing) -> Punct {
        Punct {
            ch,
            spacing,
            span: Span::call_site(),
        }
    }

    /// Returns the value of this punctuation character as `char`.
    pub fn as_char(&self) -> char {
        self.ch
    }

    /// Returns the spacing of this punctuation character, indicating whether
    /// it's immediately followed by another `Punct` in the token stream, so
    /// they can potentially be combined into a multicharacter operator
    /// (`Joint`), or it's followed by some other token or whitespace (`Alone`)
    /// so the operator has certainly ended.
    pub fn spacing(&self) -> Spacing {
        self.spacing
    }

    /// Returns the span for this punctuation character.
    pub fn span(&self) -> Span {
        self.span
    }

    /// Configure the span for this punctuation character.
    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

/// Prints the punctuation character as a string that should be losslessly
/// convertible back into the same character.
impl Display for Punct {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.ch, f)
    }
}

impl Debug for Punct {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut debug = fmt.debug_struct("Punct");
        debug.field("char", &self.ch);
        debug.field("spacing", &self.spacing);
        imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
        debug.finish()
    }
}

/// A word of Rust code, which may be a keyword or legal variable name.
///
/// An identifier consists of at least one Unicode code point, the first of
/// which has the XID_Start property and the rest of which have the XID_Continue
/// property.
///
/// - The empty string is not an identifier. Use `Option<Ident>`.
/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
///
/// An identifier constructed with `Ident::new` is permitted to be a Rust
/// keyword, though parsing one through its [`Parse`] implementation rejects
/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
/// behaviour of `Ident::new`.
///
/// [`Parse`]: https://docs.rs/syn/1.0/syn/parse/trait.Parse.html
///
/// # Examples
///
/// A new ident can be created from a string using the `Ident::new` function.
/// A span must be provided explicitly which governs the name resolution
/// behavior of the resulting identifier.
///
/// ```
/// use proc_macro2::{Ident, Span};
///
/// fn main() {
///     let call_ident = Ident::new("calligraphy", Span::call_site());
///
///     println!("{}", call_ident);
/// }
/// ```
///
/// An ident can be interpolated into a token stream using the `quote!` macro.
///
/// ```
/// use proc_macro2::{Ident, Span};
/// use quote::quote;
///
/// fn main() {
///     let ident = Ident::new("demo", Span::call_site());
///
///     // Create a variable binding whose name is this ident.
///     let expanded = quote! { let #ident = 10; };
///
///     // Create a variable binding with a slightly different name.
///     let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site());
///     let expanded = quote! { let #temp_ident = 10; };
/// }
/// ```
///
/// A string representation of the ident is available through the `to_string()`
/// method.
///
/// ```
/// # use proc_macro2::{Ident, Span};
/// #
/// # let ident = Ident::new("another_identifier", Span::call_site());
/// #
/// // Examine the ident as a string.
/// let ident_string = ident.to_string();
/// if ident_string.len() > 60 {
///     println!("Very long identifier: {}", ident_string)
/// }
/// ```
#[derive(Clone)]
pub struct Ident {
    inner: imp::Ident,
    _marker: Marker,
}

impl Ident {
    fn _new(inner: imp::Ident) -> Ident {
        Ident {
            inner,
            _marker: Marker,
        }
    }

    /// Creates a new `Ident` with the given `string` as well as the specified
    /// `span`.
    ///
    /// The `string` argument must be a valid identifier permitted by the
    /// language, otherwise the function will panic.
    ///
    /// Note that `span`, currently in rustc, configures the hygiene information
    /// for this identifier.
    ///
    /// As of this time `Span::call_site()` explicitly opts-in to "call-site"
    /// hygiene meaning that identifiers created with this span will be resolved
    /// as if they were written directly at the location of the macro call, and
    /// other code at the macro call site will be able to refer to them as well.
    ///
    /// Later spans like `Span::def_site()` will allow to opt-in to
    /// "definition-site" hygiene meaning that identifiers created with this
    /// span will be resolved at the location of the macro definition and other
    /// code at the macro call site will not be able to refer to them.
    ///
    /// Due to the current importance of hygiene this constructor, unlike other
    /// tokens, requires a `Span` to be specified at construction.
    ///
    /// # Panics
    ///
    /// Panics if the input string is neither a keyword nor a legal variable
    /// name. If you are not sure whether the string contains an identifier and
    /// need to handle an error case, use
    /// syn::parse_str::<Ident>
    /// rather than `Ident::new`.
    pub fn new(string: &str, span: Span) -> Ident {
        Ident::_new(imp::Ident::new(string, span.inner))
    }

    /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
    ///
    /// This method is semver exempt and not exposed by default.
    #[cfg(procmacro2_semver_exempt)]
    #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
    pub fn new_raw(string: &str, span: Span) -> Ident {
        Ident::_new_raw(string, span)
    }

    fn _new_raw(string: &str, span: Span) -> Ident {
        Ident::_new(imp::Ident::new_raw(string, span.inner))
    }

    /// Returns the span of this `Ident`.
    pub fn span(&self) -> Span {
        Span::_new(self.inner.span())
    }

    /// Configures the span of this `Ident`, possibly changing its hygiene
    /// context.
    pub fn set_span(&mut self, span: Span) {
        self.inner.set_span(span.inner);
    }
}

impl PartialEq for Ident {
    fn eq(&self, other: &Ident) -> bool {
        self.inner == other.inner
    }
}

impl<T> PartialEq<T> for Ident
where
    T: ?Sized + AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        self.inner == other
    }
}

impl Eq for Ident {}

impl PartialOrd for Ident {
    fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Ident {
    fn cmp(&self, other: &Ident) -> Ordering {
        self.to_string().cmp(&other.to_string())
    }
}

impl Hash for Ident {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.to_string().hash(hasher)
    }
}

/// Prints the identifier as a string that should be losslessly convertible back
/// into the same identifier.
impl Display for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.inner, f) } } impl Debug for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(&self.inner, f) } } /// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`), /// byte character (`b'a'`), an integer or floating point number with or without /// a suffix (`1`, `1u8`, `2.3`, `2.3f32`). /// /// Boolean literals like `true` and `false` do not belong here, they are /// `Ident`s. #[derive(Clone)] pub struct Literal { inner: imp::Literal, _marker: Marker, } macro_rules! suffixed_int_literals { ($($name:ident => $kind:ident,)*) => ($( /// Creates a new suffixed integer literal with the specified value. /// /// This function will create an integer like `1u32` where the integer /// value specified is the first part of the token and the integral is /// also suffixed at the end. Literals created from negative numbers may /// not survive roundtrips through `TokenStream` or strings and may be /// broken into two tokens (`-` and positive literal). /// /// Literals created through this method have the `Span::call_site()` /// span by default, which can be configured with the `set_span` method /// below. pub fn $name(n: $kind) -> Literal { Literal::_new(imp::Literal::$name(n)) } )*) } macro_rules! unsuffixed_int_literals { ($($name:ident => $kind:ident,)*) => ($( /// Creates a new unsuffixed integer literal with the specified value. /// /// This function will create an integer like `1` where the integer /// value specified is the first part of the token. No suffix is /// specified on this token, meaning that invocations like /// `Literal::i8_unsuffixed(1)` are equivalent to /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers /// may not survive roundtrips through `TokenStream` or strings and may /// be broken into two tokens (`-` and positive literal). 
/// /// Literals created through this method have the `Span::call_site()` /// span by default, which can be configured with the `set_span` method /// below. pub fn $name(n: $kind) -> Literal { Literal::_new(imp::Literal::$name(n)) } )*) } impl Literal { fn _new(inner: imp::Literal) -> Literal { Literal { inner, _marker: Marker, } } fn _new_stable(inner: fallback::Literal) -> Literal { Literal { inner: inner.into(), _marker: Marker, } } suffixed_int_literals! { u8_suffixed => u8, u16_suffixed => u16, u32_suffixed => u32, u64_suffixed => u64, u128_suffixed => u128, usize_suffixed => usize, i8_suffixed => i8, i16_suffixed => i16, i32_suffixed => i32, i64_suffixed => i64, i128_suffixed => i128, isize_suffixed => isize, } unsuffixed_int_literals! { u8_unsuffixed => u8, u16_unsuffixed => u16, u32_unsuffixed => u32, u64_unsuffixed => u64, u128_unsuffixed => u128, usize_unsuffixed => usize, i8_unsuffixed => i8, i16_unsuffixed => i16, i32_unsuffixed => i32, i64_unsuffixed => i64, i128_unsuffixed => i128, isize_unsuffixed => isize, } /// Creates a new unsuffixed floating-point literal. /// /// This constructor is similar to those like `Literal::i8_unsuffixed` where /// the float's value is emitted directly into the token but no suffix is /// used, so it may be inferred to be a `f64` later in the compiler. /// Literals created from negative numbers may not survive rountrips through /// `TokenStream` or strings and may be broken into two tokens (`-` and /// positive literal). /// /// # Panics /// /// This function requires that the specified float is finite, for example /// if it is infinity or NaN this function will panic. pub fn f64_unsuffixed(f: f64) -> Literal { assert!(f.is_finite()); Literal::_new(imp::Literal::f64_unsuffixed(f)) } /// Creates a new suffixed floating-point literal. /// /// This constructor will create a literal like `1.0f64` where the value /// specified is the preceding part of the token and `f64` is the suffix of /// the token. 
This token will always be inferred to be an `f64` in the /// compiler. Literals created from negative numbers may not survive /// rountrips through `TokenStream` or strings and may be broken into two /// tokens (`-` and positive literal). /// /// # Panics /// /// This function requires that the specified float is finite, for example /// if it is infinity or NaN this function will panic. pub fn f64_suffixed(f: f64) -> Literal { assert!(f.is_finite()); Literal::_new(imp::Literal::f64_suffixed(f)) } /// Creates a new unsuffixed floating-point literal. /// /// This constructor is similar to those like `Literal::i8_unsuffixed` where /// the float's value is emitted directly into the token but no suffix is /// used, so it may be inferred to be a `f64` later in the compiler. /// Literals created from negative numbers may not survive rountrips through /// `TokenStream` or strings and may be broken into two tokens (`-` and /// positive literal). /// /// # Panics /// /// This function requires that the specified float is finite, for example /// if it is infinity or NaN this function will panic. pub fn f32_unsuffixed(f: f32) -> Literal { assert!(f.is_finite()); Literal::_new(imp::Literal::f32_unsuffixed(f)) } /// Creates a new suffixed floating-point literal. /// /// This constructor will create a literal like `1.0f32` where the value /// specified is the preceding part of the token and `f32` is the suffix of /// the token. This token will always be inferred to be an `f32` in the /// compiler. Literals created from negative numbers may not survive /// rountrips through `TokenStream` or strings and may be broken into two /// tokens (`-` and positive literal). /// /// # Panics /// /// This function requires that the specified float is finite, for example /// if it is infinity or NaN this function will panic. pub fn f32_suffixed(f: f32) -> Literal { assert!(f.is_finite()); Literal::_new(imp::Literal::f32_suffixed(f)) } /// String literal. 
pub fn string(string: &str) -> Literal { Literal::_new(imp::Literal::string(string)) } /// Character literal. pub fn character(ch: char) -> Literal { Literal::_new(imp::Literal::character(ch)) } /// Byte string literal. pub fn byte_string(s: &[u8]) -> Literal { Literal::_new(imp::Literal::byte_string(s)) } /// Returns the span encompassing this literal. pub fn span(&self) -> Span { Span::_new(self.inner.span()) } /// Configures the span associated for this literal. pub fn set_span(&mut self, span: Span) { self.inner.set_span(span.inner); } /// Returns a `Span` that is a subset of `self.span()` containing only /// the source bytes in range `range`. Returns `None` if the would-be /// trimmed span is outside the bounds of `self`. /// /// Warning: the underlying [`proc_macro::Literal::subspan`] method is /// nightly-only. When called from within a procedural macro not using a /// nightly compiler, this method will always return `None`. /// /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan pub fn subspan>(&self, range: R) -> Option { self.inner.subspan(range).map(Span::_new) } } impl FromStr for Literal { type Err = LexError; fn from_str(repr: &str) -> Result { repr.parse().map(Literal::_new).map_err(|inner| LexError { inner, _marker: Marker, }) } } impl Debug for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(&self.inner, f) } } impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.inner, f) } } /// Public implementation details for the `TokenStream` type, such as iterators. pub mod token_stream { use crate::marker::Marker; use crate::{imp, TokenTree}; use std::fmt::{self, Debug}; pub use crate::TokenStream; /// An iterator over `TokenStream`'s `TokenTree`s. /// /// The iteration is "shallow", e.g. the iterator doesn't recurse into /// delimited groups, and returns whole groups as token trees. 
#[derive(Clone)] pub struct IntoIter { inner: imp::TokenTreeIter, _marker: Marker, } impl Iterator for IntoIter { type Item = TokenTree; fn next(&mut self) -> Option { self.inner.next() } } impl Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(&self.inner, f) } } impl IntoIterator for TokenStream { type Item = TokenTree; type IntoIter = IntoIter; fn into_iter(self) -> IntoIter { IntoIter { inner: self.inner.into_iter(), _marker: Marker, } } } } proc-macro2-1.0.28/src/marker.rs000064400000000000000000000007560072674642500145160ustar 00000000000000use std::marker::PhantomData; use std::panic::{RefUnwindSafe, UnwindSafe}; use std::rc::Rc; // Zero sized marker with the correct set of autotrait impls we want all proc // macro types to have. pub(crate) type Marker = PhantomData; pub(crate) use self::value::*; mod value { pub(crate) use std::marker::PhantomData as Marker; } pub(crate) struct ProcMacroAutoTraits(Rc<()>); impl UnwindSafe for ProcMacroAutoTraits {} impl RefUnwindSafe for ProcMacroAutoTraits {} proc-macro2-1.0.28/src/parse.rs000064400000000000000000000600260072674642500143430ustar 00000000000000use crate::fallback::{ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream, }; use crate::{Delimiter, Punct, Spacing, TokenTree}; use std::char; use std::str::{Bytes, CharIndices, Chars}; #[derive(Copy, Clone, Eq, PartialEq)] pub(crate) struct Cursor<'a> { pub rest: &'a str, #[cfg(span_locations)] pub off: u32, } impl<'a> Cursor<'a> { fn advance(&self, bytes: usize) -> Cursor<'a> { let (_front, rest) = self.rest.split_at(bytes); Cursor { rest, #[cfg(span_locations)] off: self.off + _front.chars().count() as u32, } } fn starts_with(&self, s: &str) -> bool { self.rest.starts_with(s) } fn is_empty(&self) -> bool { self.rest.is_empty() } fn len(&self) -> usize { self.rest.len() } fn as_bytes(&self) -> &'a [u8] { self.rest.as_bytes() } fn bytes(&self) -> Bytes<'a> { self.rest.bytes() } fn chars(&self) -> 
Chars<'a> {
        self.rest.chars()
    }

    fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }

    // Consumes `tag` if the input starts with it, otherwise rejects.
    fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
        if self.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(Reject)
        }
    }
}

// Zero-sized parse failure; parsers backtrack by returning this.
pub(crate) struct Reject;
type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;

// Skips whitespace and non-doc comments. Doc comments (`///`, `//!`, `/**`,
// `/*!`) are deliberately left in place — they are lexed as tokens later.
fn skip_whitespace(input: Cursor) -> Cursor {
    let mut s = input;
    while !s.is_empty() {
        let byte = s.as_bytes()[0];
        if byte == b'/' {
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                let (cursor, _) = take_until_newline_or_eof(s);
                s = cursor;
                continue;
            } else if s.starts_with("/**/") {
                s = s.advance(4);
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                match block_comment(s) {
                    Ok((rest, _)) => {
                        s = rest;
                        continue;
                    }
                    Err(Reject) => return s,
                }
            }
        }
        match byte {
            b' ' | 0x09..=0x0d => {
                s = s.advance(1);
                continue;
            }
            b if b <= 0x7f => {}
            _ => {
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    s = s.advance(ch.len_utf8());
                    continue;
                }
            }
        }
        return s;
    }
    s
}

// Consumes a (possibly nested) `/* ... */` comment, returning it including
// the delimiters.
fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(Reject);
    }

    let mut depth = 0;
    let bytes = input.as_bytes();
    let mut i = 0;
    let upper = bytes.len() - 1;

    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }
        i += 1;
    }

    Err(Reject)
}

fn is_whitespace(ch: char) -> bool {
    // Rust treats left-to-right mark and right-to-left mark as whitespace
    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}

// Succeeds only if the next character cannot continue an identifier.
fn word_break(input: Cursor) -> Result<Cursor, Reject> {
    match input.chars().next() {
        Some(ch) if is_ident_continue(ch) => Err(Reject),
        Some(_) | None => Ok(input),
    }
}

// Entry point: lexes the whole input into a `TokenStream`, tracking open
// delimiters on an explicit stack.
pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
    let mut trees = Vec::new();
    let mut
stack = Vec::new();
    loop {
        input = skip_whitespace(input);

        if let Ok((rest, tt)) = doc_comment(input) {
            trees.extend(tt);
            input = rest;
            continue;
        }

        #[cfg(span_locations)]
        let lo = input.off;

        let first = match input.bytes().next() {
            Some(first) => first,
            // End of input: fail if a delimiter is still open.
            None => match stack.last() {
                None => return Ok(TokenStream { inner: trees }),
                #[cfg(span_locations)]
                Some((lo, _frame)) => {
                    return Err(LexError {
                        span: Span { lo: *lo, hi: *lo },
                    })
                }
                #[cfg(not(span_locations))]
                Some(_frame) => return Err(LexError { span: Span {} }),
            },
        };

        if let Some(open_delimiter) = match first {
            b'(' => Some(Delimiter::Parenthesis),
            b'[' => Some(Delimiter::Bracket),
            b'{' => Some(Delimiter::Brace),
            _ => None,
        } {
            input = input.advance(1);
            let frame = (open_delimiter, trees);
            #[cfg(span_locations)]
            let frame = (lo, frame);
            stack.push(frame);
            trees = Vec::new();
        } else if let Some(close_delimiter) = match first {
            b')' => Some(Delimiter::Parenthesis),
            b']' => Some(Delimiter::Bracket),
            b'}' => Some(Delimiter::Brace),
            _ => None,
        } {
            let frame = match stack.pop() {
                Some(frame) => frame,
                None => return Err(lex_error(input)),
            };
            #[cfg(span_locations)]
            let (lo, frame) = frame;
            let (open_delimiter, outer) = frame;
            if open_delimiter != close_delimiter {
                return Err(lex_error(input));
            }
            input = input.advance(1);
            let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
            g.set_span(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: input.off,
            });
            trees = outer;
            trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
        } else {
            let (rest, mut tt) = match leaf_token(input) {
                Ok((rest, tt)) => (rest, tt),
                Err(Reject) => return Err(lex_error(input)),
            };
            tt.set_span(crate::Span::_new_stable(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: rest.off,
            }));
            trees.push(tt);
            input = rest;
        }
    }
}

fn lex_error(cursor: Cursor) -> LexError {
    #[cfg(not(span_locations))]
    let _ = cursor;
    LexError {
        span: Span {
            #[cfg(span_locations)]
            lo: cursor.off,
            #[cfg(span_locations)]
            hi: cursor.off,
        },
    }
}

fn leaf_token(input: Cursor) -> PResult<TokenTree> {
    if let Ok((input, l)) = literal(input) {
        // must be parsed before ident
        Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
    } else if let Ok((input, p)) = punct(input) {
        Ok((input, TokenTree::Punct(p)))
    } else if let Ok((input, i)) = ident(input) {
        Ok((input, TokenTree::Ident(i)))
    } else {
        Err(Reject)
    }
}

// An identifier, unless the input is the prefix of a string/byte literal.
fn ident(input: Cursor) -> PResult<crate::Ident> {
    if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
        .iter()
        .any(|prefix| input.starts_with(prefix))
    {
        Err(Reject)
    } else {
        ident_any(input)
    }
}

// An identifier, raw (`r#ident`) or not. `r#_` is rejected.
fn ident_any(input: Cursor) -> PResult<crate::Ident> {
    let raw = input.starts_with("r#");
    let rest = input.advance((raw as usize) << 1);

    let (rest, sym) = ident_not_raw(rest)?;

    if !raw {
        let ident = crate::Ident::new(sym, crate::Span::call_site());
        return Ok((rest, ident));
    }

    if sym == "_" {
        return Err(Reject);
    }

    let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
    Ok((rest, ident))
}

fn ident_not_raw(input: Cursor) -> PResult<&str> {
    let mut chars = input.char_indices();

    match chars.next() {
        Some((_, ch)) if is_ident_start(ch) => {}
        _ => return Err(Reject),
    }

    let mut end = input.len();
    for (i, ch) in chars {
        if !is_ident_continue(ch) {
            end = i;
            break;
        }
    }

    Ok((input.advance(end), &input.rest[..end]))
}

// Captures the literal's source text verbatim.
pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
    let rest = literal_nocapture(input)?;
    let end = input.len() - rest.len();
    Ok((rest, Literal::_new(input.rest[..end].to_string())))
}

fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(ok) = string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte_string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte(input) {
        Ok(ok)
    } else if let Ok(ok) = character(input) {
        Ok(ok)
    } else if let Ok(ok) = float(input) {
        Ok(ok)
    } else if let Ok(ok) = int(input) {
        Ok(ok)
    } else {
        Err(Reject)
    }
}

// Consumes an optional identifier suffix after a literal (e.g. `1u8`).
fn literal_suffix(input: Cursor) -> Cursor {
    match ident_not_raw(input) {
        Ok((input, _)) => input,
        Err(Reject) => input,
    }
}

fn string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("\"") {
cooked_string(input)
    } else if let Ok(input) = input.parse("r") {
        raw_string(input)
    } else {
        Err(Reject)
    }
}

// Body of a `"..."` string after the opening quote: validates escapes and
// line continuations, then consumes an optional suffix.
fn cooked_string(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices().peekable();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    if !backslash_x_char(&mut chars) {
                        break;
                    }
                }
                Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
                | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
                Some((_, 'u')) => {
                    if !backslash_u(&mut chars) {
                        break;
                    }
                }
                // Backslash-newline continuation: skip following whitespace.
                Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
                    let mut last = ch;
                    loop {
                        if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
                            return Err(Reject);
                        }
                        match chars.peek() {
                            Some((_, ch)) if ch.is_whitespace() => {
                                last = *ch;
                                chars.next();
                            }
                            _ => break,
                        }
                    }
                }
                _ => break,
            },
            _ch => {}
        }
    }
    Err(Reject)
}

fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("b\"") {
        cooked_byte_string(input)
    } else if let Ok(input) = input.parse("br") {
        raw_string(input)
    } else {
        Err(Reject)
    }
}

// Body of a `b"..."` byte string after the opening quote.
fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut bytes = input.bytes().enumerate();
    while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                let input = input.advance(offset + 1);
                return Ok(literal_suffix(input));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\\' => match bytes.next() {
                Some((_, b'x')) => {
                    if !backslash_x_byte(&mut bytes) {
                        break;
                    }
                }
                Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
                | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
                // Backslash-newline continuation: skip following whitespace.
                Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
                    let mut last = b as char;
                    let rest = input.advance(newline + 1);
                    let mut chars = rest.char_indices();
                    loop {
                        if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
                            return Err(Reject);
                        }
                        match chars.next() {
                            Some((_, ch)) if ch.is_whitespace() => last = ch,
                            Some((offset, _)) => {
                                input = rest.advance(offset);
                                bytes = input.bytes().enumerate();
                                break;
                            }
                            None => return Err(Reject),
                        }
                    }
                }
                _ => break,
            },
            b if b < 0x80 => {}
            _ => break,
        }
    }
    Err(Reject)
}

// Body of a raw string after the leading `r`/`br`: counts `#`s before the
// opening quote and requires the same number after the closing quote.
fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();
    let mut n = 0;
    for (i, ch) in &mut chars {
        match ch {
            '"' => {
                n = i;
                break;
            }
            '#' => {}
            _ => return Err(Reject),
        }
    }
    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
                let rest = input.advance(i + 1 + n);
                return Ok(literal_suffix(rest));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            _ => {}
        }
    }
    Err(Reject)
}

// Byte literal `b'x'`.
fn byte(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("b'")?;
    let mut bytes = input.bytes().enumerate();
    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes),
            Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
            | Some(b'"') => true,
            _ => false,
        },
        b => b.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (offset, _) = bytes.next().ok_or(Reject)?;
    if !input.chars().as_str().is_char_boundary(offset) {
        return Err(Reject);
    }
    let input = input.advance(offset).parse("'")?;
    Ok(literal_suffix(input))
}

// Character literal `'x'`.
fn character(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("'")?;
    let mut chars = input.char_indices();
    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => match chars.next().map(|(_, ch)| ch) {
            Some('x') => backslash_x_char(&mut chars),
            Some('u') => backslash_u(&mut chars),
            Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
                true
            }
            _ => false,
        },
        ch => ch.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (idx, _) = chars.next().ok_or(Reject)?;
    let input = input.advance(idx).parse("'")?;
    Ok(literal_suffix(input))
}

macro_rules!
next_ch { ($chars:ident @ $pat:pat $(| $rest:pat)*) => { match $chars.next() { Some((_, ch)) => match ch { $pat $(| $rest)* => ch, _ => return false, }, None => return false, } }; } fn backslash_x_char(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ '0'..='7'); next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); true } fn backslash_x_byte(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); true } fn backslash_u(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ '{'); let mut value = 0; let mut len = 0; for (_, ch) in chars { let digit = match ch { '0'..='9' => ch as u8 - b'0', 'a'..='f' => 10 + ch as u8 - b'a', 'A'..='F' => 10 + ch as u8 - b'A', '_' if len > 0 => continue, '}' if len > 0 => return char::from_u32(value).is_some(), _ => return false, }; if len == 6 { return false; } value *= 0x10; value += u32::from(digit); len += 1; } false } fn float(input: Cursor) -> Result { let mut rest = float_digits(input)?; if let Some(ch) = rest.chars().next() { if is_ident_start(ch) { rest = ident_not_raw(rest)?.0; } } word_break(rest) } fn float_digits(input: Cursor) -> Result { let mut chars = input.chars().peekable(); match chars.next() { Some(ch) if ch >= '0' && ch <= '9' => {} _ => return Err(Reject), } let mut len = 1; let mut has_dot = false; let mut has_exp = false; while let Some(&ch) = chars.peek() { match ch { '0'..='9' | '_' => { chars.next(); len += 1; } '.' => { if has_dot { break; } chars.next(); if chars .peek() .map(|&ch| ch == '.' 
|| is_ident_start(ch)) .unwrap_or(false) { return Err(Reject); } len += 1; has_dot = true; } 'e' | 'E' => { chars.next(); len += 1; has_exp = true; break; } _ => break, } } if !(has_dot || has_exp) { return Err(Reject); } if has_exp { let token_before_exp = if has_dot { Ok(input.advance(len - 1)) } else { Err(Reject) }; let mut has_sign = false; let mut has_exp_value = false; while let Some(&ch) = chars.peek() { match ch { '+' | '-' => { if has_exp_value { break; } if has_sign { return token_before_exp; } chars.next(); len += 1; has_sign = true; } '0'..='9' => { chars.next(); len += 1; has_exp_value = true; } '_' => { chars.next(); len += 1; } _ => break, } } if !has_exp_value { return token_before_exp; } } Ok(input.advance(len)) } fn int(input: Cursor) -> Result { let mut rest = digits(input)?; if let Some(ch) = rest.chars().next() { if is_ident_start(ch) { rest = ident_not_raw(rest)?.0; } } word_break(rest) } fn digits(mut input: Cursor) -> Result { let base = if input.starts_with("0x") { input = input.advance(2); 16 } else if input.starts_with("0o") { input = input.advance(2); 8 } else if input.starts_with("0b") { input = input.advance(2); 2 } else { 10 }; let mut len = 0; let mut empty = true; for b in input.bytes() { match b { b'0'..=b'9' => { let digit = (b - b'0') as u64; if digit >= base { return Err(Reject); } } b'a'..=b'f' => { let digit = 10 + (b - b'a') as u64; if digit >= base { break; } } b'A'..=b'F' => { let digit = 10 + (b - b'A') as u64; if digit >= base { break; } } b'_' => { if empty && base == 10 { return Err(Reject); } len += 1; continue; } _ => break, }; len += 1; empty = false; } if empty { Err(Reject) } else { Ok(input.advance(len)) } } fn punct(input: Cursor) -> PResult { let (rest, ch) = punct_char(input)?; if ch == '\'' { if ident_any(rest)?.0.starts_with("'") { Err(Reject) } else { Ok((rest, Punct::new('\'', Spacing::Joint))) } } else { let kind = match punct_char(rest) { Ok(_) => Spacing::Joint, Err(Reject) => Spacing::Alone, }; 
Ok((rest, Punct::new(ch, kind))) } } fn punct_char(input: Cursor) -> PResult { if input.starts_with("//") || input.starts_with("/*") { // Do not accept `/` of a comment as a punct. return Err(Reject); } let mut chars = input.chars(); let first = match chars.next() { Some(ch) => ch, None => { return Err(Reject); } }; let recognized = "~!@#$%^&*-=+|;:,<.>/?'"; if recognized.contains(first) { Ok((input.advance(first.len_utf8()), first)) } else { Err(Reject) } } fn doc_comment(input: Cursor) -> PResult> { #[cfg(span_locations)] let lo = input.off; let (rest, (comment, inner)) = doc_comment_contents(input)?; let span = crate::Span::_new_stable(Span { #[cfg(span_locations)] lo, #[cfg(span_locations)] hi: rest.off, }); let mut scan_for_bare_cr = comment; while let Some(cr) = scan_for_bare_cr.find('\r') { let rest = &scan_for_bare_cr[cr + 1..]; if !rest.starts_with('\n') { return Err(Reject); } scan_for_bare_cr = rest; } let mut trees = Vec::new(); trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone))); if inner { trees.push(Punct::new('!', Spacing::Alone).into()); } let mut stream = vec![ TokenTree::Ident(crate::Ident::new("doc", span)), TokenTree::Punct(Punct::new('=', Spacing::Alone)), TokenTree::Literal(crate::Literal::string(comment)), ]; for tt in stream.iter_mut() { tt.set_span(span); } let group = Group::new(Delimiter::Bracket, stream.into_iter().collect()); trees.push(crate::Group::_new_stable(group).into()); for tt in trees.iter_mut() { tt.set_span(span); } Ok((rest, trees)) } fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> { if input.starts_with("//!") { let input = input.advance(3); let (input, s) = take_until_newline_or_eof(input); Ok((input, (s, true))) } else if input.starts_with("/*!") { let (input, s) = block_comment(input)?; Ok((input, (&s[3..s.len() - 2], true))) } else if input.starts_with("///") { let input = input.advance(3); if input.starts_with("/") { return Err(Reject); } let (input, s) = take_until_newline_or_eof(input); 
Ok((input, (s, false))) } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') { let (input, s) = block_comment(input)?; Ok((input, (&s[3..s.len() - 2], false))) } else { Err(Reject) } } fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) { let chars = input.char_indices(); for (i, ch) in chars { if ch == '\n' { return (input.advance(i), &input.rest[..i]); } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') { return (input.advance(i + 1), &input.rest[..i]); } } (input.advance(input.len()), input.rest) } proc-macro2-1.0.28/src/wrapper.rs000064400000000000000000000704360072674642500147170ustar 00000000000000use crate::detection::inside_proc_macro; use crate::{fallback, Delimiter, Punct, Spacing, TokenTree}; use std::fmt::{self, Debug, Display}; use std::iter::FromIterator; use std::ops::RangeBounds; use std::panic; #[cfg(super_unstable)] use std::path::PathBuf; use std::str::FromStr; #[derive(Clone)] pub(crate) enum TokenStream { Compiler(DeferredTokenStream), Fallback(fallback::TokenStream), } // Work around https://github.com/rust-lang/rust/issues/65080. // In `impl Extend for TokenStream` which is used heavily by quote, // we hold on to the appended tokens and do proc_macro::TokenStream::extend as // late as possible to batch together consecutive uses of the Extend impl. #[derive(Clone)] pub(crate) struct DeferredTokenStream { stream: proc_macro::TokenStream, extra: Vec, } pub(crate) enum LexError { Compiler(proc_macro::LexError), Fallback(fallback::LexError), } impl LexError { fn call_site() -> Self { LexError::Fallback(fallback::LexError { span: fallback::Span::call_site(), }) } } fn mismatch() -> ! 
{ panic!("stable/nightly mismatch") } impl DeferredTokenStream { fn new(stream: proc_macro::TokenStream) -> Self { DeferredTokenStream { stream, extra: Vec::new(), } } fn is_empty(&self) -> bool { self.stream.is_empty() && self.extra.is_empty() } fn evaluate_now(&mut self) { // If-check provides a fast short circuit for the common case of `extra` // being empty, which saves a round trip over the proc macro bridge. // Improves macro expansion time in winrt by 6% in debug mode. if !self.extra.is_empty() { self.stream.extend(self.extra.drain(..)); } } fn into_token_stream(mut self) -> proc_macro::TokenStream { self.evaluate_now(); self.stream } } impl TokenStream { pub fn new() -> TokenStream { if inside_proc_macro() { TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new())) } else { TokenStream::Fallback(fallback::TokenStream::new()) } } pub fn is_empty(&self) -> bool { match self { TokenStream::Compiler(tts) => tts.is_empty(), TokenStream::Fallback(tts) => tts.is_empty(), } } fn unwrap_nightly(self) -> proc_macro::TokenStream { match self { TokenStream::Compiler(s) => s.into_token_stream(), TokenStream::Fallback(_) => mismatch(), } } fn unwrap_stable(self) -> fallback::TokenStream { match self { TokenStream::Compiler(_) => mismatch(), TokenStream::Fallback(s) => s, } } } impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result { if inside_proc_macro() { Ok(TokenStream::Compiler(DeferredTokenStream::new( proc_macro_parse(src)?, ))) } else { Ok(TokenStream::Fallback(src.parse()?)) } } } // Work around https://github.com/rust-lang/rust/issues/58736. 
fn proc_macro_parse(src: &str) -> Result { let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler)); result.unwrap_or_else(|_| Err(LexError::call_site())) } impl Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f), TokenStream::Fallback(tts) => Display::fmt(tts, f), } } } impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> TokenStream { TokenStream::Compiler(DeferredTokenStream::new(inner)) } } impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> proc_macro::TokenStream { match inner { TokenStream::Compiler(inner) => inner.into_token_stream(), TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(), } } } impl From for TokenStream { fn from(inner: fallback::TokenStream) -> TokenStream { TokenStream::Fallback(inner) } } // Assumes inside_proc_macro(). fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree { match token { TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(), TokenTree::Punct(tt) => { let spacing = match tt.spacing() { Spacing::Joint => proc_macro::Spacing::Joint, Spacing::Alone => proc_macro::Spacing::Alone, }; let mut punct = proc_macro::Punct::new(tt.as_char(), spacing); punct.set_span(tt.span().inner.unwrap_nightly()); punct.into() } TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(), TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(), } } impl From for TokenStream { fn from(token: TokenTree) -> TokenStream { if inside_proc_macro() { TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into())) } else { TokenStream::Fallback(token.into()) } } } impl FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { if inside_proc_macro() { TokenStream::Compiler(DeferredTokenStream::new( trees.into_iter().map(into_compiler_token).collect(), )) } else { TokenStream::Fallback(trees.into_iter().collect()) } } } impl 
FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { let mut streams = streams.into_iter(); match streams.next() { Some(TokenStream::Compiler(mut first)) => { first.evaluate_now(); first.stream.extend(streams.map(|s| match s { TokenStream::Compiler(s) => s.into_token_stream(), TokenStream::Fallback(_) => mismatch(), })); TokenStream::Compiler(first) } Some(TokenStream::Fallback(mut first)) => { first.extend(streams.map(|s| match s { TokenStream::Fallback(s) => s, TokenStream::Compiler(_) => mismatch(), })); TokenStream::Fallback(first) } None => TokenStream::new(), } } } impl Extend for TokenStream { fn extend>(&mut self, stream: I) { match self { TokenStream::Compiler(tts) => { // Here is the reason for DeferredTokenStream. for token in stream { tts.extra.push(into_compiler_token(token)); } } TokenStream::Fallback(tts) => tts.extend(stream), } } } impl Extend for TokenStream { fn extend>(&mut self, streams: I) { match self { TokenStream::Compiler(tts) => { tts.evaluate_now(); tts.stream .extend(streams.into_iter().map(TokenStream::unwrap_nightly)); } TokenStream::Fallback(tts) => { tts.extend(streams.into_iter().map(TokenStream::unwrap_stable)); } } } } impl Debug for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f), TokenStream::Fallback(tts) => Debug::fmt(tts, f), } } } impl LexError { pub(crate) fn span(&self) -> Span { match self { LexError::Compiler(_) => Span::call_site(), LexError::Fallback(e) => Span::Fallback(e.span()), } } } impl From for LexError { fn from(e: proc_macro::LexError) -> LexError { LexError::Compiler(e) } } impl From for LexError { fn from(e: fallback::LexError) -> LexError { LexError::Fallback(e) } } impl Debug for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { LexError::Compiler(e) => Debug::fmt(e, f), LexError::Fallback(e) => Debug::fmt(e, f), } } } impl Display for LexError { fn 
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { #[cfg(lexerror_display)] LexError::Compiler(e) => Display::fmt(e, f), #[cfg(not(lexerror_display))] LexError::Compiler(_e) => Display::fmt( &fallback::LexError { span: fallback::Span::call_site(), }, f, ), LexError::Fallback(e) => Display::fmt(e, f), } } } #[derive(Clone)] pub(crate) enum TokenTreeIter { Compiler(proc_macro::token_stream::IntoIter), Fallback(fallback::TokenTreeIter), } impl IntoIterator for TokenStream { type Item = TokenTree; type IntoIter = TokenTreeIter; fn into_iter(self) -> TokenTreeIter { match self { TokenStream::Compiler(tts) => { TokenTreeIter::Compiler(tts.into_token_stream().into_iter()) } TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()), } } } impl Iterator for TokenTreeIter { type Item = TokenTree; fn next(&mut self) -> Option { let token = match self { TokenTreeIter::Compiler(iter) => iter.next()?, TokenTreeIter::Fallback(iter) => return iter.next(), }; Some(match token { proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(), proc_macro::TokenTree::Punct(tt) => { let spacing = match tt.spacing() { proc_macro::Spacing::Joint => Spacing::Joint, proc_macro::Spacing::Alone => Spacing::Alone, }; let mut o = Punct::new(tt.as_char(), spacing); o.set_span(crate::Span::_new(Span::Compiler(tt.span()))); o.into() } proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(), proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(), }) } fn size_hint(&self) -> (usize, Option) { match self { TokenTreeIter::Compiler(tts) => tts.size_hint(), TokenTreeIter::Fallback(tts) => tts.size_hint(), } } } impl Debug for TokenTreeIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("TokenTreeIter").finish() } } #[derive(Clone, PartialEq, Eq)] #[cfg(super_unstable)] pub(crate) enum SourceFile { Compiler(proc_macro::SourceFile), Fallback(fallback::SourceFile), } 
#[cfg(super_unstable)] impl SourceFile { fn nightly(sf: proc_macro::SourceFile) -> Self { SourceFile::Compiler(sf) } /// Get the path to this source file as a string. pub fn path(&self) -> PathBuf { match self { SourceFile::Compiler(a) => a.path(), SourceFile::Fallback(a) => a.path(), } } pub fn is_real(&self) -> bool { match self { SourceFile::Compiler(a) => a.is_real(), SourceFile::Fallback(a) => a.is_real(), } } } #[cfg(super_unstable)] impl Debug for SourceFile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SourceFile::Compiler(a) => Debug::fmt(a, f), SourceFile::Fallback(a) => Debug::fmt(a, f), } } } #[cfg(any(super_unstable, feature = "span-locations"))] pub(crate) struct LineColumn { pub line: usize, pub column: usize, } #[derive(Copy, Clone)] pub(crate) enum Span { Compiler(proc_macro::Span), Fallback(fallback::Span), } impl Span { pub fn call_site() -> Span { if inside_proc_macro() { Span::Compiler(proc_macro::Span::call_site()) } else { Span::Fallback(fallback::Span::call_site()) } } #[cfg(hygiene)] pub fn mixed_site() -> Span { if inside_proc_macro() { Span::Compiler(proc_macro::Span::mixed_site()) } else { Span::Fallback(fallback::Span::mixed_site()) } } #[cfg(super_unstable)] pub fn def_site() -> Span { if inside_proc_macro() { Span::Compiler(proc_macro::Span::def_site()) } else { Span::Fallback(fallback::Span::def_site()) } } pub fn resolved_at(&self, other: Span) -> Span { match (self, other) { #[cfg(hygiene)] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), // Name resolution affects semantics, but location is only cosmetic #[cfg(not(hygiene))] (Span::Compiler(_), Span::Compiler(_)) => other, (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), _ => mismatch(), } } pub fn located_at(&self, other: Span) -> Span { match (self, other) { #[cfg(hygiene)] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), // Name resolution affects semantics, but location is 
only cosmetic #[cfg(not(hygiene))] (Span::Compiler(_), Span::Compiler(_)) => *self, (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), _ => mismatch(), } } pub fn unwrap(self) -> proc_macro::Span { match self { Span::Compiler(s) => s, Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"), } } #[cfg(super_unstable)] pub fn source_file(&self) -> SourceFile { match self { Span::Compiler(s) => SourceFile::nightly(s.source_file()), Span::Fallback(s) => SourceFile::Fallback(s.source_file()), } } #[cfg(any(super_unstable, feature = "span-locations"))] pub fn start(&self) -> LineColumn { match self { #[cfg(proc_macro_span)] Span::Compiler(s) => { let proc_macro::LineColumn { line, column } = s.start(); LineColumn { line, column } } #[cfg(not(proc_macro_span))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => { let fallback::LineColumn { line, column } = s.start(); LineColumn { line, column } } } } #[cfg(any(super_unstable, feature = "span-locations"))] pub fn end(&self) -> LineColumn { match self { #[cfg(proc_macro_span)] Span::Compiler(s) => { let proc_macro::LineColumn { line, column } = s.end(); LineColumn { line, column } } #[cfg(not(proc_macro_span))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => { let fallback::LineColumn { line, column } = s.end(); LineColumn { line, column } } } } pub fn join(&self, other: Span) -> Option { let ret = match (self, other) { #[cfg(proc_macro_span)] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?), (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?), _ => return None, }; Some(ret) } #[cfg(super_unstable)] pub fn eq(&self, other: &Span) -> bool { match (self, other) { (Span::Compiler(a), Span::Compiler(b)) => a.eq(b), (Span::Fallback(a), Span::Fallback(b)) => a.eq(b), _ => false, } } fn unwrap_nightly(self) -> proc_macro::Span { match self { Span::Compiler(s) => s, Span::Fallback(_) => 
mismatch(), } } } impl From for crate::Span { fn from(proc_span: proc_macro::Span) -> crate::Span { crate::Span::_new(Span::Compiler(proc_span)) } } impl From for Span { fn from(inner: fallback::Span) -> Span { Span::Fallback(inner) } } impl Debug for Span { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Span::Compiler(s) => Debug::fmt(s, f), Span::Fallback(s) => Debug::fmt(s, f), } } } pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { match span { Span::Compiler(s) => { debug.field("span", &s); } Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s), } } #[derive(Clone)] pub(crate) enum Group { Compiler(proc_macro::Group), Fallback(fallback::Group), } impl Group { pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { match stream { TokenStream::Compiler(tts) => { let delimiter = match delimiter { Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, Delimiter::Bracket => proc_macro::Delimiter::Bracket, Delimiter::Brace => proc_macro::Delimiter::Brace, Delimiter::None => proc_macro::Delimiter::None, }; Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream())) } TokenStream::Fallback(stream) => { Group::Fallback(fallback::Group::new(delimiter, stream)) } } } pub fn delimiter(&self) -> Delimiter { match self { Group::Compiler(g) => match g.delimiter() { proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, proc_macro::Delimiter::Bracket => Delimiter::Bracket, proc_macro::Delimiter::Brace => Delimiter::Brace, proc_macro::Delimiter::None => Delimiter::None, }, Group::Fallback(g) => g.delimiter(), } } pub fn stream(&self) -> TokenStream { match self { Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())), Group::Fallback(g) => TokenStream::Fallback(g.stream()), } } pub fn span(&self) -> Span { match self { Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span()), } } pub fn 
span_open(&self) -> Span { match self { #[cfg(proc_macro_span)] Group::Compiler(g) => Span::Compiler(g.span_open()), #[cfg(not(proc_macro_span))] Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span_open()), } } pub fn span_close(&self) -> Span { match self { #[cfg(proc_macro_span)] Group::Compiler(g) => Span::Compiler(g.span_close()), #[cfg(not(proc_macro_span))] Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span_close()), } } pub fn set_span(&mut self, span: Span) { match (self, span) { (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s), (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s), _ => mismatch(), } } fn unwrap_nightly(self) -> proc_macro::Group { match self { Group::Compiler(g) => g, Group::Fallback(_) => mismatch(), } } } impl From for Group { fn from(g: fallback::Group) -> Self { Group::Fallback(g) } } impl Display for Group { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { match self { Group::Compiler(group) => Display::fmt(group, formatter), Group::Fallback(group) => Display::fmt(group, formatter), } } } impl Debug for Group { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { match self { Group::Compiler(group) => Debug::fmt(group, formatter), Group::Fallback(group) => Debug::fmt(group, formatter), } } } #[derive(Clone)] pub(crate) enum Ident { Compiler(proc_macro::Ident), Fallback(fallback::Ident), } impl Ident { pub fn new(string: &str, span: Span) -> Ident { match span { Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)), Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)), } } pub fn new_raw(string: &str, span: Span) -> Ident { match span { Span::Compiler(s) => { let p: proc_macro::TokenStream = string.parse().unwrap(); let ident = match p.into_iter().next() { Some(proc_macro::TokenTree::Ident(mut i)) => { i.set_span(s); i } _ => panic!(), }; Ident::Compiler(ident) } Span::Fallback(s) => 
Ident::Fallback(fallback::Ident::new_raw(string, s)), } } pub fn span(&self) -> Span { match self { Ident::Compiler(t) => Span::Compiler(t.span()), Ident::Fallback(t) => Span::Fallback(t.span()), } } pub fn set_span(&mut self, span: Span) { match (self, span) { (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s), (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s), _ => mismatch(), } } fn unwrap_nightly(self) -> proc_macro::Ident { match self { Ident::Compiler(s) => s, Ident::Fallback(_) => mismatch(), } } } impl PartialEq for Ident { fn eq(&self, other: &Ident) -> bool { match (self, other) { (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(), (Ident::Fallback(t), Ident::Fallback(o)) => t == o, _ => mismatch(), } } } impl PartialEq for Ident where T: ?Sized + AsRef, { fn eq(&self, other: &T) -> bool { let other = other.as_ref(); match self { Ident::Compiler(t) => t.to_string() == other, Ident::Fallback(t) => t == other, } } } impl Display for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Ident::Compiler(t) => Display::fmt(t, f), Ident::Fallback(t) => Display::fmt(t, f), } } } impl Debug for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Ident::Compiler(t) => Debug::fmt(t, f), Ident::Fallback(t) => Debug::fmt(t, f), } } } #[derive(Clone)] pub(crate) enum Literal { Compiler(proc_macro::Literal), Fallback(fallback::Literal), } macro_rules! suffixed_numbers { ($($name:ident => $kind:ident,)*) => ($( pub fn $name(n: $kind) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::$name(n)) } else { Literal::Fallback(fallback::Literal::$name(n)) } } )*) } macro_rules! unsuffixed_integers { ($($name:ident => $kind:ident,)*) => ($( pub fn $name(n: $kind) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::$name(n)) } else { Literal::Fallback(fallback::Literal::$name(n)) } } )*) } impl Literal { suffixed_numbers! 
{ u8_suffixed => u8, u16_suffixed => u16, u32_suffixed => u32, u64_suffixed => u64, u128_suffixed => u128, usize_suffixed => usize, i8_suffixed => i8, i16_suffixed => i16, i32_suffixed => i32, i64_suffixed => i64, i128_suffixed => i128, isize_suffixed => isize, f32_suffixed => f32, f64_suffixed => f64, } unsuffixed_integers! { u8_unsuffixed => u8, u16_unsuffixed => u16, u32_unsuffixed => u32, u64_unsuffixed => u64, u128_unsuffixed => u128, usize_unsuffixed => usize, i8_unsuffixed => i8, i16_unsuffixed => i16, i32_unsuffixed => i32, i64_unsuffixed => i64, i128_unsuffixed => i128, isize_unsuffixed => isize, } pub fn f32_unsuffixed(f: f32) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f)) } else { Literal::Fallback(fallback::Literal::f32_unsuffixed(f)) } } pub fn f64_unsuffixed(f: f64) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f)) } else { Literal::Fallback(fallback::Literal::f64_unsuffixed(f)) } } pub fn string(t: &str) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::string(t)) } else { Literal::Fallback(fallback::Literal::string(t)) } } pub fn character(t: char) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::character(t)) } else { Literal::Fallback(fallback::Literal::character(t)) } } pub fn byte_string(bytes: &[u8]) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::byte_string(bytes)) } else { Literal::Fallback(fallback::Literal::byte_string(bytes)) } } pub fn span(&self) -> Span { match self { Literal::Compiler(lit) => Span::Compiler(lit.span()), Literal::Fallback(lit) => Span::Fallback(lit.span()), } } pub fn set_span(&mut self, span: Span) { match (self, span) { (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s), (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s), _ => mismatch(), } } pub fn subspan>(&self, range: R) -> Option { match self { 
#[cfg(proc_macro_span)] Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler), #[cfg(not(proc_macro_span))] Literal::Compiler(_lit) => None, Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback), } } fn unwrap_nightly(self) -> proc_macro::Literal { match self { Literal::Compiler(s) => s, Literal::Fallback(_) => mismatch(), } } } impl From for Literal { fn from(s: fallback::Literal) -> Literal { Literal::Fallback(s) } } impl FromStr for Literal { type Err = LexError; fn from_str(repr: &str) -> Result { if inside_proc_macro() { // TODO: use libproc_macro's FromStr impl once it is available in // rustc. https://github.com/rust-lang/rust/pull/84717 let tokens = proc_macro_parse(repr)?; let mut iter = tokens.into_iter(); if let (Some(proc_macro::TokenTree::Literal(literal)), None) = (iter.next(), iter.next()) { if literal.to_string().len() == repr.len() { return Ok(Literal::Compiler(literal)); } } Err(LexError::call_site()) } else { let literal = fallback::Literal::from_str(repr)?; Ok(Literal::Fallback(literal)) } } } impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Literal::Compiler(t) => Display::fmt(t, f), Literal::Fallback(t) => Display::fmt(t, f), } } } impl Debug for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Literal::Compiler(t) => Debug::fmt(t, f), Literal::Fallback(t) => Debug::fmt(t, f), } } } proc-macro2-1.0.28/tests/comments.rs000064400000000000000000000064500072674642500154320ustar 00000000000000use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree}; // #[doc = "..."] -> "..." fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal { lit_of_doc_comment(tokens, false) } // #![doc = "..."] -> "..." 
fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal { lit_of_doc_comment(tokens, true) } fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal { let mut iter = tokens.clone().into_iter(); match iter.next().unwrap() { TokenTree::Punct(punct) => { assert_eq!(punct.as_char(), '#'); assert_eq!(punct.spacing(), Spacing::Alone); } _ => panic!("wrong token {:?}", tokens), } if inner { match iter.next().unwrap() { TokenTree::Punct(punct) => { assert_eq!(punct.as_char(), '!'); assert_eq!(punct.spacing(), Spacing::Alone); } _ => panic!("wrong token {:?}", tokens), } } iter = match iter.next().unwrap() { TokenTree::Group(group) => { assert_eq!(group.delimiter(), Delimiter::Bracket); assert!(iter.next().is_none(), "unexpected token {:?}", tokens); group.stream().into_iter() } _ => panic!("wrong token {:?}", tokens), }; match iter.next().unwrap() { TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"), _ => panic!("wrong token {:?}", tokens), } match iter.next().unwrap() { TokenTree::Punct(punct) => { assert_eq!(punct.as_char(), '='); assert_eq!(punct.spacing(), Spacing::Alone); } _ => panic!("wrong token {:?}", tokens), } match iter.next().unwrap() { TokenTree::Literal(literal) => { assert!(iter.next().is_none(), "unexpected token {:?}", tokens); literal } _ => panic!("wrong token {:?}", tokens), } } #[test] fn closed_immediately() { let stream = "/**/".parse::().unwrap(); let tokens = stream.into_iter().collect::>(); assert!(tokens.is_empty(), "not empty -- {:?}", tokens); } #[test] fn incomplete() { assert!("/*/".parse::().is_err()); } #[test] fn lit() { let stream = "/// doc".parse::().unwrap(); let lit = lit_of_outer_doc_comment(stream); assert_eq!(lit.to_string(), "\" doc\""); let stream = "//! 
doc".parse::().unwrap(); let lit = lit_of_inner_doc_comment(stream); assert_eq!(lit.to_string(), "\" doc\""); let stream = "/** doc */".parse::().unwrap(); let lit = lit_of_outer_doc_comment(stream); assert_eq!(lit.to_string(), "\" doc \""); let stream = "/*! doc */".parse::().unwrap(); let lit = lit_of_inner_doc_comment(stream); assert_eq!(lit.to_string(), "\" doc \""); } #[test] fn carriage_return() { let stream = "///\r\n".parse::().unwrap(); let lit = lit_of_outer_doc_comment(stream); assert_eq!(lit.to_string(), "\"\""); let stream = "/**\r\n*/".parse::().unwrap(); let lit = lit_of_outer_doc_comment(stream); assert_eq!(lit.to_string(), "\"\\r\\n\""); "///\r".parse::().unwrap_err(); "///\r \n".parse::().unwrap_err(); "/**\r \n*/".parse::().unwrap_err(); } proc-macro2-1.0.28/tests/features.rs000064400000000000000000000002300072674642500154110ustar 00000000000000#[test] #[ignore] fn make_sure_no_proc_macro() { assert!( !cfg!(feature = "proc-macro"), "still compiled with proc_macro?" ); } proc-macro2-1.0.28/tests/marker.rs000064400000000000000000000045120072674642500150630ustar 00000000000000use proc_macro2::*; macro_rules! assert_impl { ($ty:ident is $($marker:ident) and +) => { #[test] #[allow(non_snake_case)] fn $ty() { fn assert_implemented() {} assert_implemented::<$ty>(); } }; ($ty:ident is not $($marker:ident) or +) => { #[test] #[allow(non_snake_case)] fn $ty() { $( { // Implemented for types that implement $marker. trait IsNotImplemented { fn assert_not_implemented() {} } impl IsNotImplemented for T {} // Implemented for the type being tested. trait IsImplemented { fn assert_not_implemented() {} } impl IsImplemented for $ty {} // If $ty does not implement $marker, there is no ambiguity // in the following trait method call. 
<$ty>::assert_not_implemented(); } )+ } }; } assert_impl!(Delimiter is Send and Sync); assert_impl!(Spacing is Send and Sync); assert_impl!(Group is not Send or Sync); assert_impl!(Ident is not Send or Sync); assert_impl!(LexError is not Send or Sync); assert_impl!(Literal is not Send or Sync); assert_impl!(Punct is not Send or Sync); assert_impl!(Span is not Send or Sync); assert_impl!(TokenStream is not Send or Sync); assert_impl!(TokenTree is not Send or Sync); #[cfg(procmacro2_semver_exempt)] mod semver_exempt { use super::*; assert_impl!(LineColumn is Send and Sync); assert_impl!(SourceFile is not Send or Sync); } #[cfg(not(no_libprocmacro_unwind_safe))] mod unwind_safe { use super::*; use std::panic::{RefUnwindSafe, UnwindSafe}; macro_rules! assert_unwind_safe { ($($types:ident)*) => { $( assert_impl!($types is UnwindSafe and RefUnwindSafe); )* }; } assert_unwind_safe! { Delimiter Group Ident LexError Literal Punct Spacing Span TokenStream TokenTree } #[cfg(procmacro2_semver_exempt)] assert_unwind_safe! 
{ LineColumn SourceFile } } proc-macro2-1.0.28/tests/test.rs000064400000000000000000000373050072674642500145670ustar 00000000000000use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; use std::panic; use std::str::{self, FromStr}; #[test] fn idents() { assert_eq!( Ident::new("String", Span::call_site()).to_string(), "String" ); assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn"); assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_"); } #[test] #[cfg(procmacro2_semver_exempt)] fn raw_idents() { assert_eq!( Ident::new_raw("String", Span::call_site()).to_string(), "r#String" ); assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn"); assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_"); } #[test] #[should_panic(expected = "Ident is not allowed to be empty; use Option")] fn ident_empty() { Ident::new("", Span::call_site()); } #[test] #[should_panic(expected = "Ident cannot be a number; use Literal instead")] fn ident_number() { Ident::new("255", Span::call_site()); } #[test] #[should_panic(expected = "\"a#\" is not a valid Ident")] fn ident_invalid() { Ident::new("a#", Span::call_site()); } #[test] #[should_panic(expected = "not a valid Ident")] fn raw_ident_empty() { Ident::new("r#", Span::call_site()); } #[test] #[should_panic(expected = "not a valid Ident")] fn raw_ident_number() { Ident::new("r#255", Span::call_site()); } #[test] #[should_panic(expected = "\"r#a#\" is not a valid Ident")] fn raw_ident_invalid() { Ident::new("r#a#", Span::call_site()); } #[test] #[should_panic(expected = "not a valid Ident")] fn lifetime_empty() { Ident::new("'", Span::call_site()); } #[test] #[should_panic(expected = "not a valid Ident")] fn lifetime_number() { Ident::new("'255", Span::call_site()); } #[test] fn lifetime_invalid() { let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site())); match result { Err(box_any) => { let message = box_any.downcast_ref::().unwrap(); let 
expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0 let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 .. assert!( message == expected1 || message == expected2, "panic message does not match expected string\n\ \x20 panic message: `{:?}`\n\ \x20expected message: `{:?}`", message, expected2, ); } Ok(_) => panic!("test did not panic as expected"), } } #[test] fn literal_string() { assert_eq!(Literal::string("foo").to_string(), "\"foo\""); assert_eq!(Literal::string("\"").to_string(), "\"\\\"\""); assert_eq!(Literal::string("didn't").to_string(), "\"didn't\""); } #[test] fn literal_raw_string() { "r\"\r\n\"".parse::().unwrap(); } #[test] fn literal_character() { assert_eq!(Literal::character('x').to_string(), "'x'"); assert_eq!(Literal::character('\'').to_string(), "'\\''"); assert_eq!(Literal::character('"').to_string(), "'\"'"); } #[test] fn literal_float() { assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0"); } #[test] fn literal_suffix() { fn token_count(p: &str) -> usize { p.parse::().unwrap().into_iter().count() } assert_eq!(token_count("999u256"), 1); assert_eq!(token_count("999r#u256"), 3); assert_eq!(token_count("1."), 1); assert_eq!(token_count("1.f32"), 3); assert_eq!(token_count("1.0_0"), 1); assert_eq!(token_count("1._0"), 3); assert_eq!(token_count("1._m"), 3); assert_eq!(token_count("\"\"s"), 1); assert_eq!(token_count("r\"\"r"), 1); assert_eq!(token_count("b\"\"b"), 1); assert_eq!(token_count("br\"\"br"), 1); assert_eq!(token_count("r#\"\"#r"), 1); assert_eq!(token_count("'c'c"), 1); assert_eq!(token_count("b'b'b"), 1); assert_eq!(token_count("0E"), 1); assert_eq!(token_count("0o0A"), 1); assert_eq!(token_count("0E--0"), 4); assert_eq!(token_count("0.0ECMA"), 1); } #[test] fn literal_iter_negative() { let negative_literal = Literal::i32_suffixed(-3); let tokens = TokenStream::from(TokenTree::Literal(negative_literal)); let mut iter = tokens.into_iter(); match iter.next().unwrap() { TokenTree::Punct(punct) => { 
assert_eq!(punct.as_char(), '-'); assert_eq!(punct.spacing(), Spacing::Alone); } unexpected => panic!("unexpected token {:?}", unexpected), } match iter.next().unwrap() { TokenTree::Literal(literal) => { assert_eq!(literal.to_string(), "3i32"); } unexpected => panic!("unexpected token {:?}", unexpected), } assert!(iter.next().is_none()); } #[test] fn literal_parse() { assert!("1".parse::().is_ok()); assert!("1.0".parse::().is_ok()); assert!("'a'".parse::().is_ok()); assert!("\"\n\"".parse::().is_ok()); assert!("0 1".parse::().is_err()); assert!(" 0".parse::().is_err()); assert!("0 ".parse::().is_err()); assert!("/* comment */0".parse::().is_err()); assert!("0/* comment */".parse::().is_err()); assert!("0// comment".parse::().is_err()); } #[test] fn roundtrip() { fn roundtrip(p: &str) { println!("parse: {}", p); let s = p.parse::().unwrap().to_string(); println!("first: {}", s); let s2 = s.to_string().parse::().unwrap().to_string(); assert_eq!(s, s2); } roundtrip("a"); roundtrip("<<"); roundtrip("<<="); roundtrip( " 1 1.0 1f32 2f64 1usize 4isize 4e10 1_000 1_0i32 8u8 9 0 0xffffffffffffffffffffffffffffffff 1x 1u80 1f320 ", ); roundtrip("'a"); roundtrip("'_"); roundtrip("'static"); roundtrip("'\\u{10__FFFF}'"); roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); } #[test] fn fail() { fn fail(p: &str) { if let Ok(s) = p.parse::() { panic!("should have failed to parse: {}\n{:#?}", p, s); } } fail("' static"); fail("r#1"); fail("r#_"); fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits) fail("\"\\u{999999}\""); // outside of valid range of char fail("\"\\u{_0}\""); // leading underscore fail("\"\\u{}\""); // empty fail("b\"\r\""); // bare carriage return in byte string fail("r\"\r\""); // bare carriage return in raw string fail("\"\\\r \""); // backslash carriage return fail("'aa'aa"); fail("br##\"\"#"); fail("\"\\\n\u{85}\r\""); } #[cfg(span_locations)] #[test] fn span_test() { check_spans( "\ /// This is a document comment testing 123 
{ testing 234 }", &[ (1, 0, 1, 30), // # (1, 0, 1, 30), // [ ... ] (1, 0, 1, 30), // doc (1, 0, 1, 30), // = (1, 0, 1, 30), // "This is..." (2, 0, 2, 7), // testing (2, 8, 2, 11), // 123 (3, 0, 5, 1), // { ... } (4, 2, 4, 9), // testing (4, 10, 4, 13), // 234 ], ); } #[cfg(procmacro2_semver_exempt)] #[cfg(not(nightly))] #[test] fn default_span() { let start = Span::call_site().start(); assert_eq!(start.line, 1); assert_eq!(start.column, 0); let end = Span::call_site().end(); assert_eq!(end.line, 1); assert_eq!(end.column, 0); let source_file = Span::call_site().source_file(); assert_eq!(source_file.path().to_string_lossy(), ""); assert!(!source_file.is_real()); } #[cfg(procmacro2_semver_exempt)] #[test] fn span_join() { let source1 = "aaa\nbbb" .parse::() .unwrap() .into_iter() .collect::>(); let source2 = "ccc\nddd" .parse::() .unwrap() .into_iter() .collect::>(); assert!(source1[0].span().source_file() != source2[0].span().source_file()); assert_eq!( source1[0].span().source_file(), source1[1].span().source_file() ); let joined1 = source1[0].span().join(source1[1].span()); let joined2 = source1[0].span().join(source2[0].span()); assert!(joined1.is_some()); assert!(joined2.is_none()); let start = joined1.unwrap().start(); let end = joined1.unwrap().end(); assert_eq!(start.line, 1); assert_eq!(start.column, 0); assert_eq!(end.line, 2); assert_eq!(end.column, 3); assert_eq!( joined1.unwrap().source_file(), source1[0].span().source_file() ); } #[test] fn no_panic() { let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap(); assert!(s.parse::().is_err()); } #[test] fn punct_before_comment() { let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter(); match tts.next().unwrap() { TokenTree::Punct(tt) => { assert_eq!(tt.as_char(), '~'); assert_eq!(tt.spacing(), Spacing::Alone); } wrong => panic!("wrong token {:?}", wrong), } } #[test] fn joint_last_token() { // This test verifies that we match the behavior of libproc_macro *not* in // the range 
nightly-2020-09-06 through nightly-2020-09-10, in which this // behavior was temporarily broken. // See https://github.com/rust-lang/rust/issues/76399 let joint_punct = Punct::new(':', Spacing::Joint); let stream = TokenStream::from(TokenTree::Punct(joint_punct)); let punct = match stream.into_iter().next().unwrap() { TokenTree::Punct(punct) => punct, _ => unreachable!(), }; assert_eq!(punct.spacing(), Spacing::Joint); } #[test] fn raw_identifier() { let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter(); match tts.next().unwrap() { TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()), wrong => panic!("wrong token {:?}", wrong), } assert!(tts.next().is_none()); } #[test] fn test_debug_ident() { let ident = Ident::new("proc_macro", Span::call_site()); #[cfg(not(span_locations))] let expected = "Ident(proc_macro)"; #[cfg(span_locations)] let expected = "Ident { sym: proc_macro }"; assert_eq!(expected, format!("{:?}", ident)); } #[test] fn test_debug_tokenstream() { let tts = TokenStream::from_str("[a + 1]").unwrap(); #[cfg(not(span_locations))] let expected = "\ TokenStream [ Group { delimiter: Bracket, stream: TokenStream [ Ident { sym: a, }, Punct { char: '+', spacing: Alone, }, Literal { lit: 1, }, ], }, ]\ "; #[cfg(not(span_locations))] let expected_before_trailing_commas = "\ TokenStream [ Group { delimiter: Bracket, stream: TokenStream [ Ident { sym: a }, Punct { char: '+', spacing: Alone }, Literal { lit: 1 } ] } ]\ "; #[cfg(span_locations)] let expected = "\ TokenStream [ Group { delimiter: Bracket, stream: TokenStream [ Ident { sym: a, span: bytes(2..3), }, Punct { char: '+', spacing: Alone, span: bytes(4..5), }, Literal { lit: 1, span: bytes(6..7), }, ], span: bytes(1..8), }, ]\ "; #[cfg(span_locations)] let expected_before_trailing_commas = "\ TokenStream [ Group { delimiter: Bracket, stream: TokenStream [ Ident { sym: a, span: bytes(2..3) }, Punct { char: '+', spacing: Alone, span: bytes(4..5) }, Literal { lit: 1, span: bytes(6..7) } 
], span: bytes(1..8) } ]\ "; let actual = format!("{:#?}", tts); if actual.ends_with(",\n]") { assert_eq!(expected, actual); } else { assert_eq!(expected_before_trailing_commas, actual); } } #[test] fn default_tokenstream_is_empty() { let default_token_stream: TokenStream = Default::default(); assert!(default_token_stream.is_empty()); } #[test] fn tuple_indexing() { // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322 let mut tokens = "tuple.0.0".parse::().unwrap().into_iter(); assert_eq!("tuple", tokens.next().unwrap().to_string()); assert_eq!(".", tokens.next().unwrap().to_string()); assert_eq!("0.0", tokens.next().unwrap().to_string()); assert!(tokens.next().is_none()); } #[cfg(span_locations)] #[test] fn non_ascii_tokens() { check_spans("// abc", &[]); check_spans("// ábc", &[]); check_spans("// abc x", &[]); check_spans("// ábc x", &[]); check_spans("/* abc */ x", &[(1, 10, 1, 11)]); check_spans("/* ábc */ x", &[(1, 10, 1, 11)]); check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]); check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]); check_spans("/*** abc */ x", &[(1, 12, 1, 13)]); check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]); check_spans(r#""abc""#, &[(1, 0, 1, 5)]); check_spans(r#""ábc""#, &[(1, 0, 1, 5)]); check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]); check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]); check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]); check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]); check_spans("'a'", &[(1, 0, 1, 3)]); check_spans("'á'", &[(1, 0, 1, 3)]); check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); check_spans("/*! 
a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); check_spans("abc", &[(1, 0, 1, 3)]); check_spans("ábc", &[(1, 0, 1, 3)]); check_spans("ábć", &[(1, 0, 1, 3)]); check_spans("abc// foo", &[(1, 0, 1, 3)]); check_spans("ábc// foo", &[(1, 0, 1, 3)]); check_spans("ábć// foo", &[(1, 0, 1, 3)]); check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]); check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]); } #[cfg(span_locations)] fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) { let ts = p.parse::().unwrap(); check_spans_internal(ts, &mut lines); assert!(lines.is_empty(), "leftover ranges: {:?}", lines); } #[cfg(span_locations)] fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) { for i in ts { if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { *lines = rest; let start = i.span().start(); assert_eq!(start.line, sline, "sline did not match for {}", i); assert_eq!(start.column, scol, "scol did not match for {}", i); let end = i.span().end(); assert_eq!(end.line, eline, "eline did not match for {}", i); assert_eq!(end.column, ecol, "ecol did not match for {}", i); if let TokenTree::Group(g) = i { check_spans_internal(g.stream().clone(), lines); } } } } proc-macro2-1.0.28/tests/test_fmt.rs000064400000000000000000000024150072674642500154270ustar 00000000000000use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; use std::iter::{self, FromIterator}; #[test] fn test_fmt_group() { let ident = Ident::new("x", Span::call_site()); let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident))); let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new()); let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone()); let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new()); let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone()); let braces_empty = 
Group::new(Delimiter::Brace, TokenStream::new()); let braces_nonempty = Group::new(Delimiter::Brace, inner.clone()); let none_empty = Group::new(Delimiter::None, TokenStream::new()); let none_nonempty = Group::new(Delimiter::None, inner.clone()); // Matches libproc_macro. assert_eq!("()", parens_empty.to_string()); assert_eq!("(x)", parens_nonempty.to_string()); assert_eq!("[]", brackets_empty.to_string()); assert_eq!("[x]", brackets_nonempty.to_string()); assert_eq!("{ }", braces_empty.to_string()); assert_eq!("{ x }", braces_nonempty.to_string()); assert_eq!("", none_empty.to_string()); assert_eq!("x", none_nonempty.to_string()); }