derive-deftly-macros-1.0.0/.cargo_vcs_info.json0000644000000001440000000000100150560ustar { "git": { "sha1": "a830445ec1f8e3ddad090950f2fdd4eb1d5bf112" }, "path_in_vcs": "macros" }derive-deftly-macros-1.0.0/Cargo.lock0000644000000147730000000000100130460ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "derive-deftly-macros" version = "1.0.0" dependencies = [ "heck", "indexmap", "itertools", "proc-macro-crate", "proc-macro2", "quote", "sha3", "strum", "syn", "void", ] [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "indexmap" version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "itertools" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" dependencies = [ "either", ] [[package]] name = "keccak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] [[package]] name = "libc" version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "proc-macro-crate" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ "toml_edit", ] [[package]] name = "proc-macro2" version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" 
dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] [[package]] name = "rustversion" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "sha3" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" dependencies = [ "digest", "keccak", ] [[package]] name = "strum" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce1475c515a4f03a8a7129bb5228b81a781a86cb0b3fbbc19e1c556d491a401f" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9688894b43459159c82bfa5a5fa0435c19cbe3c9b427fa1dd7b1ce0c279b18a7" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", "syn", ] [[package]] name = "syn" version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" [[package]] name = "toml_edit" version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ "indexmap", "toml_datetime", "winnow", ] [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicode-ident" version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "void" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "winnow" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" dependencies = [ "memchr", ] derive-deftly-macros-1.0.0/Cargo.toml0000644000000033510000000000100130570ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. 
[package] edition = "2021" rust-version = "1.56" name = "derive-deftly-macros" version = "1.0.0" authors = [ "Ian Jackson ", "and the contributors to Rust derive-deftly", ] build = "build.rs" autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Macros that implement the derive_deftly crate" homepage = "https://gitlab.torproject.org/Diziet/rust-derive-deftly" readme = "README.md" license = "MIT" repository = "https://gitlab.torproject.org/Diziet/rust-derive-deftly" [features] beta = [] case = ["heck"] expect = [ "sha3", "syn/full", ] meta-as-expr = ["syn/full"] meta-as-items = ["syn/full"] [lib] name = "derive_deftly_macros" path = "macros.rs" proc-macro = true [dependencies.heck] version = ">=0.4, <0.6" optional = true [dependencies.indexmap] version = ">=1.8, <3" [dependencies.itertools] version = ">=0.10.1, <0.16" [dependencies.proc-macro-crate] version = ">=1.1.3, <4" [dependencies.proc-macro2] version = "1.0.53" [dependencies.quote] version = "1" [dependencies.sha3] version = "0.10" optional = true [dependencies.strum] version = ">=0.24, <0.28" features = ["derive"] [dependencies.syn] version = "2.0.53" features = ["extra-traits"] [dependencies.void] version = "1" derive-deftly-macros-1.0.0/Cargo.toml.orig000064400000000000000000000021611046102023000165360ustar 00000000000000[package] name = "derive-deftly-macros" version = "1.0.0" edition = "2021" readme = "README.md" authors=["Ian Jackson ", "and the contributors to Rust derive-deftly"] license = "MIT" description="Macros that implement the derive_deftly crate" homepage = "https://gitlab.torproject.org/Diziet/rust-derive-deftly" repository = "https://gitlab.torproject.org/Diziet/rust-derive-deftly" rust-version = "1.56" # After editing this file, you will probably need to run # maint/update-bizarre # to update the "bizarre" testing versions in tests/pub-export/bizarre-* [features] case = ["heck"] expect = ["sha3", "syn/full"] meta-as-expr = ["syn/full"] 
meta-as-items = ["syn/full"] beta = [] [dependencies] indexmap = ">=1.8, <3" itertools = ">=0.10.1, <0.16" proc-macro-crate = ">=1.1.3, <4" proc-macro2 = "1.0.53" quote = "1" sha3 = { version = "0.10", optional = true } strum = { version = ">=0.24, <0.28", features = ["derive"] } syn = { version = "2.0.53", features = ["extra-traits"] } void = "1" heck = { version = ">=0.4, <0.6", optional = true } [lib] path = "macros.rs" proc-macro = true derive-deftly-macros-1.0.0/HACKING.md000064400000000000000000000331741046102023000152450ustar 00000000000000# **Hacking on derive-deftly (`HACKING.md`)** Rust procedural macros are a somewhat awkward environment, and, especially, testing them can be complex. * [Required reading](#required-reading) * [User-facing documentation](#user-facing-documentation) * [Generated and auto-updated files in the git tree](#generated-and-auto-updated-files-in-the-git-tree) * [`tests/pub-export/bizarre-facade/*` etc., updated by `maint/update-bizarre`](#testspub-exportbizarre-facade-etc-updated-by-maintupdate-bizarre) * [`Cargo.lock`, updated by `nailing-cargo update`.](#cargolock-updated-by-nailing-cargo-update) * [`Cargo.lock.minimal`, updated by `update-minimal-versions`.](#cargolockminimal-updated-by-update-minimal-versions) * [Tables of contents in various `*.md`, updated by `maint/update-tocs`.](#tables-of-contents-in-various-md-updated-by-maintupdate-tocs) * [Cross-references in `reference.md`, updated by `maint/update-reference-xrefs`](#cross-references-in-referencemd-updated-by-maintupdate-reference-xrefs) * [Testing - see `tests/tests.rs`](#testing---see-teststestsrs) * [Reporting errors during template parsing and expansion](#reporting-errors-during-template-parsing-and-expansion) * [Adding an expansion keyword](#adding-an-expansion-keyword) * [Accessing the driver](#accessing-the-driver) * [Expansion keywords with content or arguments](#expansion-keywords-with-content-or-arguments) * [Adding a keyword that can appear in `${paste }` 
and/or `${CASE }`](#adding-a-keyword-that-can-appear-in-paste--andor-case-) * [Adding a boolean keyword](#adding-a-boolean-keyword) * [clippy](#clippy) * [Updating the pinned clippy (housekeeping task)](#updating-the-pinned-clippy-housekeeping-task) * [clippy `#[allow]`s - strategy and policy](#clippy-allows---strategy-and-policy) * [Updating the pinned Nightly Rust (used in tests and CI)](#updating-the-pinned-nightly-rust-used-in-tests-and-ci) * [Choose which Nightly Rust version to update to](#choose-which-nightly-rust-version-to-update-to) * [Update the nightly version number](#update-the-nightly-version-number) * [Update the cargo-expand version](#update-the-cargo-expand-version) * [Prepare and merge the changes](#prepare-and-merge-the-changes) * [Compatibility testing (and semver updates)](#compatibility-testing-and-semver-updates) ## Required reading derive-deftly uses types and traits from [`syn`] and [`mod@quote`], extensively. It will be very helpful to run one of ```text maint/build-docs-local --dev # or cargo doc --document-private-items --workspace ``` to get a local rendering including for the internal APIs. That will also get a **rendering of this file with working links**, as `target/doc/derive_deftly_macros/_doc_hacking/index.html`. [`NOTES.md`](_doc_notes) has some ideas for the future, which we may or may not implement. (Comments welcome!) ## User-facing documentation Our user-facing documentation is divided between our `rustdoc` documentation and our `mdbook` source. The user guide (currently only an introduction) lives in book/src/*. See the [`mdbook`](https://rust-lang.github.io/mdBook/) documentation for more implementation. To build all the user-facing documentation, run `maint/build-docs-local` from the top-level directory, and look in the directory it tells you. ## Generated and auto-updated files in the git tree The git tree contains some files which are actually maintained by scripts in `maint/`. 
### `tests/pub-export/bizarre-facade/*` etc., updated by `maint/update-bizarre` "Bizarre" version of `derive-deftly`, used for [cross-crate compatibility testing](../../pub_b/index.html). CI will check that these outputs are up to date with the normal top-level `Cargo.toml`s, and `pub-b.rs`, from which they are generated. ### `Cargo.lock`, updated by `nailing-cargo update`. Example lockfile. Used in the CI tests, which (in most tests) pin all of our dependencies. If you're not a user of [`nailing-cargo`](https://diziet.dreamwidth.org/tag/nailing-cargo) you can update this simply by copying a `Cargo.lock` made with `cargo update`. ### `Cargo.lock.minimal`, updated by `update-minimal-versions`. Minimal versions of our dependencies, used for CI testing of our MSRV, etc. `update-minimal-versions` runs `cargo +nightly update ...`, so you have to have a Rust Nightly installed. ### Tables of contents in various `*.md`, updated by `maint/update-tocs`. These are inserted at the `` marker. Checked by CI, but it's only a warning if it's not up to date. ### Cross-references in `reference.md`, updated by `maint/update-reference-xrefs` There are `x:...` and `c:...` `
`s, surrounding each heading describing expansions and conditions. And indexes, at the bottom of the file. Again, checked by CI, but it's only a warning if it's not up to date. ## Testing - see `tests/tests.rs` derive-deftly has comprehensive tests. But, they are complicated (mostly because testing proc macros is complicated). You have to use **a particular version of Nightly Rust**. **See [`tests/tests.rs`](../../derive_deftly_tests/index.html)** for information on how to run and update the tests. ## Reporting errors during template parsing and expansion Generally, we use only `syn::Error` as the error type. Use the [`MakeError`] convenience trait's [`.error()`](MakeError::error) method to construct errors. Often, it is a good idea to generate an error pointing at the relevant parts of both the driver and the template; [`MakeError`]'s implementation on [`[ErrorLoc]`](ErrorLoc) is good for this. ## Adding an expansion keyword You need to decide if it should be useable in `${paste }`. Generally, identifiers (or identifier-like things) strings, and types should, and other things shouldn't. For now let's assume it shouldn't be useable in `${paste }`. And, you need to decide if it should be useable as a boolean expression, in `${if }` etc. Again, for now, let's assume not. Add the keyword to [`pub enum SubstDetails`](syntax::SubstDetails) in `syntax.rs`. If the keyword isn't a Rust keyword, use its name precisely, in lowercase. The enum variannt should contain: * Any arguments allowed and supplied, in their parsed form * Markers `O::NotInPaste` and `O::NotInBool`, as applicable. Add the keyword to the parser in `impl ... Parse for Subst`. Use the provided `keyword!` macro. For the markers, use `not_in_paste?` and `not_in_bool?`. The compiler will now insist you add arms to various matches. Most will be obvious. The meat of the expansion - what your new keyword means - is in `SubstDetails::expand`, in `expand.rs`. 
For an expansion which isn't permitted in `${paste ..}`, call [`out.append_tokens_with()`](framework::ExpansionOutput::append_tokens_with) or [`out.append_tokens()`](framework::ExpansionOutput::append_tokens). You'll also want to add documentation to `doc/reference.md`, arrangements for debug printing in `macros/dbg_allkw.rs`, test cases in `tests/expand/` and maybe `tests/ui/`, and possibly discussion in `book/src/`. ### Accessing the driver Information about the driver (and the current variant and field) is available via [`framework::Context`]. (Use the methods on `Context`, such as [`field()`](framework::Context::field), to get access to the per-field and per-variant details, rather than using `Context.variant` and open-coding the error handling for `None`.) ### Expansion keywords with content or arguments Parse the content from `input`, in the `keyword!` invocation. See `tmeta` et al for an example. Usually it is best to make a Rust type to represent the content or arguments, if there isn't a suitable one already. To parse a boolean expression, use `Subst`. (Probably, in a `Box`, like in `when`). Normally it is best to put the `O::Not...` markers directly in the `SubstDetails` enum variant; that makes it easier to extract them for use in the `match` arms. It is fine to have thsee markers in an argument type *as well*. For a sophisticated example of this, see `SubstMeta`, which allows `... as ...`, except in boolean context. For named arguments, use [`syntax::ParseUsingSubkeywords`]. ### Adding a keyword that can appear in `${paste }` and/or `${CASE }` Removing `O::NotInPaste` marker from a `SubstDetails` variant will allow the template to contain that keyword within `${paste}` and `${CASE}`. You won't be able to call `out.append_tokens` any more. Instead, you must use one of the more specific [`framework::ExpansionOutput`] methods, such as `append_identfrag` or `append_idpath`. ### Adding a boolean keyword This is fairly straightforward. 
Use `is_enum` (say) as an example. ## clippy We *do* run clippy, but we turn off all `style` and `complexity` lints. In CI, we test with a pinned version of clippy, currently 1.79.0, because clippy often introduces new lints, and we want to handle that breakage in a controlled fashion. If your MR branch fails the clippy job, you can repro locally with: ```text rustup toolchain add 1.79 rustup component add clippy cargo +1.79 clippy --locked --workspace --all-features ``` ### Updating the pinned clippy (housekeeping task) * Update the version in `.gitlab-ci.yml`, and above. * Run the new clippy and fix or allow lints as appropriate. ### clippy `#[allow]`s - strategy and policy We put `#![allow(clippy::style, clippy::complexity)]` in every top-level Rust file. In tests, we have `#![allow(clippy::style, clippy::complexity, clippy::perf)]`. (Some files which are sufficiently simple to not trigger any lints, are lacking these annotations. We'll add them as needed.) Feel free to add an `#[allow]` if it seems like clippy wants you to make the code worse. We often prefer code which isn't "minimal", if it seems clearer, or more consistent with other nearby code, or if it might make future edits easier. For a clippy false positive, link to the upstream bug report, eg `#[allow(clippy::non_minimal_cfg)] // rust-clippy/issues/13007` ## Updating the pinned Nightly Rust (used in tests and CI) The docker image and the nightly version number `+nightly-YYYY-MM-DD` must be kept in sync. `cargo expand` will probably need updating too. ### Choose which Nightly Rust version to update to Use this to select a corresponding Nightly Rust and container image:
To parse the JSON, you can use a rune like this:
`curl https://www.chiark.greenend.org.uk/~ian/docker-tags-history/rustlang,rust/tags.2025-02-10T13:08+00:00.gz | zcat | jq -r '.results[] | select(.name | match("^nightly-bookworm$")) | .images[] | select(.architecture | match("^amd64"))'` Pick a date just before upstream Rust branched for a release, since that way we'll maybe have a less-buggy nightly. Note the date `YYYY-MM-DD` (from the `tags.` part of the URL) and use the `jq` rune to get the sha256 image digest. (The Docker official way seems to be to visit and look for the `nightly-bookworm` tag, or whatever. However, as far as I can tell historical information is not available, even though the images *are* retained!) ### Update the nightly version number Install the chosen nightly: `rustup toolchain add nightly-YYYY-MM-DD` Then run: ```text TRYBUILD=overwrite MACROTEST=overwrite STDERRTEST=overwrite \ cargo +nightly-YYYY-MM-DD test --workspace --all-features ``` **Inspect the output carefully** before committing. Use `git-grep` on the old nightly date string and fix all the instances. ### Update the cargo-expand version Quite likely, you'll need to update cargo-expand too, since it may not build with the new nightly. Find the most recent version on `crates.io`, and `cargo install --locked --version 1.0.NN cargo-expand` Run the overwriting test rune, above. **Inspect the output carefully** before committing. Edit `.gitlab-ci.yml` with search-and-replace to fix all the occurrences. ### Prepare and merge the changes 1. Make an MR of any preparatory fixes you found you needed to make. Such changes ought to work with all compiler versions, and should be made into an MR of their own. 2. 
When that has merged, make an MR containing *one commit*: - gitlab image update - `cargo expand` update - consequential changes to expected test outputs - `Cargo.lock` update (minimal-versions ought not to change here) This has to be its own MR because it changes, along the way, things that the `every-commit` test assumes don't change. Splitting it into multiple MRs arranges to refresh those assumptions.

3. Finally, make an MR for any changes you wish to make to this doc. ## Compatibility testing (and semver updates) New template features can be added, and that's just a semver addition, as usual; if a breaking change to a template feature is needed, that is just a semver breaking change. More intrusive or technical changes can cause semver breaks *in crates that export templates*. See [the public API docs for `define_derive_deftly`](macro@define_derive_deftly#you-must-re-export-derive_deftly-semver-implications) and [`template_export_semver_check`](macro@template_export_semver_check). Such breaking changes should be avoided if at all possible. There are complex arrangements for testing that compatibility isn't broken accidentally, mostly in `compat/`. (See `tests/compat/README.md`.) If it is necessary to make such a totally breaking change, consult the git history and see how it was done last time. (`git annotate` on the implementation of `template_export_semver_check` or `git log -G` on a relevant version number may be helpful to find the relevant MR and its commits). derive-deftly-macros-1.0.0/NOTES.md000064400000000000000000000316441046102023000150710ustar 00000000000000# **Notes and plans (`NOTES.md`)** # Future template features ## Stringification See [#64](https://gitlab.torproject.org/Diziet/rust-derive-deftly/-/issues/64). This is a new expansion context, admitting: * Literal strings (including raw strings, but not byte strings) * Expansions (inner template code must also be valid in string context) * Doubled dollars `$$` Not permitted are other kinds of non-expansion tokens, including non-string literals, punctuation, or identifiers. Expansions that are valid in token context are all allowed. Leaf expansions expand to their string representation (`TokenStream as Display`). Expansions that yield types are stringified *as types* (which has an impact on spacing). The precise formatting of the output is not to be relied on. 
### Keyword `${string ...}` Body is a template valid in string context. When expanded in token context, expands to a string literal, whose value is all the content strings, concatenated. Within `${string ...}`, literal strings and the results of inner `${string }` expansions (and `$"..."` templates) are not re-quoted. ### Case conversion Case conversion is allowed inside stringification; the contents must be a valid stringification content. Case conversion is then applied to the stringified text. So inside `${stringify }`, case changing doesn't force pasting context. Identifier pasing `${paste }` and `$< ... >` is also allowed, and expands to the string representation of the type. ### Interaction with `$define` User-defined expansions whose body is precisely `${string }` or `$"..."` are not requoted in string context. For other user defined expansions, the content is evaluated as tokens, and that token stream is converted to a string (requoting any string literals it may contain). This is in semantics similar to `${paste }`, and ensures that any particular template text is statically either string context, or not. Examples: ```rust,ignore ${define A { ${string "a"} }} $A ${define B { ${string "b"} }} ${string $B} ${define C { "c" }} ${string $C} ``` Expands to: ```rust,ignore "a" "b" r#""c""# ``` ### String templating `$"..."` This is a convenient syntax for specifying a string template. The leading `$` is followed by a (possbily raw) string literal. The string literal contents is lexed as follows: * Literal text * Expansions in the form `$keyword` (or user-defined `$NAME`). * Doubled dollars `$$` * Possibly, at some point `${keyword ARGS...}` but with limitations. Possible limitation producing an initial level of support is: * the characters `"'` cannot appear in the `ARGS...` * Possibly, at some point `$< ... >` with the same limitation. This template is equivalent to a `${string ...}` containing the same elements. 
Possibly in the future we might support `${"template with $1 positional arguments" $< blarky $fname >}` or similar. ### Doc attributes We could support `$///` and `$//!`. The compiler turns these into `#[doc(...)]` and `#![doc(...)]`. So we would have to recognise `$#[doc` and `$#![doc`. (This means we would find it hard (or maybe impossible) to use `$#` for anything other than attributes.) The literal would be processed as for `$"..."`. ### Byte strings We could support byte literals maybe. `${string }` doesn't have a way to request them; there would probably have to be `${byte_string }`. `$"..."` does have a natural way: `$b"..."`. Supporting byte strings would mean allowing `b"..."` within (the relevant) string context, posing the possibility of `${string b"non-utf8 bytes"}` which has to be an error. When is this error checked? Is there one kind of string context, or two? Do we allow `${string b"valid utf8"}`? We could detect the invalid utf-8 at expansion time. But perhaps we should declare that we reserve the right to check this statically, and that wouldn't be a breaking change? 
## Scopes (for meta, and loops) See [discussion in #36](https://gitlab.torproject.org/Diziet/rust-derive-deftly/-/issues/36#note_3015721) ### Demo - cross product, allowing access to outer repetition ```rust,ignore ${for let a = fields { ${for let b = fields { self_cross_product_contains(${a.fname} ${b.fname}); }} }} ``` ### Fixing awkwardness re optional meta attributes [#40](https://gitlab.torproject.org/Diziet/rust-derive-deftly/-/issues/40) ```rust,ignore ${if let m = fmeta(call) { ${m.meta as expr} (&self.$fname); }} ``` ### Handling meta attribute multiplicity [#36](https://gitlab.torproject.org/Diziet/rust-derive-deftly/-/issues/36) ```rust,ignore ${for let m = fmeta(call) { ${m.meta as expr} (&self.$fname); }} ``` ### Looking for a thing in various places: ```rust,ignore ${if let m = any(fmeta(call), vmeta(fields(call)), tmeta(fields(call))) {..}} ${for let m = all(fmeta(call), vmeta(fields(call)), tmeta(fields(call))) {..}} ${if let m = select1(fmeta(call), fmeta(obsolete_name_for_call)) {..}} ``` ### Meta scope referring to nonexistent meta item? Can a meta item scope refer to a putative, but actually nonexistent, item? Not sure if we need this. [#62](https://gitlab.torproject.org/Diziet/rust-derive-deftly/-/issues/62) I suggest not, in the first instance. ```rust,ignore ${let m = fmeta(call) { ${if ${m.meta} { ... }} }} ``` ### Details of binding in `if ... any` etc. What about mixed binding and non-binding conditions? Options are (a) reject this (b) on the non-binding arms, bind to nonexistent meta. I think I prefer (a). It's the more cautious approach, certainly. ```rust,ignore ${if let m = any(fmeta(call), true) { ${if ${m.meta} { ... }} }} ``` ### Meta scopes vs repeat scopes Every scope is either a meta scope or a repeat scope. `${scope.meta}` is only allowed for meta scopes. Other expansions, including notably `${scope.Xmeta}`, are only allowed for repeat scopes. ```rust,ignore ${for let m = fmeta(call) { .. ${m.fmeta(call)} .. 
// ERROR, user wrote `fmeta`, wanted `meta` .. ${m.meta(call)} .. // OK }} ${for let m = fields { .. ${m.fmeta(call)} .. // OK .. ${m.meta(call)} .. // ERROR, m isn't a meta scope // user must write ${m.fmeta} }} ``` #### Alternative With a meta scope `m`, the only legal expansion using it is `${m.meta((call)}` or whatever. (Right now; But expansions other than `$Xmeta` might be OK to support. Not `$Xmeta` because it has too much scope for error: since `${if let m = fmeta(call) { .. ${m.fmeta(..)} .. }}` doesn't do at all what the user might expect.) So, instead, we could say that you don't need to write `.meta` and have it be just `${m(call)}`. But: * Now it lives in the same namespace as keywords, and is a user-defined name, so it must be uppercase. `${M(call)}`. * This reduces the similarity between meta scopes and normal scopes. * It would prevent us supporting eg `${m.fname}` in the future, which might be useful with something like `${if let m = find1(field, fmeta(call)) { $m.fname ... }}`. (meaning "find the precisely one field with `#[deftly(call)]`, and then expand stuff with it), or other things I haven't thought of yet. * If we support arguments to user-defined meta items, the syntax for passing them wouldn't look like the meta syntax, so `${M(call)}` is syntactically weird. ### Binding and checking Binding is dynamic (like `${define }`) (despite the use of `let` which is often lexical in other languages including Rust). (Meta attribute checking is dynamic and precise, naturally.) ## Splitting off fields and handling subsets of the generics Syntax and semantics TBD. Some notes: ```text For things that need to split off fields struct Foo as above { and talk only about subsets of the generics field: Box, generic parameter uses (for fields) $fgens T, $fgens_omitted 'l, C For explicit iteration, within ${for tgens ...} or $( ... ) $tgname 'l T C $tgbounds ??? Something for being able to handle structs/unions/enums equally in template, whatever that means. 
We need to expand something to struct/union/enum, and possibly the brackets and commas in enum { ..., ..., }, and so on. ``` # Future plans wrt macro namespace questions ## Deriving from things other than data structures It would be nice to be able to eventually support deriving from items (traits, fns, ...). This would have to be an attribute proc macro. Attribute proc macros get to modify their applicand, but we would not do that. Ideally that attribute macro would be `#[derive_deftly]`. However: * We are already using that as an inert helper attribute for `#[derive(Deftly)]`. Possibly we could experiment to see how that would interact with a non-inert attribute macro, except that: * It is not possible to have an attribute macro and a function-like macro with the same name; even though the invocation syntaxes (and implementing macro function signatures) are different. ## Proposed taxonomy of macros and attributes We won't implement all of this right away, but it is good to have a plan to make sure the names we take now won't get in the way. * **`#[derive(Deftly)]`**: invokes the from-struct derivation machinery; enables: 1. use of `#[derive_deftly(ReuseableMacro)]` on this very struct 2. later use of `derive_deftly_adhoc!` of the same struct (if `#[derive_defly_adhoc]` specified) 3. `#[deftly(...)]` attributes on bits of the data structure (checked via chaining technique). * **`define_derive_deftly!{ [export] MACNAME: TEMPLATE }`**: define a reusable template, which may be invoked as `#[derive_deftly(MACNAME)]` (within a struct annotated with `#[derive(Deftly)]` or `#[item_derive_deftly(MACNAME)]`. * **`derive_deftly_adhoc!{ DRIVERNAME: TEMPLATE }`**: adhoc derivation from something previously annotated with `#[derive(Deftly)]` or `#[item_derive_deftly]`. `DRIVERNAME` is an item path; we conflate the type and value namespaces. * **`#[derive_defly_adhoc]`**: Inert helper attribute to enable use of `derive_deftly_adhoc!`. 
* **`#[item_derive_deftly(MACNAME)]`**: attribute macro to be applied to items. The item is reproduced unchanged, except that `#[deftly]` attributes *in places where we would look for them* are filtered out. `#[item_derive_deftly]` will look forward to see if there are further `#[item_derive_deftly]` attributes, so that they can be combined and processed together (this is necessary for correctness of meta attr handling). Template *must* use `for ...` option. `#[derive_deftly_adhoc]` works as usual. It's an error to have `#[item_derive_deftly]` without the `()`. * **`#[deftly]`**: Inert helper attribute for `#[derive(Deftly)]`. Filtered-out attribute for `#[item_derive_deftly]`. Contents available via `$Xmeta`. * **`#[only_derive_deftly]`**: attribute macro to be applied to items; like `#[item_derive_deftly]` but *consumes and does not emit* the item. (We don't really need to be sure about this name; this will be an unusual case and we can give it whatever name seems good, later.) ## consume and not emit: ### Composition problem with `#[deftly]` attributes You should be able to compose mutually-ignorant derive-deftly templates. In particular you should be able to chain transforming templates, and transforming templates should be able to output invocations of normal deriving templates. This won't work without doing "something", because the outer invocation (the first to process the input) will see a bunch of unrecognised `#[deftly]` attributes. I'm not sure what the answer is, but perhaps a template option for accepting `#[deftly]` attrs and `${attr}` for transforming them. Then the caller could `#[deftly(inner(foo))]` and the inner template would receive `#[deftly(foo)]`. Perhaps. ### Possible alternative syntax/naming * **`#[transform_deftly]`**: attribute macro to be applied to items. * d-d option `transform`. Insists that this template is for `#[transform_deftly]` only. ## `for ...` d-d options Currently, we have `for enum`, `for struct`, `for union`. 
These are fine. We want also want ways to say: * `struct` or `enum`, not `union`: `for data`? * Particular kind of item: `fn`, `trait`, `mod`, `const`. * Any item: `item` (with `#[item_derive_adhoc]` for non-data items). * Combinations of the above: eg `for fn/const`? Outstanding questions, therefore: * Does `for any` mean anything? * What keyword is "`struct`/`enum`"? * Do we need a keyword for `struct`/`enum`/`union`? Probably, since this is going to be the default! * Is `/` the right separator for "or"? ### Internals * **`derive_deftly_engine!`**: Proc macro that does all the work. * **`derive_deftly_driver_DRIVERNAME!`**: `macro_rules` macro generated by `#[derive(Deftly)]` and `#[item_derive_deftly]`, embodying a driver. * **`derive_deftly_template_MACNAME!`**: `macro_rules` macro generated by `define_derive_deftly!`, embodying a template. # Things to check before declaring 1.0 None! But we should get some experience with the renamed crate, probably by upgrading arti to it. derive-deftly-macros-1.0.0/README.md000064400000000000000000000001001046102023000151150ustar 00000000000000Macros for `derive_deftly` **Import `derive_deftly` instead.** derive-deftly-macros-1.0.0/accum.rs000064400000000000000000000077151046102023000153170ustar 00000000000000//! `derive_deftly_engine!()`, parsing accumulations use super::framework::*; use adviseable::*; /// `derive_deftly_engine! accumulated form, accumulated information /// /// We don't reify the whole input; /// instead, we accumulate directly in the `Parse` impl. 
#[derive(Debug)] pub struct EngineFinalInput { driver: syn::DeriveInput, accum: Accumulated, } #[derive(Debug)] pub struct Accumulated { metas: meta::CheckUsed, } impl EngineFinalInput { pub fn parse_adviseable_remainder( driver: syn::DeriveInput, input: ParseStream, ) -> AdviseableResult { let _empty_next_brackets_contents; let _ = bracketed!(_empty_next_brackets_contents in input); let accum; let _ = bracketed!(accum in input); let accum = accum.parse()?; let _: TokenStream = input.parse()?; Ok(AOk(EngineFinalInput { driver, accum })) } } impl Parse for Accumulated { fn parse(input: ParseStream) -> syn::Result { use meta::CheckUsed as mCU; let mut metas = mCU::Check(meta::Accum::default()); struct Ignore; while !input.is_empty() { let kind: syn::Ident = input.parse()?; match if kind == "_meta_used" { if let mCU::Check(m) = &mut metas { match input.parse()? { mCU::Check(y) => m.used.push(y), mCU::Unchecked => metas = mCU::Unchecked, } continue; } else { Ignore } } else if kind == "_meta_recog" { if let mCU::Check(m) = &mut metas { let content; let _brackets = bracketed!(content in input); let input = content; while !input.is_empty() { use meta::Usage as mU; let allow = match input.parse()? { Some::(_) => mU::BoolOnly, None => mU::Value, }; let desig = input.parse()?; m.recog.update(desig, allow); } continue; } else { Ignore } } else if kind == "error" { metas = mCU::Unchecked; Ignore } else if kind.to_string().starts_with('_') { Ignore } else { return Err( kind.error("unrecognised mandatory accumulation kind") ); } { Ignore => { let _: TokenTree = input.parse()?; } } } Ok(Accumulated { metas }) } } impl EngineFinalInput { pub fn process(self) -> syn::Result { let r = Context::call( &self.driver, &dummy_path(), // template_crate, not used by our f None, // template_name |ctx| { if let mCU::Check(m) = &self.accum.metas { for group in &m.used { adviseable_parse2_call( group.content.clone(), |input| { ctx.decode_update_metas_used(input)?; Ok(AOk(())) }, )? 
} } let mut errors = ErrorAccumulator::default(); if let mCU::Check(m) = &self.accum.metas { ctx.check_metas_used(&mut errors, &m.recog); } errors.finish() }, ); let mut out = TokenStream::new(); match r { Ok(()) => {} Err(e) => e.into_compile_error().to_tokens(&mut out), } Ok(out) } } derive-deftly-macros-1.0.0/adhoc.rs000064400000000000000000000035011046102023000152720ustar 00000000000000//! Macro impl for adhoc template application `derive_deftly_adhoc!` use super::prelude::*; #[derive(Debug, Clone)] struct TemplateInvocation { driver: syn::Path, options: UnprocessedOptions, colon: Token![:], template: TokenStream, } impl Parse for TemplateInvocation { fn parse(input: ParseStream) -> syn::Result { let driver = input.parse()?; let options = UnprocessedOptions::parse(&input, OpContext::TemplateAdhoc)?; let colon = input.parse()?; let template = input.parse()?; Ok(TemplateInvocation { driver, options, colon, template, }) } } /// This is `derive_deftly_adhoc!` /// /// It parses /// ```rust,ignore /// StructName: /// SOME_TOKENS /// ``` /// and expand it to an invocation of /// ```rust,ignore /// derive_deftly_driver_StructName /// ``` /// as per NOTES.txt. pub fn derive_deftly_adhoc( input: TokenStream, ) -> Result { let TemplateInvocation { driver, options, colon, template, } = syn::parse2(input)?; dprint_block!( [&driver.to_token_stream(), &template], "derive_deftly_adhoc! input", ); let driver_mac_name = { let mut name = driver; let last = name.segments.last_mut().ok_or_else(|| { colon.error( "expected non-empty path for driver struct name, found colon", ) })?; last.ident = format_ident!("derive_deftly_driver_{}", &last.ident); name }; let output = quote! { #driver_mac_name !{ { #template } { ($) } ( crate; [#options] ; ) [] [] } }; dprint_block!(&output, "derive_deftly_adhoc! output"); Ok(output) } derive-deftly-macros-1.0.0/adviseable.rs000064400000000000000000000145631046102023000163250ustar 00000000000000//! errors with compatibility advice //! //! 
Suitable for local glob import. //! //! ### Non-local errors from syn //! //! `syn` automatically produces "unexpected token" errors, //! if not all of the input is consumed, somewhere. //! //! Empirically: //! these errors are squirreled away somewhere, and surface //! on return from one of the top-level syn `parse` functions //! (the ones that provide a `ParseStream`). //! //! If the top-level function would return `Ok`, //! the unexpected tokens error appears instead. //! But if there's going to be an error anyway, //! the unexpected tokens error is discarded. use super::prelude::*; /// A `Result` whose error might, or might not, need compat advice /// /// * `Err(raw)`: unexpected error, probably mismatched /// derive-deftly versions. /// [`adviseable_parse2`] will return the error /// but with compat advice for the user added. /// * `Ok(ErrNeedsNoAdvice(cooked))`: "expected" error, fully reported for /// the user's benefit. Returned as-is by `parse_advised`. /// * `Ok(Ok(T))`. All went well. /// /// This odd structure is to add a note to most of the errors that /// come out of syn parsing. The `braced!` etc. macros insist that the /// calling scope throws precisely `syn::Error`; `Into` /// isn't enough. /// /// This is the return type of `ParseAdviseable::parse_adviseable`. // // This is all rather unsatisfactory. For example, it implies // the AOk and ErrNNA type aliases and consequent ad-hoc glob imports // of this module. We'd prefer a custom error type, convertible from // syn::Error, but syn won't allow that. pub type AdviseableResult = syn::Result>; /// Typically found as `syn::Result>` #[derive(Debug)] pub enum AdviseableInnerResult { /// Success Ok(T), /// Failure, but doesn't need advice /// /// Typically found as /// `sync::Result::Ok(AdvisedInnerResult::NeedsNoAdvice(..))`. 
ErrNeedsNoAdvice(syn::Error), } /// Types that can be parsed, but might need compat advice pub trait ParseAdviseable { fn parse_adviseable(input: ParseStream) -> AdviseableResult where Self: Sized; } pub use AdviseableInnerResult::ErrNeedsNoAdvice as ErrNNA; pub use AdviseableInnerResult::Ok as AOk; /// Parses with a callback, and produces advice if necessary /// /// Version of `adviseable_parse2` that takes a callback function, /// rather than a trait impl. pub fn adviseable_parse2_call( input: TokenStream, call: impl FnOnce(ParseStream) -> AdviseableResult, ) -> syn::Result { // If somehow our closure doesn't get called, we want to give // advice, so make that the default. let mut needs_advice = true; let ar = Parser::parse2( |input: ParseStream<'_>| { // When we're returning an error that needs no advice, we must // return *Err* from here, not Ok(NNA), because if we return Ok, // syn might surface an unexpected tokens error instead of the // non-advice-needing error we actually want. // // Encoding the advice-needed status in the error would // be difficult, given how opaque syn::Error is. So // we smuggle a &mut bool into the closure. match call(input) { Err(needs) => Err(needs), Ok(ErrNNA(unadv)) => { needs_advice = false; Err(unadv) } Ok(AOk(y)) => Ok(y), } }, input, ); ar.map_err(|e| { if needs_advice { advise_incompatibility(e) } else { e } }) } /// **Parses `T`, and produces advice if necessary** (principal entrypoint) /// /// All top-level proc_macro entrypoints that want to give advice, /// should ideally call this. /// (See the note in `advise_incompatibility`.) pub fn adviseable_parse2( input: TokenStream, ) -> syn::Result { adviseable_parse2_call(input, T::parse_adviseable) } impl AdviseableInnerResult { pub fn map(self, f: impl FnOnce(T) -> U) -> AdviseableInnerResult { match self { AOk(y) => AOk(f(y)), ErrNNA(e) => ErrNNA(e), } } } /// Add a warning about derive-deftly version incompatibility /// /// ### Lost advice hazard /// /// Take care! 
/// `syn` generates errors from unprocessed tokens in [`syn::parse2`] etc. /// Calling this function *within* `syn::parse2` /// (ie, somewhere you have a `ParseStream`, /// will result in those errors not receiving advice. /// /// Ideally, call this from functions that have a `TokenStream`. /// If you do that, then functions *isnide* that can /// use this method, avoiding the problem: /// any errors stored up by `syn` will emerge at that call site, /// and be properly dealt with. pub fn advise_incompatibility(err_needing_advice: syn::Error) -> syn::Error { let mut advice = Span::call_site().error( "bad input to derive_deftly_engine inner template expansion proc macro; might be due to incompatible derive-deftly versions(s)" ); advice.combine(err_needing_advice); advice } /// Within `parse_adviseable`, handle errors *without* giving advice /// /// `parse_unadvised! { CONTENT_IDENT => || CLOSURE_BODY }` /// expects `CONTENT_IDENT` to be the contents from /// [`braced!`], [`bracketed!]` or [`parenthesized!`]. /// Calls the closure. /// Errors within the closure won't get advice. /// /// `parse_unadvised! { CONTENT_IDENT }` /// shorthand for calling `.parse()` on the content. /// /// # Sketch /// /// ```rust,ignore /// let something; /// let _ = bracketed!(something in input); /// parse_unadvised! { /// something => || { /// // do something with something, eg something.parse() /// Ok(...) /// } /// } /// ``` macro_rules! parse_unadvised { { $content:ident } => { parse_unadvised! { $content => || $content.parse() } }; { $content:ident => || $( $call:tt )* } => { match syn::parse::Parser::parse2( // We convert the input to the `TokenStream2` and back, // so that we surface "unexpected token errors" here // rather than at the toplevel parsing call. |$content: ParseStream<'_>| -> syn::Result<_> { $($call)* }, $content.parse::()? 
) { Ok(y) => y, Err::<_, syn::Error>(e) => return Ok(ErrNNA(e)), } } } derive-deftly-macros-1.0.0/approx_equal.rs000064400000000000000000000234531046102023000167240ustar 00000000000000//! Implementation of `${approx_equal ..}`, and support functions // // # Note on error handling // // Many functions here take `cmp_loc: ErrorLoc` and return `syn::Result`. // `cmp_loc` is the comparison operator (`kw_span` in `boolean.rs`, // referring to the `approx_equal` keyword. // // When generating errors, we include this in our list of ErrorLocs. // // An alternative would be to return a bespoke error type, // consisting of the pieces to make the error from. // I experimented with this, but it's definitely worse. // Also this has trouble handling a `syn::Error` from other code we call. use super::prelude::*; use proc_macro2::Group; use Equality::*; /// Return value of a (perhaps approximate) equality comparison /// /// (Avoids use of `bool`) #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum Equality { Equal, Different, } impl Equality { /// Compare `a` and `b` /// /// (Name is short but avoids clash with `Ord::cmp`) pub fn cmpeq(a: &T, b: &T) -> Self { if a == b { Equal } else { Different } } } /// Compare, and return early if different /// /// * **`cmpeq!(d: Equality)`**: /// If `d` is `Different`, returns `Ok(d)`. /// (The containing scope should return `Result`.) /// /// * **`cmpeq!(a: T, b: T);`**: /// compares `a` and `b` using `Equality::cmpeq`, /// and returns immediately if `a != b`, /// or the comparison failed. macro_rules! cmpeq { { $a:expr, $b:expr } => { cmpeq!(Equality::cmpeq(&$a, &$b)); }; { $r:expr } => { if let d @ Different = $r { return Ok(d); } }; } /// Return the input, but with `None`-delimited `Group`s flattened away /// /// Loses some span information. 
pub fn flatten_none_groups(ts: TokenStream) -> TokenStream { fn recurse(out: &mut TokenStream, input: TokenStream) { for tt in input { match tt { TT::Group(g) if g.delimiter() == Delimiter::None => { recurse(out, g.stream()); } TT::Group(g) => { let span = g.span(); let mut g = Group::new( g.delimiter(), flatten_none_groups(g.stream()), ); // We lose some span information here. g.set_span(span); out.extend([TT::Group(g)]); } _ => out.extend([tt]), } } } let mut out = TokenStream::new(); recurse(&mut out, ts); out } trait LitComparable { fn lc_compare( a: &Self, b: &Self, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result; } trait LitConvertible { type V: Eq; fn lc_convert(&self, cmp_loc: &ErrorLoc<'_>) -> syn::Result; } fn str_check_suffix( suffix: &str, span: Span, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result<()> { if suffix.is_empty() { Ok(()) } else { Err([(span, "literal"), *cmp_loc].error( "comparison of string/byte/character literals with suffixes is not supported" )) } } macro_rules! impl_LitComparable_str { { $lit:ty, $val:ty } => { impl LitConvertible for $lit { type V = $val; fn lc_convert(&self, cmp_loc: &ErrorLoc<'_>) -> syn::Result { str_check_suffix(self.suffix(), self.span(), cmp_loc)?; Ok(self.value()) } } } } impl_LitComparable_str!(syn::LitStr, String); impl_LitComparable_str!(syn::LitByteStr, Vec); impl_LitComparable_str!(syn::LitByte, u8); impl_LitComparable_str!(syn::LitChar, char); impl LitComparable for T { fn lc_compare( a: &Self, b: &Self, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { Ok(Equality::cmpeq( // &a.lc_convert(cmp_loc)?, &b.lc_convert(cmp_loc)?, )) } } impl LitConvertible for syn::LitBool { type V = (); fn lc_convert(&self, _cmp_loc: &ErrorLoc<'_>) -> syn::Result { Err(self.error( "internal error - TokenTree::Literal parsed as syn::Lit::Bool", )) } } impl LitConvertible for syn::LitFloat { type V = String; fn lc_convert(&self, _cmp_loc: &ErrorLoc<'_>) -> syn::Result { Ok(self.token().to_string()) } } impl LitComparable for syn::LitInt { fn 
lc_compare( a: &Self, b: &Self, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { match ( a.base10_parse::(), b.base10_parse::(), ) { (Ok(a), Ok(b)) => Ok(Equality::cmpeq(&a, &b)), (Err(ae), Err(be)) => Err( [(a.span(), &*format!("left: {}", ae)), (b.span(), &*format!("right: {}", be)), *cmp_loc, ].error( "integer literal comparison with both values >u64 is not supported" )), (Err(_), Ok(_)) | (Ok(_), Err(_)) => Ok(Different), } } } fn lit_cmpeq( a: &TokenTree, b: &TokenTree, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { let mk_lit = |tt: &TokenTree| -> syn::Result { syn::parse2(tt.clone().into()) }; let a = mk_lit(a)?; let b = mk_lit(b)?; syn_lit_cmpeq_approx(a, b, cmp_loc) } /// Compare two literals the way `approx_equal` does /// /// `pub` just so that the tests in `directly.rs` can call it pub fn syn_lit_cmpeq_approx( a: syn::Lit, b: syn::Lit, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { macro_rules! match_lits { { $( $V:ident )* } => { let mut error_locs = vec![]; for (lit, lr) in [(&a, "left"), (&b, "right")] { match lit { $( syn::Lit::$V(_) => {} )* _ => error_locs.push((lit.span(), lr)), } } if !error_locs.is_empty() { return Err(error_locs.error( "unsupported literal(s) in approx_equal comparison" )); } match (&a, &b) { $( (syn::Lit::$V(a), syn::Lit::$V(b)) => LitComparable::lc_compare(a, b, cmp_loc), )* _ => Ok(Different), } } } // We do not support comparison of `CStr`. // c"..." literals are recognised only by Rust 1.77, // and we would need syn 2.0.59 to parse them. // So this would require // - bumping our syn dependency to 2.0.59 globally, // or somehow making that feature-conditional, // or messing about parsing the lockfile in build.rs. // - Adding an MSRV-influencing feature, // or testing the rustc version in build.rs. // I hoping we can put this off. match_lits! 
{ Str ByteStr Byte Char Bool Int Float } } fn tt_cmpeq( a: TokenTree, b: TokenTree, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { let discrim = |tt: &_| match tt { TT::Punct(_) => 0, TT::Literal(_) => 1, TT::Ident(_) => 2, TT::Group(_) => 3, }; cmpeq!(discrim(&a), discrim(&b)); match (a, b) { (TT::Group(a), TT::Group(b)) => group_cmpeq(a, b, cmp_loc), (a @ TT::Literal(_), b @ TT::Literal(_)) => lit_cmpeq(&a, &b, cmp_loc), (a, b) => Ok(Equality::cmpeq(&a.to_string(), &b.to_string())), } } fn group_cmpeq( a: Group, b: Group, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { let delim = |g: &Group| Group::new(g.delimiter(), TokenStream::new()).to_string(); cmpeq!(delim(&a), delim(&b)); ts_cmpeq(a.stream(), b.stream(), cmp_loc) } /// Internal, recursive, comparison of flattened `TokenStream`s fn ts_cmpeq( a: TokenStream, b: TokenStream, cmp_loc: &ErrorLoc<'_>, ) -> syn::Result { for ab in a.into_iter().zip_longest(b) { let (a, b) = match ab { EitherOrBoth::Both(a, b) => (a, b), EitherOrBoth::Left(_) => return Ok(Different), EitherOrBoth::Right(_) => return Ok(Different), }; match tt_cmpeq(a, b, cmp_loc)? { Equal => {} neq => return Ok(neq), } } return Ok(Equal); } /// Compares two `TokenStream`s for "equivalence" /// /// We intend that two `TokenStream`s count as "equivalent" /// if they mean the same thing to the compiler, /// modulo any differences in spans. /// /// We also disregard spacing. This is not 100% justifiable but /// I think there are no token sequences differing only in spacing /// which are *both* valid and which differ in meaning. /// /// ### Why ?! /// /// `< <` and `<<` demonstrate that it is not possible to provide /// a fully correct and coherent equality function on Rust tokens, /// without knowing the parsing context: /// /// In places where `<<` is a shift operator, `< <` is not legal. /// But in places where `<<` introduces two lots of generics, /// `<<` means the same. 
/// /// I think a function which treats `< <` and `<<` as equal is more useful /// than one that doesn't, because it will DTRT for types. /// /// ### `None`-delimited `Group`s /// /// We flatten these /// /// This is necessary, because otherwise /// apparently-identical pieces of code count as different. /// /// This does mean that two things which are `approx_equal` /// can be expressions with different values! /// /// But, the Rust grammar for types doesn't permit ambiguity, /// so the type equality guarantee of `approx_equal` is preserved. // // Comparing for equality has to be done by steam. // And a lot of stringification. pub fn tokens_cmpeq( a: TokenStream, b: TokenStream, cmp_span: Span, ) -> syn::Result { let a = flatten_none_groups(a); let b = flatten_none_groups(b); ts_cmpeq(a, b, &(cmp_span, "comparison")) } derive-deftly-macros-1.0.0/beta.rs000064400000000000000000000062141046102023000151330ustar 00000000000000//! Handle `beta_deftly` template option, when `beta` cargo feature enabled //! //! For instructions on adding a beta feature, //! see [`beta::Enabled`]. //! //! This is a bit annoying. It has to be an ambient property, //! so that the syn `Parse` trait can be implemented. use super::prelude::*; use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe}; /// Token indicating that beta feature(s) are or can be enabled /// /// When adding a new beta feature: /// /// * Put an instance of [`beta::Enabled`] /// in the appropriate piece of parsed template syntax, /// For example, in the [`SubstDetails`](super::syntax::SubstDetails) /// along with the `O::` markers. /// /// * When parsing, obtain the value from [`Enabled::new_for_syntax`]. /// /// * Add a test case to `tests/minimal-ui/disabled.rs` /// which *omits* the `beta_deftly` option, and therefore fails, /// thus demonstrating that the feature gate works as intended. 
/// #[derive(Debug, Copy, Clone, Eq, PartialEq)] #[non_exhaustive] pub struct Enabled {} #[derive(Debug, Copy, Clone, Eq, PartialEq)] enum ThreadState { Unset, Set(Option), } use ThreadState as TS; thread_local! { static ENABLED: Cell = Cell::new(TS::Unset); } /// Workaround for `LazyStatic::get` MSRV of 1.73.0. fn threadlocal_get() -> ThreadState { ENABLED.with(|c| c.get()) } fn threadlocal_set(s: ThreadState) { ENABLED.with(|c| c.set(s)) } /// Call `f` with beta features enabled or not /// /// Used by the parser for `TopTemplate` pub fn with_maybe_enabled( enabled: Option, f: impl FnOnce() -> R, ) -> R { assert_eq!(threadlocal_get(), TS::Unset); threadlocal_set(TS::Set(enabled)); // Unwind safety: we re-throw the panic, // so even if f or R wasn't, no-one observes any broken invariant. let r = catch_unwind(AssertUnwindSafe(f)); threadlocal_set(TS::Unset); match r { Ok(r) => r, Err(e) => resume_unwind(e), } } impl Enabled { /// If the cargo feature is enabled, return `Ok(Enabled)` /// /// Used when parsing the `beta_deftly` template option. // // (In this version of the source code it *always* returns Ok. // Returning Err is done by beta_disabled.rs.) pub fn new_for_dd_option(_: Span) -> syn::Result { Ok(Enabled {}) } /// If the `beta_deftly` template feature is enabled, return `Ok(Enabled)` /// /// Used when parsing beta syntax, in templates. #[allow(dead_code)] // sometimes we might not have any beta features pub fn new_for_syntax(span: Span) -> syn::Result { match threadlocal_get() { TS::Unset => { Err(span.error("internal error! 
beta::ENABLED Unset")) } TS::Set(ue) => ue.ok_or_else(|| { span.error( "beta derive-deftly feature used, without `beta_deftly` template option" ) }), } } /// Makes `new_for_syntax` work properly within `f`, in test cases #[cfg(test)] #[allow(dead_code)] pub fn test_with_parsing(f: impl FnOnce() -> R) -> R { with_maybe_enabled(Some(Enabled {}), f) } } derive-deftly-macros-1.0.0/beta_disabled.rs000064400000000000000000000014651046102023000167650ustar 00000000000000//! Version of `mod beta` for when the cargo feature is disabled //! //! See `beta.rs` for dev documentation. use super::prelude::*; #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum Enabled {} pub fn with_maybe_enabled(_: Option, f: impl FnOnce() -> R) -> R { f() } impl Enabled { pub fn new_for_dd_option(span: Span) -> syn::Result { Err(span.error( "derive-deftly's `beta_deftly` template option is only available when the `beta` cargo feature is also enabled" )) } #[allow(dead_code)] // sometimes we might not have any beta features pub fn new_for_syntax(span: Span) -> syn::Result { Err(span.error( "beta derive-deftly feature used, which requires both the `beta` cargo feature and the `beta_deftly` template option" )) } } derive-deftly-macros-1.0.0/boolean.rs000064400000000000000000000161261046102023000156420ustar 00000000000000//! Handling of boolean evaluation (conditions) use super::framework::*; /// Implementor of [`SubstParseContext`] for booleans /// /// No values of this type are ever created - /// it's just a generic parameter, used to select the associated /// marker types (and their constructor methods( in SubstParseContext. /// /// So it can be uninhabited. 
#[derive(Debug)] pub struct BooleanContext(Void); pub struct Found; fn is_found(r: Result<(), Found>) -> bool { r.is_err() } impl SubstParseContext for BooleanContext { type NotInPaste = (); type NotInBool = Void; type BoolOnly = (); const IS_BOOL: bool = true; type DbgContent = Subst; fn not_in_paste(_: &impl Spanned) -> syn::Result<()> { Ok(()) } fn bool_only(_: &impl Spanned) -> syn::Result<()> { Ok(()) } fn meta_recog_usage() -> meta::Usage { meta::Usage::BoolOnly } fn not_in_bool(span: &impl Spanned) -> syn::Result { Err(span.error( "derive-deftly construct is an expansion - not valid in a condition", )) } type SpecialParseContext = (); fn missing_keyword_arguments(kw_span: Span) -> syn::Result { Err(kw_span.error("missing parameters to condition")) } } impl Subst { pub fn eval_bool(&self, ctx: &Context) -> syn::Result { // eprintln!("@@@@@@@@@@@@@@@@@@@@ EVAL {:?}", self); let kw_span = self.kw_span; let v_fields = || ctx.variant(&kw_span).map(|v| &v.fields); use syn::Fields as SF; let expand = |x: &Template<_>| { let mut out = TokenAccumulator::new(); x.expand(ctx, &mut out); let out = out.tokens()?; Ok::(out) }; let r = match &self.sd { SD::is_enum(..) => ctx.is_enum(), SD::is_struct(..) => matches!(ctx.top.data, syn::Data::Struct(_)), SD::is_union(..) => matches!(ctx.top.data, syn::Data::Union(_)), SD::v_is_unit(..) => matches!(v_fields()?, SF::Unit), SD::v_is_tuple(..) => matches!(v_fields()?, SF::Unnamed(..)), SD::v_is_named(..) => matches!(v_fields()?, SF::Named(..)), SD::tgens(_) => !ctx.top.generics.params.is_empty(), SD::Xmeta(sm) => { let meta::SubstMeta { desig: meta::Desig { label, scope: _ }, as_, default, } = sm; match default { None => {} Some((_, nb, _)) => void::unreachable(*nb), }; use meta::SubstAs as mSA; if let Some(as_) = as_ { match as_ { mSA::expr(nb, ..) | mSA::ident(nb, ..) | mSA::items(nb, ..) | mSA::path(nb) | mSA::str(nb) | mSA::token_stream(nb, ..) 
| mSA::ty(nb) => void::unreachable(*nb), } }; is_found(label.search_eval_bool(sm.pmetas(ctx, kw_span)?)) } SD::is_empty(_, content) => expand(content)?.is_empty(), SD::approx_equal(_, [a, b]) => { tokens_cmpeq(expand(a)?, expand(b)?, kw_span)? == Equality::Equal } SD::UserDefined(name) => name.lookup_eval_bool(ctx)?, SD::False(..) => false, SD::True(..) => true, SD::not(v, _) => !v.eval_bool(ctx)?, SD::any(vs, _) => vs .iter() .find_map(|v| match v.eval_bool(ctx) { Ok(true) => Some(Ok(true)), Err(e) => Some(Err(e)), Ok(false) => None, }) .unwrap_or(Ok(false))?, SD::all(vs, _) => vs .iter() .find_map(|v| match v.eval_bool(ctx) { Ok(true) => None, Err(e) => Some(Err(e)), Ok(false) => Some(Ok(false)), }) .unwrap_or(Ok(true))?, SD::Vis(vis, _) => match vis.syn_vis(ctx, kw_span)? { syn::Visibility::Public(_) => true, _ => false, }, SD::dbg(ddr) => { let r = ddr.content_parsed.eval_bool(ctx); let () = &ddr.content_string; let w = |s: fmt::Arguments| { eprintln!( "derive-deftly dbg condition {} evaluated to {}", ddr.display_heading(ctx), s, ) }; match &r { Ok(y) => w(format_args!("{:?}", y)), Err(e) => w(format_args!("error: {}", e)), }; r? 
} // ## maint/check-keywords-documented NotInBool ## SD::tname(not_in_bool) | SD::ttype(not_in_bool) | SD::tdeftype(not_in_bool) | SD::vname(not_in_bool) | SD::fname(not_in_bool) | SD::ftype(not_in_bool) | SD::vtype(_, _, not_in_bool) | SD::tdefkwd(not_in_bool) | SD::tattrs(_, _, not_in_bool) | SD::vattrs(_, _, not_in_bool) | SD::fattrs(_, _, not_in_bool) | SD::tdefgens(_, not_in_bool) | SD::tgnames(_, not_in_bool) | SD::twheres(_, not_in_bool) | SD::vpat(_, _, not_in_bool) | SD::fpatname(not_in_bool) | SD::tdefvariants(_, _, not_in_bool) | SD::fdefine(_, _, not_in_bool) | SD::vdefbody(_, _, _, not_in_bool) | SD::paste(_, not_in_bool) | SD::ChangeCase(_, _, not_in_bool) | SD::when(_, not_in_bool) | SD::define(_, not_in_bool) | SD::defcond(_, not_in_bool) | SD::For(_, not_in_bool) | SD::If(_, not_in_bool) | SD::select1(_, not_in_bool) | SD::ignore(_, not_in_bool) | SD::error(_, not_in_bool) | SD::dbg_all_keywords(not_in_bool) | SD::Crate(_, not_in_bool) => void::unreachable(*not_in_bool), }; Ok(r) } } impl DefinitionName { fn lookup_eval_bool(&self, ctx: &Context<'_>) -> syn::Result { let (def, ctx) = ctx.find_definition::(self)?.ok_or_else(|| { let mut error = self.error(format!( "user-defined condition `{}` not found", self, )); if let Some(def) = ctx.definitions.find_raw::(self) { // Condition syntax looks like fine tokens, // so the ${define } wouldn't spot this mistake. error.combine( def.name.error( "this user-defined expansion used as a condition (perhaps you meant ${defcond ?}" ) ); } error })?; def.body.eval_bool(&ctx) } } derive-deftly-macros-1.0.0/build.rs000064400000000000000000000015311046102023000153140ustar 00000000000000// build.rs for derive-deftly-macros // We recompile some of our macro code in tests, with cargo // features that aren't declared in the package's Cargo.toml. // And, we have a nonstandard RUSTFLAGS --cfg=derive_deftly_dprint // for debugging output. // // Here, we tell rustc, via cargo, that these are all allowed. 
// // Another effect is that this build.rs file // causes OUT_DIR to be available in `check.rs`. // This is a subset of tests/build.rs, written to be as minimal // (and fast to compile) as possible. fn main() { // We must use deprecated single colon `cargo:rustc...` syntax, // because otherwise cargo thinks we're violating our MSRV. // https://github.com/rust-lang/cargo/issues/14147 println!( r#"cargo:rustc-check-cfg=cfg(derive_deftly_dprint) cargo:rustc-check-cfg=cfg(feature, values("bizarre"))"# ); } derive-deftly-macros-1.0.0/check.rs000064400000000000000000000140221046102023000152710ustar 00000000000000//! Implementation of the `expect` option use crate::prelude::*; /// Value for an `expect` #[derive(Debug, Clone, Copy, Eq, PartialEq, EnumString, Display)] #[allow(non_camel_case_types)] pub enum Target { items, expr, } /// Local context for a syntax check operation struct Checking<'t> { ctx: &'t framework::Context<'t>, output: &'t mut TokenStream, target: DdOptVal, } /// Main entrypoint /// /// Checks that `output` can be parsed as `target`. /// /// If not, replaces `output` with something which will generate /// compiler error(s) which the user will find helpful: /// * A `compile_error!` invocation with the original error span /// * include_file!` for a generated temporary file /// containing the text of the output, /// so that the compiler will point to the actual error. 
pub fn check_expected_target_syntax( ctx: &framework::Context, output: &mut TokenStream, target: DdOptVal, ) { check::Checking { ctx, output, target, } .check(); } pub fn check_expect_opcontext( op: &DdOptVal, context: OpContext, ) -> syn::Result<()> { use OpContext as OC; match (context, op.value) { (OC::TemplateDefinition, Target::items) => Ok(()), (OC::TemplateDefinition, _) => { Err(op.span.error( "predefined templates must always expand to items", // )) } _ => Ok(()), } } impl Target { /// Checks if `ts` can parse as `self`, returning the error if not fn perform_check(self, ts: TokenStream) -> Option { fn chk(ts: TokenStream) -> Option { syn::parse2::>(ts).err() } use Target::*; match self { items => chk::>>(ts), expr => chk::(ts), } } /// Tokens for `include!...` to include syntax element(s) like `self` fn include_syntax(self, file: &str) -> TokenStream { use Target::*; match self { items => quote! { include!{ #file } }, expr => quote! { include!( #file ) }, } } /// Make a single output, syntactically a `self.target`, out of pieces /// /// `err` is a `compile_error!` call, /// and `expansion` is typically the template expansion output. 
fn combine_outputs( self, mut err: TokenStream, expansion: TokenStream, ) -> TokenStream { use Target::*; match self { items => { err.extend(expansion); err } expr => quote!( ( #err, #expansion ) ), } } } impl Checking<'_> { /// Checks that `tokens` can be parsed as `T` /// /// Does the actual work of [`check_expected_target_syntax`] fn check(self) { let err = self.target.value.perform_check(self.output.clone()); let err = match err { Some(err) => err, None => return, }; let broken = mem::take(self.output); let err = err.into_compile_error(); let expansion = expand_via_file(self.ctx, self.target.value, broken) .map_err(|e| { Span::call_site() .error(format!( "derive-deftly was unable to write out the expansion to a file for fuller syntax error reporting: {}", e )) .into_compile_error() }) .unwrap_or_else(|e| e); *self.output = self.target.value.combine_outputs(err, expansion); } } /// Constructs an `include!` which includes the text for `broken` /// /// Appends the `include` to `checking.output`. /// /// If this can't be done, reports why not. 
fn expand_via_file( ctx: &framework::Context, target: Target, broken: TokenStream, ) -> Result { use sha3::{Digest as _, Sha3_256}; use std::{fs, io, io::Write as _, path::PathBuf}; let text = format!( "// {}, should have been {}:\n{}\n", ctx.expansion_description(), target, broken, ); let hash: String = { let mut hasher = Sha3_256::new(); hasher.update(&text); let hash = hasher.finalize(); const HASH_LEN_BYTES: usize = 12; hash[0..HASH_LEN_BYTES].iter().fold( String::with_capacity(HASH_LEN_BYTES * 2), |mut s, b| { write!(s, "{:02x}", b).expect("write to String failed"); s }, ) }; let dir: PathBuf = [env!("OUT_DIR"), "derive-deftly~expansions~"] .iter() .collect(); match fs::create_dir(&dir) { Ok(()) => {} Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {} Err(e) => return Err(format!("create dir {:?}: {}", &dir, e)), }; let leaf = format!("dd-{}.rs", hash); let some_file = |leaf: &str| { let mut file = dir.clone(); file.push(leaf); file }; let file = some_file(&leaf); let file = file .to_str() .ok_or_else(|| format!("non UTF-8 path? from env var! {:?}", file))?; // We *overwrite* the file in place. // // This is because it's theoretically possible that multiple calls // to this function, at the same time, might be generating files // with identical contents, and therefore the same name. // // So we open it with O_CREATE|O_WRITE but *not* O_TRUNC, // and write our data, and then declare our job done. // This is idempotent and concurrency-safe. // // There is no need to truncate the file, since all writers // are writing the same text. (If we change the hashing scheme, // we must change the filename too.) 
let mut fh = fs::OpenOptions::new() .write(true) .create(true) .truncate(false) .open(file) .map_err(|e| format!("create/open {:?}: {}", &file, e))?; fh.write_all(text.as_ref()) .map_err(|e| format!("write {:?}: {}", &file, e))?; Ok(target.include_syntax(file)) } derive-deftly-macros-1.0.0/compat_syn_1.rs000064400000000000000000000020731046102023000166130ustar 00000000000000//! Definitions for compatibility with syn 1 use super::prelude::*; /// syn 2 has this distinct type for start/end delimiters for a group pub use proc_macro2::Span as DelimSpan; //---------- Spanned ---------- pub use syn::spanned::Spanned; //---------- Attribute methods ---------- pub trait AttributeExt1 { /// syn 2 has this as an inherent method fn path(&self) -> &syn::Path; } impl AttributeExt1 for syn::Attribute { fn path(&self) -> &syn::Path { &self.path } } impl AttributeExt12 for syn::Attribute { fn call_in_parens(&self, f: F) -> syn::Result where F: FnOnce(ParseStream<'_>) -> syn::Result, { (|outer: ParseStream<'_>| { let inner; let _paren = parenthesized!(inner in outer); f(&inner) }) .parse2(self.tokens.clone()) } } //---------- VisPublic ---------- pub trait VisPublicExt { fn pub_token(self) -> syn::token::Pub; } impl VisPublicExt for syn::VisPublic { fn pub_token(self) -> syn::token::Pub { self.pub_token } } derive-deftly-macros-1.0.0/compat_syn_2.rs000064400000000000000000000026631046102023000166210ustar 00000000000000//! Definitions for compatibility with syn 2 //! //! This, along with `compat_syn_1.rs` and //! [`compat_syn_common`](super::compat_syn_common) //! exists to minimise the delta in the commit which switches to syn 2. //! //! This approach would also allow us to support *both* syn 1 and syn 2, //! and correspondingly reduce our MSRV back to 1.54, //! eg via cargo features, //! if that turns out to be desirable. 
// TODO we're committed to syn 2 now, we could maybe remove some of this use super::prelude::*; pub use proc_macro2::extra::DelimSpan; //---------- Spanned ---------- /// Local version of `Spanned` /// /// Works around `Spanned` being sealed in syn 2. /// . /// (Not needed with syn 1, but would be harmless there.) pub trait Spanned { fn span(&self) -> Span; } impl Spanned for T { fn span(&self) -> Span { syn::spanned::Spanned::span(self) } } //---------- Attribute methods ---------- impl AttributeExt12 for syn::Attribute { fn call_in_parens(&self, f: F) -> syn::Result where F: FnOnce(ParseStream<'_>) -> syn::Result, { let list = self.meta.require_list()?; let _paren: syn::token::Paren = match list.delimiter { syn::MacroDelimiter::Paren(p) => p, _ => return Err(list.error("expected parenthesised attributes")), }; f.parse2(list.tokens.clone()) } } derive-deftly-macros-1.0.0/compat_syn_common.rs000064400000000000000000000012471046102023000177450ustar 00000000000000//! Definitions for compatibility with syn, common to 1 and 2 use super::prelude::*; //---------- Attribute handling ---------- /// Helper trait to deal with syn's idea of attribute contents /// /// We expect all our attributes to be parenthesised. pub trait AttributeExt12 { /// Parse the content within an `#[attribute(content)]` /// /// Parses the `content` as `T`. fn parse_in_parens(&self) -> syn::Result { self.call_in_parens(Parse::parse) } /// Like `parse_in_parens`, but takes a parser callback. 
fn call_in_parens(&self, f: F) -> syn::Result where F: FnOnce(ParseStream<'_>) -> syn::Result; } derive-deftly-macros-1.0.0/dbg_allkw.rs000064400000000000000000000156441046102023000161550ustar 00000000000000use super::framework::*; use std::fmt::Error as E; use std::fmt::Result as R; use std::fmt::Write; struct Out<'c> { out: String, subset_only: Option<&'c WithinVariant<'c>>, } impl Write for Out<'_> { fn write_str(&mut self, s: &str) -> fmt::Result { self.out.write_str(s) } } pub fn dump(ctx: &Context) { let w = (|| { let out = String::new(); let subset_only = match ctx.within_loop { WithinLoop::None => None, WithinLoop::When | WithinLoop::Body => { Some(ctx.variant.expect("within loop, but not variant!")) } }; let mut w = Out { out, subset_only }; let description = format!("derive-deftly expansions dump {}", ctx.display_for_dbg()); writeln!(w, "---------- {} (start) ----------", description)?; dump_whole(&mut w, ctx)?; writeln!(w, "---------- {} (end) ----------", description)?; Ok::<_, E>(w.out) })() .expect("write to String failed"); eprint!("{}", w); } fn template_result(ctx: &Context, templ: TokenStream) -> String { let parser = |input: &ParseBuffer<'_>| Template::parse(input); let templ: Template = parser.parse2(templ).expect("failed to parse own template"); let result = (|| { let mut output = TokenAccumulator::new(); templ.expand(ctx, &mut output); output.tokens() })(); match result { Ok(result) => result.to_string(), Err(e) => format!("", e), } } fn dump_any_one( w: &mut Out, ctx: &Context, show_templ: TokenStream, show_op: &str, make_real_templ: &dyn Fn(TokenStream) -> TokenStream, ) -> R { let show_templ_string = { let mut s = show_templ.to_string(); if let Some(inner) = { s.strip_prefix("$ {") .or_else(|| s.strip_prefix("${")) .and_then(|s| s.strip_suffix('}')) } { s = format!("${{{}}}", inner.trim()); } s }; let lh = format!("{:12} {}", show_templ_string, show_op); let templ = make_real_templ(show_templ); writeln!(w, " {:16} {}", lh, 
template_result(ctx, templ))?; Ok(()) } fn dump_expand_one(w: &mut Out, ctx: &Context, templ: TokenStream) -> R { dump_any_one(w, ctx, templ, "=>", &|t| t) } fn dump_bool_one(w: &mut Out, ctx: &Context, templ: TokenStream) -> R { let make_real = |templ| quote! { ${if #templ { true } else { false }} }; dump_any_one(w, ctx, templ, "=", &make_real) } macro_rules! expand { { $w_ctx:expr, $($t:tt)* } => { dump_expand_one($w_ctx.0, $w_ctx.1, quote!{ $($t)* })?; } } macro_rules! bool { { $w_ctx:expr, $($t:tt)* } => { dump_bool_one($w_ctx.0, $w_ctx.1, quote!{ $($t)* })?; } } fn dump_whole(mut w: &mut Out, ctx: &Context) -> R { writeln!(w, "top-level:")?; let c = (&mut w, ctx); expand! { c, $tname } expand! { c, $ttype } expand! { c, $tvis } expand! { c, $tgens } expand! { c, $tgnames } expand! { c, $twheres } expand! { c, $tdeftype } expand! { c, $tdefgens } expand! { c, $tdefkwd } expand! { c, ${tdefvariants VARIANTS..} } bool! { c, is_struct } bool! { c, is_enum } bool! { c, is_union } bool! { c, tvis } bool! { c, tgens } expand! { c, $tattrs } // Don't debug dump these. But list them here, so that // check-keywords-documented is happy. (That is nicer than // using the check-keywords-documented exception table.) if false { // Perhaps we should search attributes and dump what would work expand! { c, $tmeta } expand! { c, $vmeta } expand! { c, $fmeta } bool! { c, tmeta } bool! { c, vmeta } bool! { c, fmeta } // Too complex to demonstrate expand! { c, $paste } // Too subtle to demonstrate expand! { c, $crate } // Recursive, would be silly expand! { c, $dbg_all_keywords } // Would throw an error if we expanded it expand! { c, $error } // Control flow, can't sensibly be dumped expand! { c, $when } expand! { c, $if } expand! { c, $select1 } expand! { c, $define } expand! { c, $defcond } expand! { c, $ignore } expand! { c, $dbg } bool! { c, not } bool! { c, all } bool! { c, any } // Requires input arguments to do anything bool! { c, dbg } bool! { c, is_empty } bool! 
{ c, approx_equal } // Vacuous bool! { c, true } bool! { c, false } } if let Some(wv) = w.subset_only { dump_variant(w, ctx, wv)?; dump_user_defined(w, ctx)?; } else { WithinVariant::for_each(ctx, |ctx, wv| dump_variant(w, ctx, wv))?; } Ok(()) } fn variant_heading(w: &mut Out, wv: &WithinVariant) -> R { match wv.variant { None => write!(w, "value")?, Some(v) => write!(w, "variant {}", v.ident)?, }; Ok(()) } fn dump_variant(mut w: &mut Out, ctx: &Context, wv: &WithinVariant) -> R { variant_heading(w, wv)?; writeln!(w, ":")?; let c = (&mut w, ctx); expand! { c, $vname } expand! { c, $vtype } expand! { c, $vpat } expand! { c, ${vdefbody VNAME FIELDS..} } bool! { c, v_is_unit } bool! { c, v_is_tuple } bool! { c, v_is_named } expand! { c, $vattrs } if let Some(_) = w.subset_only { dump_field(w, ctx, ctx.field)?; } else { WithinField::for_each(ctx, |ctx, wf| dump_field(w, ctx, Some(wf)))?; } Ok(()) } fn dump_field(mut w: &mut Out, ctx: &Context, wf: Option<&WithinField>) -> R { variant_heading(w, ctx.variant.expect("heading but not variant!"))?; if let Some(wf) = wf { let fname = wf.fname(Span::call_site()).to_token_stream(); writeln!(w, ", field {}:", fname)?; } else { writeln!(w, ", no field:")?; } let c = (&mut w, ctx); expand! { c, $fname } expand! { c, $ftype } expand! { c, $fvis } expand! { c, $fdefvis } expand! { c, $fpatname } expand! { c, $fdefine } bool! { c, fvis } bool! { c, fdefvis } expand! { c, $fattrs } Ok(()) } fn dump_user_defined(mut w: &mut Out, ctx: &Context) -> R { // evade macro hygiene let mut c; let mut name; macro_rules! print_definitions { { $heading:expr, $B:ty, $body:stmt } => { { let mut set = BTreeSet::new(); for def in ctx.definitions.iter::<$B>().flatten() { set.insert(&def.name); } if !set.is_empty() { writeln!(w, "{}", $heading)?; c = (&mut w, ctx); for n in set { name = n; $body } } } } } print_definitions! { "user-defined expansions:", DefinitionBody, expand! { c, $#name } } print_definitions! 
{ "user-defined conditions:", DefCondBody, bool! { c, #name } } Ok(()) } derive-deftly-macros-1.0.0/define.rs000064400000000000000000000210541046102023000154510ustar 00000000000000//! Macro impl for defining a template `define_derive_deftly!` use super::framework::*; #[derive(Debug, Clone)] struct TemplateDefinition { doc_attrs: Vec, export: Option, templ_name: TemplateName, options: UnprocessedOptions, template: TokenStream, } impl Parse for TemplateDefinition { fn parse(input: ParseStream) -> syn::Result { // This rejects Rust keywords, which is good because // for example `#[derive_deftly(pub)]` ought not to mean to apply // a template called `pub`. See ticket #1. let doc_attrs = input.call(syn::Attribute::parse_outer)?; for attr in &doc_attrs { if !attr.path().is_ident("doc") { return Err(attr .path() .error("only doc attributes are supported")); } } let export = MacroExport::parse_option(input)?; let templ_name = input.parse()?; let options = UnprocessedOptions::parse(&input, OpContext::TemplateDefinition)?; let la = input.lookahead1(); if la.peek(Token![=]) { let equals: Token![=] = input.parse()?; return Err(equals.error( "You must now write `define_derive_deftly! { Template: ... }`, not `Template =`, since derive-deftly version 0.14.0" )); } else if la.peek(Token![:]) { let _colon: Token![:] = input.parse()?; } else { return Err(la.error()); }; let template = input.parse()?; Ok(TemplateDefinition { doc_attrs, export, templ_name, options, template, }) } } /// Replaces every `$` with `$orig_dollar` /// /// Eg, where the template says `$fname`, we emit `$orig_dollar fname`. /// When this is found in the macro_rules expander part /// of a precanned template, /// macro_rules doesn't expand /// it because `orig_dollar` isn't one of the arguments to the macro. /// /// Then, we spot these when parsing the template, and disregard them. /// That is done by /// [`syntax::deescape_orig_dollar`](super::syntax::deescape_orig_dollar). 
/// /// See `doc/implementation.md` for why this is needed. /// /// This has the weird result that there's a sometimes /// (namely, when using an adhoc, rather than precanned template) /// an undocumented `orig_dollar` expansion keyword, /// with strange behaviour. /// No-one is likely to notice this. /// /// Additionally, if we're turning `$crate` into `$orig_dollar crate`, /// we change the keyword `crate` to `_dd_intern_crate` /// (and `${crate}` likewise), with the span of the original. /// This is necessary to avoid clippy seeing the bare `crate` /// and thinking the user should have written `$crate` /// (whereas, in fact, they did), /// and emitting a spurious lint `crate_in_macro_def`. /// `$_dd_intern_crate` is an internal alias for d-d's `$crate`. /// /// ### Alternative tactics we rejected: /// /// * Pass a literal dollar sign `$` into the template pattern macro, /// capture it with a macro rules parameter `$dollar:tt`, /// and write `$dollar` in the template. /// This gets the span wrong: the span is that of /// the literal dollar, which came from the call site, not the template. /// /// * Use a different syntax in precanned templates: /// have `escape_dollars` convert to that syntax, /// and the template parsing notice this case and /// de-escape the whole template again at the start. /// This involves processing the whole template twice for no reason. /// (And it would involve inventing an additional, different, /// and probably weird, syntax.) /// /// * As above but do the de-escaping on the fly. /// Currently, though, the information about the template context /// is not available to the parser. /// We'd have to pass it in as a thread local, /// or as an extra generic on `SubstContext` /// (producing two monomorphised copies of the whole template engine). 
pub fn escape_dollars(input: TokenStream) -> TokenStream { enum St { Dollar, DollarBrace, Other, } impl St { fn exp_kw(&self) -> bool { match self { St::Dollar | St::DollarBrace => true, St::Other => false, } } } fn handle_tt(itt: TokenTree, st: St, out: &mut TokenStream) -> St { let ott = match itt { TT::Group(g) => { let delim = g.delimiter(); let span = g.span_open(); let stream = g.stream(); let st = match (st, delim) { (St::Dollar, Delimiter::Brace) => St::DollarBrace, _ => St::Other, }; let stream = handle_ts(stream, st); let mut g = proc_macro2::Group::new(delim, stream); g.set_span(span); TT::Group(g) } TT::Punct(p) if p.as_char() == '$' => { out.extend(quote_spanned! {p.span()=> #p orig_dollar }); return St::Dollar; } TT::Ident(i) if st.exp_kw() && i == "crate" => { out.extend(quote_spanned! {i.span()=> _dd_intern_crate }); return St::Other; } other => other, }; out.extend([ott]); St::Other } fn handle_ts(input: TokenStream, mut st: St) -> TokenStream { let mut out = TokenStream::new(); for itt in input { st = handle_tt(itt, st, &mut out); } out } handle_ts(input, St::Other) } /// This is `define_derive_deftly!` pub fn define_derive_deftly_func_macro( input: TokenStream, ) -> Result { dprint_block!(&input, "define_derive_deftly! 
input"); let TemplateDefinition { doc_attrs, export, templ_name, options, template, } = syn::parse2(input)?; let mut output = TokenStream::new(); let (template, parsed_template) = { let mut template = template; let parsed = Parser::parse2( { let ue = options.beta_enabled; move |input: ParseStream| TopTemplate::parse(input, ue) }, template.clone(), ) .map_err(|e| { // Make sure the error is emitted e.into_compile_error().to_tokens(&mut output); // But from now on, let's just use an empty template template = TokenStream::new(); // parsed_template becomes Err(()) () }); (template, parsed) }; let _: Result = parsed_template; let template = escape_dollars(template); let templ_mac_name = templ_name.macro_name(); let doc_addendum = (!doc_attrs.is_empty()).then(|| { let addendum = format!( r#" This is a `derive_deftly` template. Do not invoke it directly. To use it, write: `#[derive(Deftly)] #[derive_deftly({})]`."#, templ_name ); quote!( #[doc = #addendum] ) }); let engine_macro; let export_attr; match export { None => { export_attr = quote! {}; engine_macro = engine_macro_name()?; } Some(pub_token) => { let span = pub_token.span(); export_attr = quote_spanned!(span=> #[macro_export]); engine_macro = quote_spanned!(span=> $crate::derive_deftly::derive_deftly_engine); } } // the macro must recent a dollar as its first argument because // it is hard to find a dollar otherwise! output.extend(quote! { #( #doc_attrs )* #doc_addendum #export_attr macro_rules! #templ_mac_name { { { $($driver:tt)* } [ $($aoptions:tt)* ] ( $($future:tt)* ) $($tpassthrough:tt)* } => { #engine_macro! { { $( $driver )* } [ $($aoptions)* ] () { # template } ( $crate; [#options] #templ_name; ) $($tpassthrough)* } }; { $($wrong:tt)* } => { compile_error!{concat!( "wrong input to derive-deftly template macro ", stringify!(#templ_mac_name), "; might be due to incompatible derive-deftly versions(s)", )} }; } }); dprint_block!(&output, "define_derive_deftly! 
output {}", templ_mac_name); Ok(output) } derive-deftly-macros-1.0.0/derive.rs000064400000000000000000000231531046102023000154770ustar 00000000000000//! Macro impl for capturing the driver `#[derive(Deftly)]` use super::prelude::*; /// Contents of an entry in a `#[derive_deftly(..)]` attribute enum InvocationEntry { Precanned(syn::Path, UnprocessedOptions), } // (CannedName, CannedName, ...) struct InvocationAttr { entries: Punctuated, } /// Contents of an entry in a `#[derive_deftly_adhoc(..)]` attribute #[derive(Default)] struct AdhocAttr { pub_: Option, } impl Parse for InvocationEntry { fn parse(input: ParseStream) -> syn::Result { let entry = if input.lookahead1().peek(Token![pub]) { return Err(input.error("`pub` must be in #[derive_deftly_adhoc]")); } else { let path = syn::Path::parse_mod_style(input)?; let options = if input.peek(syn::token::Bracket) { let tokens; let _bracket = bracketed!(tokens in input); UnprocessedOptions::parse( &tokens, OpContext::DriverApplicationCapture, )? 
} else { UnprocessedOptions::default() }; InvocationEntry::Precanned(path, options) }; Ok(entry) } } impl Parse for InvocationAttr { fn parse(input: ParseStream) -> syn::Result { let entries = Punctuated::parse_terminated(input)?; Ok(InvocationAttr { entries }) } } fn check_for_misplaced_atrs(data: &syn::Data) -> syn::Result<()> { let attrs = |attrs: &[syn::Attribute]| { for attr in attrs { if let Some(_) = ["derive_deftly", "derive_deftly_adhoc"] .iter() .find(|forbidden| attr.path().is_ident(forbidden)) { return Err(attr.error( "attribute is only meaningful at the data structure toplevel" )); } } Ok(()) }; let fields = |fs: &Punctuated| { for f in fs.iter() { attrs(&f.attrs)?; } Ok(()) }; let variantish = |fs: &syn::Fields| match fs { syn::Fields::Unit => Ok(()), syn::Fields::Named(n) => fields(&n.named), syn::Fields::Unnamed(u) => fields(&u.unnamed), }; let variants = |vs: &Punctuated| { for v in vs.iter() { attrs(&v.attrs)?; variantish(&v.fields)?; } Ok(()) }; match data { syn::Data::Struct(s) => variantish(&s.fields), syn::Data::Union(u) => fields(&u.fields.named), syn::Data::Enum(e) => variants(&e.variants), } } /// Returns the template macro name, for a given template name (as a path) fn templ_mac_name(mut templ_path: syn::Path) -> syn::Result { if templ_path.segments.is_empty() { return Err(templ_path .leading_colon .as_ref() .expect("path with no tokens!") .error("cannot derive_deftly the empty path!")); } let last = templ_path.segments.last_mut().expect("became empty!"); let name = TemplateName::try_from(last.ident.clone())?; last.ident = name.macro_name(); Ok(templ_path) } /// This is #[derive(Deftly)] pub fn derive_deftly( driver_stream: TokenStream, ) -> Result { use engine::ChainNext; let driver: syn::DeriveInput = syn::parse2(driver_stream.clone())?; dprint_block!(&driver_stream, "#[derive(Deftly)] input"); let driver_mac_name = format_ident!("derive_deftly_driver_{}", &driver.ident); let precanned_paths: Vec<(syn::Path, UnprocessedOptions)> = driver 
.attrs .iter() .map(|attr| { if !attr.path().is_ident("derive_deftly") { return Ok(None); } let InvocationAttr { entries } = attr.parse_in_parens()?; Ok(Some(entries)) }) .flatten_ok() .flatten_ok() .filter_map(|entry| match entry { Err(e) => Some(Err(e)), Ok(InvocationEntry::Precanned(path, options)) => { Some(Ok((path, options))) } }) .collect::>>()?; let adhoc: Option = driver .attrs .iter() .filter(|attr| attr.path().is_ident("derive_deftly_adhoc")) .inspect(|_: &&syn::Attribute| ()) .map(|attr| { let adhoc = match &attr.meta { syn::Meta::Path(_) => AdhocAttr { pub_: None }, syn::Meta::NameValue(nv) => { return Err(nv .eq_token .error("arguments (if any) must be in parens")) } syn::Meta::List(syn::MetaList { path: _, delimiter, tokens, }) => { match delimiter { syn::MacroDelimiter::Paren(_) => Ok(()), syn::MacroDelimiter::Brace(t) => Err(t.span), syn::MacroDelimiter::Bracket(t) => Err(t.span), } .map_err(|span| span.error("expected parentheses"))?; let pub_ = Parser::parse2( MacroExport::parse_option, tokens.clone(), )?; AdhocAttr { pub_ } } }; Ok::(adhoc) }) .inspect(|_: &Result| ()) // allow this attr to be repeated; any pub makes it pub .reduce(|a, b| { let pub_ = chain!(a?.pub_, b?.pub_).next(); Ok(AdhocAttr { pub_ }) }) .transpose()?; check_for_misplaced_atrs(&driver.data)?; let engine_macro = engine_macro_name()?; // If the driver contains any $ tokens, we must do something about them. // Otherwise, they might get mangled by the macro_rules expander. // In particular, the following cause trouble: // `$template`, `$passthrough` - taken as references to the // macro arguments. // `$$` - taken as a reference to the nightly `$$` macro rules feature // (which we would love to use here, but can't yet) // // `$orig_dollar` is a literal dollar which comes from the driver // invocation in invocation.rs. This technique doesn't get the span // right. 
But getting the span right here is hard without having // a whole new quoting scheme - see the discussion in the doc comment // for `escape_dollars`. // // We can't use the technique we use for the template, because that // technique relies on the fact that it's *us* that parses the template. // But the driver is parsed for us by `syn`. // // Actual `$` in drivers will be very rare. They could only appear in // attributes or the like. So, unlike with templates (which are // full of important `$`s) we can probably live with the wrong spans. let driver_escaped = escape_dollars(driver_stream); let mut output = TokenStream::new(); let mut accum_start = TokenStream::new(); if let Some(adhoc) = adhoc { accum_start.extend(quote!( _meta_used * )); let macro_export = adhoc .pub_ .map(|export| { let macro_export = quote_spanned!(export.span()=> #[macro_export]); Ok::<_, syn::Error>(macro_export) }) .transpose()?; output.extend(quote! { #[allow(unused_macros)] #macro_export macro_rules! #driver_mac_name { { { $($template:tt)* } { ($orig_dollar:tt) $(future:tt)* } $($dpassthrough:tt)* } => { #engine_macro!{ { #driver_escaped } ( ) { $($template)* } $($dpassthrough)* } }; { $($wrong:tt)* } => { compile_error!{concat!( "wrong input to derive-deftly driver inner macro ", stringify!(#driver_mac_name), "; might be due to incompatible derive-deftly versions(s)", )} }; } }); } let (chain_next, chain_rest); { let mut errs = ErrorAccumulator::default(); let mut chain = chain!( precanned_paths .into_iter() .map(|(templ_path, aoptions)| { let call = templ_mac_name(templ_path)?.to_token_stream(); let ao_versions = OpCompatVersions::ours(); let after_driver = quote!( [ #ao_versions #aoptions ] () ); Ok(ChainNext { call, after_driver }) }) .filter_map(|r| errs.handle(r)), [ChainNext { call: engine_macro.clone(), after_driver: quote!( . 
() ), }], ); chain_next = chain.next().expect("should have been nonempty!"); chain_rest = { let mut rest = TokenStream::new(); for c in chain { c.to_tokens(&mut rest); } rest }; errs.finish()?; } let ChainNext { call, after_driver } = chain_next; output.extend(quote! { #call !{ { #driver } #after_driver [ #chain_rest ] [ #accum_start ] } }); dprint_block!(&output, "#[derive(Deftly)] output for {}", &driver.ident); Ok(output) } derive-deftly-macros-1.0.0/engine.rs000064400000000000000000000313351046102023000154670ustar 00000000000000//! `derive_deftly_engine!()` use super::framework::*; use adviseable::*; /// Input to `derive_deftly_engine!`, principal form (template expansion) /// /// See `implementation.md`, /// especially /// "Overall input syntax for `derive_deftly_engine!` and templates". #[derive(Debug)] struct EngineExpandInput { driver: syn::DeriveInput, options: DdOptions, template: TopTemplate, template_crate: syn::Path, template_name: Option, chain_next: Option, chain_after: TokenStream, accum: TokenStream, } #[derive(Debug)] pub struct ChainNext { pub call: TokenStream, pub after_driver: TokenStream, } enum EngineContext { Expand { opcontext_template: OpContext, options: DdOptions, }, Final {}, } #[derive(Debug)] enum EngineInput { Expand(EngineExpandInput), Final(accum::EngineFinalInput), } impl Parse for ChainNext { fn parse(input: ParseStream) -> syn::Result { let call = input.parse::()?.to_token_stream(); let after_driver; let _ = parenthesized!(after_driver in input); let after_driver = after_driver.parse()?; Ok(ChainNext { call, after_driver }) } } impl ToTokens for ChainNext { fn to_tokens(&self, out: &mut TokenStream) { let ChainNext { call, after_driver } = self; quote!( #call (#after_driver) ).to_tokens(out); } } impl ParseAdviseable for EngineInput { fn parse_adviseable(input: ParseStream) -> AdviseableResult { let driver; let _ = braced!(driver in input); let driver = driver.parse()?; let engine_context; if input.peek(syn::token::Bracket) { 
// AOPTIONS appears iff we're being invoked for a precanned // template, rather than an adhoc one; it's from the // `#[derive()` application. let tokens; let mut options = DdOptions::default(); let _ = bracketed!(tokens in input); parse_unadvised! { tokens => || { let oc = OpContext::DriverApplicationPassed; options .parse_update(&tokens, oc) } } engine_context = EngineContext::Expand { opcontext_template: OpContext::TemplateDefinition, options, }; } else if input.peek(Token![.]) { let _indicator: Token![.] = input.parse()?; engine_context = EngineContext::Final {}; } else { engine_context = EngineContext::Expand { opcontext_template: OpContext::TemplateAdhoc, options: DdOptions::default(), }; } let future_ignored; let _ = parenthesized!(future_ignored in input); let _: TokenStream = future_ignored.parse()?; let r = match engine_context { EngineContext::Expand { opcontext_template, options, } => EngineExpandInput::parse_adviseable_remainder( driver, options, input, opcontext_template, )? .map(EngineInput::Expand), EngineContext::Final {} => { accum::EngineFinalInput::parse_adviseable_remainder( driver, input, )? .map(EngineInput::Final) } }; Ok(r) } } impl EngineExpandInput { fn parse_adviseable_remainder( driver: syn::DeriveInput, mut options: DdOptions, input: ParseStream, opcontext_template: OpContext, ) -> AdviseableResult { let template; let _ = braced!(template in input); let template_crate; let template_name; { let through_driver; let _ = parenthesized!(through_driver in input); let input = through_driver; template_crate = input.parse()?; let _: Token![;] = input.parse()?; let tokens; let _ = bracketed!(tokens in input); parse_unadvised! { tokens => || { options.parse_update(&tokens, opcontext_template) } } template_name = if input.peek(Token![;]) { None } else { Some(input.parse()?) 
};
            let _: Token![;] = input.parse()?;
            // Forward-compatibility: ignore anything trailing in the group.
            let _: TokenStream = input.parse()?;
        }
        // `[ CHAIN ]`: the next macro to call, plus the rest of the chain.
        let (chain_next, chain_after);
        {
            let chain;
            let _ = bracketed!(chain in input);
            let input = chain;
            chain_next = if !input.is_empty() {
                Some(input.parse()?)
            } else {
                None
            };
            chain_after = input.parse()?;
        }
        // `[ ACCUM ]`: the accumulated bookkeeping tokens.
        let accum;
        let _ = bracketed!(accum in input);
        let accum = accum.parse()?;
        // Forward-compatibility: ignore any trailing input.
        let _: TokenStream = input.parse()?;
        // Parse the template last, now that we know `beta_enabled`.
        let template = parse_unadvised! {
            template => || TopTemplate::parse(
                template,
                options.beta_enabled,
            )
        };
        Ok(AOk(EngineExpandInput {
            driver,
            options,
            template,
            template_crate,
            template_name,
            chain_next,
            chain_after,
            accum,
        }))
    }
}

impl<'c> Context<'c> {
    /// Calls `f` with a top-level [`Context`] for a [`syn::DeriveInput`]
    ///
    /// `Context` has multiple levels of references to values created
    /// here, so we can't easily provide `Context::new()`.
    //
    // NOTE(review): the generic parameter `R` and the generic arguments on
    // `syn::Result` / `Result` were eaten by markup-stripping in this dump;
    // reconstructed from how the closure result is propagated — confirm
    // against upstream.
    pub fn call<R>(
        driver: &syn::DeriveInput,
        template_crate: &syn::Path,
        template_name: Option<&syn::Path>,
        f: impl FnOnce(Context) -> syn::Result<R>,
    ) -> Result<R, syn::Error> {
        let tmetas = preprocess_attrs(&driver.attrs)?;

        // Helper for the single-"variant" cases (structs and unions):
        // yields `Some(())` as the variant marker plus one pseudo-variant.
        let pvariants_one = |fields| {
            let pmetas = &tmetas;
            let pfields = preprocess_fields(fields)?;
            let pvariant = PreprocessedVariant {
                fields,
                pmetas,
                pfields,
            };
            syn::Result::Ok((Some(()), vec![pvariant]))
        };

        let union_fields;
        let variants_pmetas: Vec<_>;
        let (variant, pvariants) = match &driver.data {
            syn::Data::Struct(ds) => pvariants_one(&ds.fields)?,
            syn::Data::Union(du) => {
                // Treat a union like a struct with named fields.
                union_fields = syn::Fields::Named(du.fields.clone());
                pvariants_one(&union_fields)?
            }
            syn::Data::Enum(de) => (None, {
                variants_pmetas = de
                    .variants
                    .iter()
                    .map(|variant| preprocess_attrs(&variant.attrs))
                    .try_collect()?;
                izip!(&de.variants, &variants_pmetas)
                    .map(|(variant, pmetas)| {
                        let fields = &variant.fields;
                        let pfields = preprocess_fields(&variant.fields)?;
                        Ok(PreprocessedVariant {
                            fields,
                            pmetas,
                            pfields,
                        })
                    })
                    .collect::<Result<Vec<_>, syn::Error>>()?
}), }; // `variant` is None in enums; otherwise it's Some(()) // and here we convert it to the real WithinVariant for the fields. let variant = variant.map(|()| WithinVariant { variant: None, // not actually a variant fields: pvariants[0].fields, pmetas: &pvariants[0].pmetas, pfields: &pvariants[0].pfields, }); let ctx = Context { top: &driver, template_crate, template_name, pmetas: &tmetas, field: None, variant: variant.as_ref(), pvariants: &pvariants, definitions: Default::default(), nesting_depth: 0, nesting_parent: None, within_loop: WithinLoop::None, }; f(ctx) } } impl EngineExpandInput { fn process(self) -> syn::Result { dprintln!("derive_deftly_engine! crate = {:?}", &self.template_crate); let DdOptions { dbg, driver_kind, expect_target, beta_enabled, // } = self.options; // This was used when parsing EngineExpandInput.template let _: Option<_> = beta_enabled; if let Some(exp) = driver_kind { macro_rules! got_kind { { $($kind:ident)* } => { match &self.driver.data { $( syn::Data::$kind(..) 
=> ExpectedDriverKind::$kind, )* } } } let got_kind = got_kind!(Struct Enum Union); if got_kind != exp.value { return Err([ (exp.span, "expected kind"), (self.driver.span(), "actual kind"), ] .error(format_args!( "template defined for {}, but applied to {}", exp.value, got_kind, ))); } } let outcome = Context::call( &self.driver, &self.template_crate, self.template_name.as_ref(), |ctx| { let mut output = TokenAccumulator::new(); self.template.expand(&ctx, &mut output); let output = output.tokens()?; // dbg!(&&output); if dbg { let description = ctx.expansion_description(); let dump = format!( concat!( "---------- {} (start) ----------\n", "{}\n", "---------- {} (end) ----------\n", ), &description, &output, &description, ); eprint!("{}", dump); } let mut output = output; if let Some(target) = expect_target { check::check_expected_target_syntax( &ctx, &mut output, target, ); } let metas_used = ctx.encode_metas_used(); Ok((output, metas_used)) }, ); let (expanded, metas_used) = match outcome { Ok((expanded, metas_used)) => (Ok(expanded), Ok(metas_used)), Err(e) => (Err(e), Err(())), }; let chain_call; if let Some(ChainNext { call, after_driver }) = &self.chain_next { let driver = &self.driver; let chain_after = &self.chain_after; let mut accum = self.accum.to_token_stream(); if let Some(name) = &self.template_name { accum.extend(quote!( _name [#name] )); } match &metas_used { Ok(metas_used) => { accum.extend(quote!( _meta_used #metas_used )); use meta::FindRecogMetas as _; let mut meta_recog = meta::Recognised::default(); self.template.find_recog_metas(&mut meta_recog); accum.extend(quote!( _meta_recog [#meta_recog] )); } Err(()) => { accum.extend(quote!( _error [] )); } } chain_call = quote! { #call! { { #driver } #after_driver [ #chain_after ] [ #accum ] } } } else { chain_call = TokenStream::new(); }; dprint_block!(&chain_call, "derive_deftly_engine! 
chain call"); let mut out = expanded.unwrap_or_else(|e| e.into_compile_error()); out.extend(chain_call); Ok(out) } } /// `derive_deftly_engine!` -- implements the actual template engine /// /// In my design, the input contains, firstly, literally the definition /// that #[derive(Deftly)] was applied to (see NOTES.txt). /// Using the literal input, rather than some pre-parsed version, is /// slower, but means that we aren't inventing a nontrivial data format which /// potentially crosses crate boundaries with semver implications. pub fn derive_deftly_engine_func_macro( input: TokenStream, ) -> syn::Result { dprint_block!(&input, "derive_deftly_engine! input"); let input: EngineInput = adviseable_parse2(input)?; match input { EngineInput::Expand(i) => i.process(), EngineInput::Final(i) => i.process(), } } derive-deftly-macros-1.0.0/expand.rs000064400000000000000000000641751046102023000155110ustar 00000000000000//! Expansion of a template into output tokens //! //! Contains the implementations of `fn expand()` //! for the various template types in [`super::syntax`]. use super::framework::*; impl Expand for SubstIf where Template: ExpandInfallible, O: ExpansionOutput, { fn expand(&self, ctx: &Context, out: &mut O) -> syn::Result<()> { for (condition, consequence) in &self.tests { //dbg!(&condition); if condition.eval_bool(ctx)? { //dbg!(&consequence); consequence.expand(ctx, out); return Ok(()); } } if let Some(consequence) = &self.otherwise { //dbg!(&consequence); consequence.expand(ctx, out); } Ok(()) } } impl SubstIf where Template: ExpandInfallible, O: ExpansionOutput, { fn expand_select1(&self, ctx: &Context, out: &mut O) -> syn::Result<()> { let mut found: Result)>, Vec> = Ok(None); for (condition, consequence) in &self.tests { if !condition.eval_bool(ctx)? 
{
                continue;
            }
            let cspan = condition.span();
            let error_loc = |span| (span, "true condition");
            // Track how many conditions have matched so far:
            // Ok(None) = none yet, Ok(Some(..)) = exactly one,
            // Err(locs) = more than one (collect all for the report).
            match &mut found {
                Ok(None) => found = Ok(Some((cspan, consequence))),
                Ok(Some((span1, _))) => {
                    found = Err(vec![
                        ctx.error_loc(),
                        error_loc(*span1),
                        error_loc(cspan),
                    ])
                }
                Err(several) => several.push(error_loc(cspan)),
            }
        }
        let found = found
            .map_err(|several| several.error("multiple conditions matched"))?
            .map(|(_cspan, consequence)| consequence)
            .or(self.otherwise.as_deref())
            .ok_or_else(|| {
                [ctx.error_loc(), (self.kw_span, "select1 expansion")]
                    .error("no conditions matched, and no else clause")
            })?;
        found.expand(ctx, out);
        Ok(())
    }
}

impl SubstVType {
    /// Expands `$vtype` (and, via `self_def`, helps with `$vpat`)
    ///
    /// `self_def` says what to expand if no explicit `self = ` spec
    /// was given in the template.
    //
    // NOTE(review): the `<TokenAccumulator>` / `<Template<..>>` generic
    // arguments in this signature and closure were eaten by
    // markup-stripping in this dump; reconstructed from the call sites —
    // confirm against upstream.
    fn expand(
        &self,
        ctx: &Context,
        out: &mut TokenAccumulator,
        kw_span: Span,
        self_def: SubstDetails<TokenAccumulator>,
    ) -> syn::Result<()> {
        // Expand the user-supplied spec if there is one, else `self_def`.
        let expand_spec_or_sd =
            |out: &mut _,
             spec: &Option<Template<TokenAccumulator>>,
             sd: SubstDetails<TokenAccumulator>| {
                if let Some(spec) = spec {
                    spec.expand(ctx, out);
                    Ok(())
                } else {
                    sd.expand(ctx, out, kw_span)
                }
            };
        if !ctx.is_enum() {
            return expand_spec_or_sd(out, &self.self_, self_def);
        }
        // It's an enum.  We need to write the main type name,
        // and the variant.  Naively we might expect to just do
        //    TTYPE::VNAME
        // but that doesn't work, because if TTYPE has generics, that's
        //    TNAME::<GENERICS>::VNAME
        // and this triggers bizarre (buggy) behaviour in rustc -
        // see rust-lang/rust/issues/108224.
        // So we need to emit
        //    TNAME::VNAME::<GENERICS>
        //
        // The most convenient way to do that seems to be to re-parse
        // this bit of the expansion as a syn::Path.  That lets
        // us fish out the generics, for writing out later.
        let mut self_ty = TokenAccumulator::new();
        expand_spec_or_sd(&mut self_ty, &self.self_, self_def)?;
        let self_ty = self_ty.tokens()?;
        let mut self_ty: syn::Path = syn::parse2(self_ty).map_err(|mut e| {
            e.combine(kw_span.error(
                "error re-parsing self type path for this expansion",
            ));
            e
        })?;
        // Detach the generics from the final path segment, so we can
        // re-emit them after the variant name instead.
        let mut generics = mem::take(
            &mut self_ty
                .segments
                .last_mut()
                .ok_or_else(|| {
                    kw_span.error(
                        "self type path for this expansion is empty path!",
                    )
                })?
.arguments, ); out.append(self_ty); out.append(Token![::](kw_span)); expand_spec_or_sd(out, &self.vname, SD::vname(Default::default()))?; let gen_content = match &mut generics { syn::PathArguments::AngleBracketed(content) => Some(content), syn::PathArguments::None => None, syn::PathArguments::Parenthesized(..) => { return Err([ (generics.span(), "generics"), (kw_span, "template keyword"), ] .error("self type has parenthesised generics, not supported")) } }; if let Some(gen_content) = gen_content { // Normalise `` to `::`. gen_content .colon2_token .get_or_insert_with(|| Token![::](kw_span)); out.append(&generics); } Ok(()) } } impl SubstVPat { // $vpat for struct $tname { $( $fname: $fpatname, ) } // $vpat for enum $tname::$vname { $( $fname: $fpatname, ) } fn expand( &self, ctx: &Context, out: &mut TokenAccumulator, kw_span: Span, ) -> syn::Result<()> { let self_def = SD::tname(Default::default()); SubstVType::expand(&self.vtype, ctx, out, kw_span, self_def)?; let in_braces = braced_group(kw_span, |mut out| { WithinField::for_each(ctx, |ctx, field| { SD::fname::(()) .expand(ctx, &mut out, kw_span)?; out.append_tokens(&(), Token![:](kw_span))?; // Do the expansion with the paste machinery, since // that has a ready-made notion of what fprefix= might // allow, and how to use it. let mut paste = paste::Items::new(kw_span); if let Some(fprefix) = &self.fprefix { fprefix.expand(ctx, &mut paste); } else { paste.append_fixed_string("f_"); } paste.append_identfrag_toks(&field.fname(kw_span))?; paste.assemble(out, None)?; out.append(Token![,](kw_span)); Ok::<_, syn::Error>(()) }) })?; out.append(in_braces); Ok(()) } } impl ExpandInfallible for Template where TemplateElement: Expand, O: ExpansionOutput, { fn expand(&self, ctx_in: &Context, out: &mut O) { let mut ctx_buf; let mut definitions_here = vec![]; let mut defconds_here = vec![]; let mut ctx = ctx_in; for element in &self.elements { macro_rules! 
handle_definition { { $variant:ident, $store:expr } => { if let TE::Subst(Subst { sd: SD::$variant(def, _), .. }) = element { // Doing this with a macro makes it nice and obvious // to the borrow checker. $store.push(def); ctx_buf = ctx_in.clone(); ctx_buf.definitions.earlier = Some(&ctx_in.definitions); ctx_buf.definitions.here = &definitions_here; ctx_buf.definitions.conds = &defconds_here; ctx = &ctx_buf; continue; } } } handle_definition!(define, definitions_here); handle_definition!(defcond, defconds_here); let () = element .expand(ctx, out) .unwrap_or_else(|err| out.record_error(err)); } } } impl Expand for TemplateElement { fn expand( &self, ctx: &Context, out: &mut TokenAccumulator, ) -> syn::Result<()> { match self { TE::Ident(tt) => out.append(tt.clone()), TE::Literal(tt, ..) => out.append(tt.clone()), TE::LitStr(tt) => out.append(tt.clone()), TE::Punct(tt, _) => out.append(tt.clone()), TE::Group { delim_span, delimiter, template, not_in_paste: _, } => { use proc_macro2::Group; let mut content = TokenAccumulator::new(); template.expand(ctx, &mut content); let mut group = Group::new(*delimiter, content.tokens()?); group.set_span(*delim_span); out.append(TT::Group(group)); } TE::Subst(exp) => { exp.expand(ctx, out)?; } TE::Repeat(repeated_template) => { repeated_template.expand(ctx, out); } } Ok(()) } } impl Expand for Subst where O: ExpansionOutput, TemplateElement: Expand, { fn expand(&self, ctx: &Context, out: &mut O) -> syn::Result<()> { self.sd.expand(ctx, out, self.kw_span) } } impl SubstDetails where O: ExpansionOutput, TemplateElement: Expand, { /// Expand this template element, by adding it to `O` /// /// This is done using `O`'s [`ExpansionOutput`] methods. fn expand( &self, ctx: &Context, out: &mut O, kw_span: Span, ) -> syn::Result<()> { // eprintln!("@@@@@@@@@@@@@@@@@@@@ EXPAND {:?}", self); let do_meta = |sm: &meta::SubstMeta<_>, out, meta| { sm.expand(ctx, kw_span, out, meta) }; // Methods for handling generics. 
Most take `composable: bool`, // which lets us control the trailing comma. This is desirable // because we should include it for expansions like $tgens that the // user can append things to, but ideally *not* for expansions like // $ttype that the user can't. let do_tgnames = |out: &mut TokenAccumulator, composable| { for pair in ctx.top.generics.params.pairs() { use syn::GenericParam as GP; match pair.value() { GP::Type(t) => out.append(&t.ident), GP::Const(c) => out.append(&c.ident), GP::Lifetime(l) => out.append(&l.lifetime), } out.append_maybe_punct_composable(&pair.punct(), composable); } }; let do_tgens_nodefs = |out: &mut TokenAccumulator| { for pair in ctx.top.generics.params.pairs() { use syn::GenericParam as GP; let out_attrs = |out: &mut TokenAccumulator, attrs: &[_]| { attrs.iter().for_each(|attr| out.append(attr)); }; match pair.value() { GP::Type(t) => { out_attrs(out, &t.attrs); out.append(&t.ident); out.append(&t.colon_token); out.append(&t.bounds); } GP::Const(c) => { out_attrs(out, &c.attrs); out.append(&c.const_token); out.append(&c.ident); out.append(&c.colon_token); out.append(&c.ty); } GP::Lifetime(l) => out.append(&l), } out.with_tokens(|out| { pair.punct().to_tokens_punct_composable(out); }); } }; let do_tgens = |out: &mut TokenAccumulator, composable: bool| { out.append_maybe_punct_composable( &ctx.top.generics.params, composable, ); }; // There are three contexts where the top-level type // name might occur with generics, and two syntaxes: // referring to the type $ttype Type:: // impl'ing for the type $ttype Type:: // defining a new type $tdeftype Type // Handles $ttype and $tdeftype, and, indirectly, $vtype let do_ttype = |out: &mut O, colons: Option<()>, do_some_gens| { let _: &dyn Fn(&mut _, bool) = do_some_gens; // specify type let gens = &ctx.top.generics; let colons = gens .lt_token .and_then(|_| colons.map(|()| Token![::](kw_span))); out.append_idpath( kw_span, |_| {}, &ctx.top.ident, |out| { out.append(colons); 
out.append(gens.lt_token); do_some_gens(out, false); out.append(gens.gt_token); }, Grouping::Ungrouped, ) .unwrap_or_else(|e| e.unreachable()) }; let do_maybe_delimited_group = |out, np, delim, content| { let _: &mut O = out; let _: &Template = content; out.append_tokens_with(np, |out| { if let Some(delim) = delim { out.append(delimit_token_group( delim, kw_span, |inside: &mut TokenAccumulator| { Ok(content.expand(ctx, inside)) }, )?); } else { content.expand(ctx, out); } Ok(()) }) }; match self { SD::tname(_) => out.append_identfrag_toks(&ctx.top.ident)?, SD::ttype(_) => do_ttype(out, Some(()), &do_tgnames), SD::tdeftype(_) => do_ttype(out, None, &do_tgens), SD::vname(_) => { out.append_identfrag_toks(&ctx.syn_variant(&kw_span)?.ident)? } SD::fname(_) => { let fname = ctx.field(&kw_span)?.fname(kw_span); out.append_identfrag_toks(&fname)?; } SD::ftype(_) => { let f = ctx.field(&kw_span)?; out.append_syn_type( kw_span, f.field.ty.clone(), Grouping::Invisible, ); } SD::fpatname(_) => { let f = ctx.field(&kw_span)?; let fpatname = Ident::new(&format!("f_{}", f.fname(kw_span)), kw_span); out.append_identfrag_toks(&fpatname)?; } SD::Xmeta(sm) => do_meta(sm, out, sm.pmetas(ctx, kw_span)?)?, SD::error(e, _) => e.throw(ctx)?, SD::Vis(vis, np) => { out.append_tokens(np, vis.syn_vis(ctx, kw_span)?)? } SD::tdefkwd(_) => { fn w(out: &mut O, t: impl ToTokens) where O: ExpansionOutput, { out.append_identfrag_toks(&TokenPastesAsIdent(t)) .unwrap_or_else(|e| e.unreachable()); } use syn::Data::*; match &ctx.top.data { Struct(d) => w(out, &d.struct_token), Enum(d) => w(out, &d.enum_token), Union(d) => w(out, &d.union_token), }; } SD::tattrs(ra, np, ..) => out.append_tokens_with(np, |out| { ra.expand(ctx, out, &ctx.top.attrs) })?, SD::vattrs(ra, np, ..) => out.append_tokens_with(np, |out| { let variant = ctx.variant(&kw_span)?.variant; let attrs = variant.as_ref().map(|v| &*v.attrs); ra.expand(ctx, out, attrs.unwrap_or_default()) })?, SD::fattrs(ra, np, ..) 
=> out.append_tokens_with(np, |out| { ra.expand(ctx, out, &ctx.field(&kw_span)?.field.attrs) })?, SD::tgens(np, ..) => out.append_tokens_with(np, |out| { do_tgens_nodefs(out); Ok(()) })?, SD::tdefgens(np, ..) => out.append_tokens_with(np, |out| { do_tgens(out, true); Ok(()) })?, SD::tgnames(np, ..) => out.append_tokens_with(np, |out| { do_tgnames(out, true); Ok(()) })?, SD::twheres(np, ..) => out.append_tokens_with(np, |out| { if let Some(clause) = &ctx.top.generics.where_clause { out.with_tokens(|out| { clause.predicates.to_tokens_punct_composable(out); }); } Ok(()) })?, SD::vpat(v, np, ..) => out.append_tokens_with(np, |out| { // This comment prevents rustfmt making this unlike the others v.expand(ctx, out, kw_span) })?, SD::vtype(v, np, ..) => out.append_tokens_with(np, |out| { v.expand(ctx, out, kw_span, SD::ttype(Default::default())) })?, SD::tdefvariants(content, np, ..) => { let delim = if ctx.is_enum() { Some(Delimiter::Brace) } else { None }; do_maybe_delimited_group(out, np, delim, content)?; } SD::fdefine(spec_f, np, ..) => { out.append_tokens_with(np, |out| { let field = ctx.field(&kw_span)?.field; if let Some(driver_f) = &field.ident { if let Some(spec_f) = spec_f { spec_f.expand(ctx, out); } else { out.append(driver_f); } } out.append(&field.colon_token); Ok(()) })? } SD::vdefbody(vname, content, np, ..) => { use syn::Fields as SF; let variant = ctx.variant(&kw_span)?; let struct_variant = variant.is_struct_toplevel_as_variant(); if !struct_variant { vname.expand(ctx, out); } let delim = match variant.fields { SF::Unit => None, SF::Unnamed(..) => Some(Delimiter::Parenthesis), SF::Named(..) => Some(Delimiter::Brace), }; do_maybe_delimited_group(out, np, delim, content)?; if !struct_variant { // Any enum variant: terminate with a comma. out.append_tokens(np, Token![,](kw_span))?; } else if matches!(variant.fields, SF::Named(_)) { // struct {} at top-level: no terminator. } else { // Unit or tuple struct: Terminate with a semicolon. 
out.append_tokens(np, Token![;](kw_span))?; } } SD::Crate(np, ..) => out.append_tokens(np, &ctx.template_crate)?, SD::paste(content, ..) => { paste::expand(ctx, kw_span, content, out)?; } SD::ChangeCase(content, case, ..) => { let mut items = paste::Items::new(kw_span); content.expand(ctx, &mut items); items.assemble(out, Some(*case))?; } SD::define(..) | SD::defcond(..) => out.write_error( &kw_span, // I think this is impossible. It could only occur if // someone parsed a Subst or SubstDetails that wasn't // in a Template. It is Template.expand() that handles this. // We could possibly use proof tokens to see if this happens // and exclude it, but that would be super invasive. // // (There are some parallels between this and `${when }`) "${define } and ${defcond } only allowed in a full template", ), SD::UserDefined(name) => name.lookup_expand(ctx, out)?, SD::ignore(content, _) => { let mut ignore = O::new_with_span(kw_span); content.expand(ctx, &mut ignore); let () = ignore.ignore_impl()?; } SD::when(..) => out.write_error( &kw_span, "internal error - misplaced ${when } detected too late!", ), SD::If(conds, ..) => conds.expand(ctx, out)?, SD::select1(conds, ..) 
=> conds.expand_select1(ctx, out)?, SD::For(repeat, _) => repeat.expand(ctx, out), SD::dbg(ddr) => ddr.expand(ctx, out, kw_span), SD::dbg_all_keywords(_) => dbg_allkw::dump(ctx), // ## maint/check-keywords-documented BoolOnly ## SD::is_struct(bo) | SD::is_enum(bo) | SD::is_union(bo) | SD::v_is_unit(bo) | SD::v_is_tuple(bo) | SD::v_is_named(bo) | SD::is_empty(bo, _) | SD::approx_equal(bo, _) | SD::False(bo) | SD::True(bo) | SD::not(_, bo) | SD::any(_, bo) | SD::all(_, bo) => out.append_bool_only(bo), }; Ok(()) } } impl DbgDumpRequest { fn expand(&self, ctx: &Context, out: &mut O, kw_span: Span) { let desc = format!("derive-deftly dbg dump {}", self.display_heading(ctx),); let mut msg = String::new(); let () = self.content_string; writeln!( msg, // r#"---------- {} expansion (start) ----------"#, desc, ) .expect("write to String failed"); out.dbg_expand(kw_span, ctx, &mut msg, &self.content_parsed); writeln!( msg, r#" ---------- {} expansion (end) ----------"#, desc ) .expect("write to String failed"); eprint!("{}", msg); } } impl ExplicitError { pub fn throw(&self, ctx: &Context<'_>) -> Result { Err([ ctx.error_loc(), // (self.message.span(), "template"), ] .error(self.message.value())) } } impl DefinitionName { fn lookup_expand( &self, ctx: &Context<'_>, out: &mut O, ) -> syn::Result<()> { let (def, ctx) = ctx.find_definition(self)?.ok_or_else(|| { self.error(format!("user-defined expansion `{}` not found", self)) })?; match &def.body { DefinitionBody::Paste(content) => { paste::expand(&ctx, def.body_span, content, out)?; } DefinitionBody::Normal(content) => { let not_in_paste = O::not_in_paste(self).map_err(|mut unpasteable| { unpasteable.combine(def.body_span.error( "user-defined expansion is not pasteable because it isn't, itself, ${paste }" )); unpasteable })?; out.append_tokens_with(¬_in_paste, |out| { content.expand(&ctx, out); Ok(()) })?; } } Ok(()) } } impl RawAttr { fn expand( &self, ctx: &Context, out: &mut TokenAccumulator, attrs: &[syn::Attribute], ) -> 
syn::Result<()> {
        for attr in attrs {
            match self {
                RawAttr::Default => {
                    // By default, pass attributes through, except
                    // derive-deftly's own, which would be meaningless
                    // (or wrong) in the expansion.
                    if ["deftly", "derive_deftly", "derive_deftly_adhoc"]
                        .iter()
                        .all(|exclude| !attr.path().is_ident(exclude))
                    {
                        out.append(attr);
                    }
                }
                RawAttr::Include { entries } => {
                    // Emit only attributes explicitly listed.
                    let ent = entries.iter().find(|ent| ent.matches(attr));
                    if let Some(ent) = ent {
                        ent.expand(ctx, out, attr)?;
                    }
                }
                RawAttr::Exclude { exclusions } => {
                    // Emit everything except the listed attributes.
                    if !exclusions.iter().any(|excl| excl == attr.path()) {
                        out.append(attr);
                    }
                }
            }
        }
        Ok(())
    }
}

impl RawAttrEntry {
    /// Does this include-list entry select `attr`?
    fn matches(&self, attr: &syn::Attribute) -> bool {
        &self.path == attr.path()
    }

    /// Emit the selected attribute, unmodified.
    fn expand(
        &self,
        _ctx: &Context,
        out: &mut TokenAccumulator,
        attr: &syn::Attribute,
    ) -> syn::Result<()> {
        out.append(attr);
        Ok(())
    }
}

// NOTE(review): the `<O>` generic parameters on these two impls were eaten
// by markup-stripping in this dump; reconstructed from the where-clauses —
// confirm against upstream.
impl<O> ExpandInfallible<O> for RepeatedTemplate<O>
where
    Template<O>: ExpandInfallible<O>,
    O: ExpansionOutput,
{
    fn expand(&self, ctx: &Context, out: &mut O) {
        // for_with_within expects a fallible closure, but we want to do
        // infallible work in our infallible context, so we use `Void`
        // as the error type and wrap each call in `Ok`.
        #[allow(clippy::unit_arg)] // clippy wants us to worsify the style
        match self.over {
            RO::Variants => ctx.for_with_within(|ctx, _: &WithinVariant| {
                Ok::<_, Void>(self.expand_inner(ctx, out))
            }),
            RO::Fields => ctx.for_with_within(|ctx, _: &WithinField| {
                Ok::<_, Void>(self.expand_inner(ctx, out))
            }),
        }
        .void_unwrap()
    }
}

impl<O> RepeatedTemplate<O> {
    /// private, does the condition
    fn expand_inner(&self, ctx: &Context, out: &mut O)
    where
        Template<O>: ExpandInfallible<O>,
        O: ExpansionOutput,
    {
        // Evaluate the `${when }` clauses with the loop marker set;
        // any false (or erroring) clause suppresses this iteration.
        let mut ctx = ctx.clone();
        ctx.within_loop = WithinLoop::When;
        for when in &self.whens {
            match when.eval_bool(&ctx) {
                Ok(true) => continue,
                Ok(false) => return,
                Err(e) => {
                    out.record_error(e);
                    return;
                }
            }
        }
        ctx.within_loop = WithinLoop::Body;
        self.template.expand(&ctx, out)
    }
}
derive-deftly-macros-1.0.0/framework.rs000064400000000000000000000465271046102023000162260ustar 00000000000000//! Core types and traits for parsing and expansion //! //!
Also re-exports the names that the implementation wants. //! //! Should be included with `use super::framework::*`, not `crate::`, //! so that it works with `tests/directly.rs` too. pub use super::prelude::*; pub use super::boolean::*; pub use super::repeat::*; pub use super::syntax::*; pub(super) use super::paste; pub(super) use super::paste::{IdentFrag, IdentFragInfallible}; /// Context during expansion /// /// References the driver, and digested information about it. /// Also represents where in the driver we are, /// including repetition context. #[derive(Debug, Clone)] pub struct Context<'c> { pub top: &'c syn::DeriveInput, pub template_crate: &'c syn::Path, pub template_name: Option<&'c syn::Path>, pub pmetas: &'c meta::PreprocessedMetas, pub variant: Option<&'c WithinVariant<'c>>, pub field: Option<&'c WithinField<'c>>, pub within_loop: WithinLoop, pub pvariants: &'c [PreprocessedVariant<'c>], pub definitions: Definitions<'c>, pub nesting_depth: u16, pub nesting_parent: Option<(&'c Context<'c>, &'c DefinitionName)>, } #[derive(Debug)] pub struct PreprocessedVariant<'c> { pub fields: &'c syn::Fields, pub pmetas: &'c meta::PreprocessedMetas, pub pfields: Vec, } #[derive(Debug)] pub struct PreprocessedField { pub pmetas: meta::PreprocessedMetas, } #[derive(Debug, Clone)] pub struct WithinVariant<'c> { pub variant: Option<&'c syn::Variant>, pub fields: &'c syn::Fields, pub pmetas: &'c meta::PreprocessedMetas, pub pfields: &'c [PreprocessedField], } #[derive(Debug, Clone)] pub struct WithinField<'c> { pub field: &'c syn::Field, pub pfield: &'c PreprocessedField, pub index: u32, } /// Whether we're in a loop, and if so, its details /// /// Set only for expansions of a `RepeatedTemplate`, /// not any kind of implicit looping eg `dbg_all_keywords`, `vpat`, etc. /// /// At some future point this may have enough information /// to provide `$loop_index`, etc. /// Right now it's only used for `dbg_all_keywords`. 
#[derive(Debug, Clone, Copy)] pub enum WithinLoop { None, /// Evaluating `${when }` clauses When, /// Evaluating the body Body, } #[derive(Debug, Clone, Copy, Default)] pub struct Definitions<'c> { pub here: &'c [&'c Definition], pub conds: &'c [&'c Definition], pub earlier: Option<&'c Definitions<'c>>, } /// Special processing instructions returned by /// [`special_before_element_hook`](SubstParseContext::special_before_element_hook) pub enum SpecialInstructions { /// This template is finished /// /// Stop parsing this `Template` though perhaps /// the surrounding `Group` is not finished. /// /// The parser for whatever called `Template::parse` /// will continue. EndOfTemplate, } /// Surrounding lexical context during parsing /// /// This is the kind of lexical context a piece of a template appears in. /// It is implemented for /// * Types that represent an expansion output `ExpansionOutput`; /// in this case, the lexical context is one where /// the expansion is accumulated in this type. /// * Places where template substitution syntax `${keyword }` /// appears but where no output will be generated (eg, within /// the condition of `${if }`. /// /// The associated types are either `Void` or `()`. /// They appears within the variants of `SubstDetails`, /// causing inapplicable variants to be eliminated. /// /// Because a variant is only inhabited if all of its fields are, /// the conditions are effectively ANDed. /// So the "default" value (for context that don't have an opnion) /// is inhabitedness `()`. /// /// Each type has an associated constructur, /// used during parsing. /// So this generates a parse error at parse time, /// if a construct appears in the wrong place. pub trait SubstParseContext: Sized { /// Uninhabited iff this lexical context is within `${paste }` type NotInPaste: Debug + Copy + Sized; /// Uninhabited iff this lexical context is within a condition. 
type NotInBool: Debug + Copy + Sized; /// Uninhabited unless this lexical context is within a condition. type BoolOnly: Debug + Copy + Sized; /// Whether this is a boolean context // // Useful for ad-hoc handling of the way that boolean // context has a different notion of syntax. const IS_BOOL: bool = false; /// Content of the `dbg` keyword /// /// This has to be in this trait because /// `${dbg }` contains a `Template` but `dbg(...)` contains a `Subst`. /// /// We make bespoke output for each context; for boolean this is sui /// generis, and for expansions it's in [`ExpansionOutput::dbg_expand`]. type DbgContent: Parse + Debug + AnalyseRepeat + meta::FindRecogMetas; fn not_in_paste(span: &impl Spanned) -> syn::Result; fn not_in_bool(span: &impl Spanned) -> syn::Result; fn bool_only(span: &impl Spanned) -> syn::Result { Err(span.error( "derive-deftly keyword is a condition - not valid as an expansion", )) } /// When we find a `fmeta` etc. in this context, does it allow a value? /// /// Used by the template-scanning code, to report whether an `Xmeta` /// in the template justifies a value-bearing `Xmeta` attribute /// on/in the driver, or just a boolean. fn meta_recog_usage() -> meta::Usage; /// For communicating through `parse_special` type SpecialParseContext: Default; /// Handle any special syntax for a special kind of template context. /// /// This method is called only when parsing multi-element [`Template`]s, /// It's a hook, called before parsing each `TemplateElement`. /// /// It should consume any special syntax as appropriate, /// /// The default implementation is a no-op. /// The only non-default implementation is in `paste.rs`, for `$<...>` - /// see [`paste::AngleBrackets`]. 
fn special_before_element_hook( _special: &mut Self::SpecialParseContext, _input: ParseStream, ) -> syn::Result> { Ok(None) } /// Parse using `f`, within parens in boolean context, not otherwise /// /// Useful for parsing the arguments to an argument-taking keyword /// which takes an "equivalent" syntax in both contexts. fn parse_maybe_within_parens( input: ParseStream, f: impl FnOnce(ParseStream) -> syn::Result, ) -> syn::Result { if Self::IS_BOOL { let inner; let _ = parenthesized!(inner in input); f(&inner) } else { f(input) } } /// Parse maybe a comma (comma in boolean contegxt, not otherwise) /// /// Useful for parsing the arguments to an argument-taking keyword /// which takes an "equivalent" syntax in both contexts. fn parse_maybe_comma(input: ParseStream) -> syn::Result<()> { if Self::IS_BOOL { let _: Token![,] = input.parse()?; } Ok(()) } /// Return an error suitable for reporting missing arguments /// /// Helper for handling missing arguments to an argument-taking keyword /// which takes an "equivalent" syntax in both contexts. fn missing_keyword_arguments(kw_span: Span) -> syn::Result { Err(kw_span.error(format_args!( "missing parameters to expansion keyword (NB: argument must be within {{ }})", ))) } } /// Expansion output accumulator, for a template lexical context /// /// Each template lexical context has a distinct type which /// * Represents the lexical context /// * If that lexical context generates expansions, /// accumulates the expansion. That's what this trait is. /// /// The methods are for accumulating various kinds of things /// that can be found in templates, or result from template expansion. /// /// The accumulating type (`Self` might be accumulating /// tokens ([`TokenStream`]) or strings ([`paste::Items`]). pub trait ExpansionOutput: SubstParseContext { /// An identifier (or fragment of one) /// /// Uses the `IdentFragment` for identifier pasting, /// and the `ToTokens` for general expansion. 
    // NOTE(review): this signature appears to have lost its generic
    // parameter list in extraction; the `where`-less use of `I` suggests
    // it was `fn append_identfrag_toks<I: IdentFrag>(..)` — confirm upstream.
    fn append_identfrag_toks(
        &mut self,
        ident: &I,
    ) -> Result<(), I::BadIdent>;
    /// Append a Rust path (scoped identifier, perhaps with generics)
    ///
    /// To facilitate `${paste }`, the path is provided as:
    /// * some prefix tokens (e.g., a scoping path),
    /// * the actual identifier,
    /// * some suffix tokens (e.g. generics).
    ///
    /// `tspan` is the span of the part of the template
    /// which expanded into this path.
    ///
    /// This is a "more complex" expansion,
    /// in the terminology of the template reference:
    /// If a paste contains more than one, it is an error.
    //
    // NOTE(review): likewise, presumably `fn append_idpath<A, B, I>(..)`
    // originally — the generic parameter list is missing here.
    fn append_idpath(
        &mut self,
        template_entry_span: Span,
        pre: A,
        ident: &I,
        post: B,
        grouping: Grouping,
    ) -> Result<(), I::BadIdent>
    where
        A: FnOnce(&mut TokenAccumulator),
        B: FnOnce(&mut TokenAccumulator),
        I: IdentFrag;
    /// Append a [`syn::LitStr`](struct@syn::LitStr)
    ///
    /// This is its own method because `syn::LitStr` is not `Display`,
    /// and we don't want to unconditionally turn it into a string
    /// before retokenising it.
    fn append_syn_litstr(&mut self, v: &syn::LitStr);
    /// Append a [`syn::Type`]
    ///
    /// This is a "more complex" expansion,
    /// in the terminology of the template reference:
    /// If a paste contains more than one, it is an error.
    fn append_syn_type(
        &mut self,
        te_span: Span,
        mut v: syn::Type,
        mut grouping: Grouping,
    ) {
        // Normalise grouping: strip (possibly nested) parentheses and
        // invisible groups from around the type, remembering the
        // strongest grouping encountered, so that exactly one level of
        // grouping is (re)applied by `append_syn_type_inner`.
        loop {
            let (inner, add_grouping) = match v {
                syn::Type::Paren(inner) => (inner.elem, Grouping::Parens),
                syn::Type::Group(inner) => (inner.elem, Grouping::Invisible),
                _ => break,
            };
            v = *inner;
            grouping = cmp::max(grouping, add_grouping);
        }
        // Paths in type position may need `::` before generic arguments.
        if let syn::Type::Path(tp) = &mut v {
            typepath_add_missing_argument_colons(tp, te_span);
        }
        self.append_syn_type_inner(te_span, v, grouping)
    }
    /// Append a [`syn::Type`], which has been grouping-normalised
    fn append_syn_type_inner(
        &mut self,
        te_span: Span,
        v: syn::Type,
        grouping: Grouping,
    );
    /// Append using a function which generates tokens
    ///
    /// If you have an `impl `[`ToTokens`],
    /// use [`append_tokens`](ExpansionOutput::append_tokens) instead.
    ///
    /// Not supported within `${paste }`.
    /// The `NotInPaste` parameter makes this method unreachable
    /// when expanding within `${paste }`;
    /// or to put it another way,
    /// it ensures that such an attempt would have been rejected
    /// during template parsing.
    fn append_tokens_with(
        &mut self,
        np: &Self::NotInPaste,
        f: impl FnOnce(&mut TokenAccumulator) -> syn::Result<()>,
    ) -> syn::Result<()>;
    /// "Append" a substitution which can only be used within a boolean
    ///
    /// Such a thing cannot be expanded, so it cannot be appended,
    /// so this function must be unreachable.
    /// `expand_bool_only` is called (in expansion contexts)
    /// to handle uninhabited `SubstDetails` variants etc.
    ///
    /// Implementing it involves demonstrating that
    /// either `self`, or `Self::BoolOnly`, is uninhabited,
    /// with a call to [`void::unreachable`].
    fn append_bool_only(&mut self, bool_only: &Self::BoolOnly) -> !;
    /// Note that an error occurred
    ///
    /// This must arrange to
    /// (eventually) convert it using `into_compile_error`
    /// and emit it somewhere appropriate.
fn record_error(&mut self, err: syn::Error); /// Convenience method for noting an error with span and message fn write_error(&mut self, span: &S, message: M) { self.record_error(span.error(message)); } /// Convenience method for writing a `ToTokens` /// /// Dispatches to /// [`append_tokens_with`](ExpansionOutput::append_tokens_with) /// Not supported within `${paste }`. // // I experimented with unifying this with `append_tokens_with` // using a `ToTokensFallible` trait, but it broke type inference // rather badly and had other warts. fn append_tokens( &mut self, np: &Self::NotInPaste, tokens: impl ToTokens, ) -> syn::Result<()> { self.append_tokens_with(np, |out| { out.append(tokens); Ok(()) }) } /// Make a new empty expansion output, introduced at `kw_span` /// /// Normally, call sites use an inherent constructor method. /// This one is used for special cases, eg `${ignore ...}` fn new_with_span(kw_span: Span) -> Self; fn default_subst_meta_as(kw: Span) -> syn::Result>; /// Implement the core of the `ignore` keyword /// /// If there was an error, returns it. /// Otherwise, discards everything. fn ignore_impl(self) -> syn::Result<()>; /// Implement the `dbg` keyword /// /// Specifically: /// * Write the expansion of `child` to `msg` in human-readable form /// * Without a trailing newline /// * Subsume it into `self` /// /// Failures are to be reported, and subsumed into `self`. fn dbg_expand( &mut self, kw_span: Span, ctx: &Context, msg: &mut String, content: &Self::DbgContent, ); } /// Convenience trait providing `item.expand()` /// /// Implementations of this are often specific to the [`ExpansionOutput`]. /// /// Having this as a separate trait, /// rather than hanging it off `ExpansionOutput`, /// makes the expansion method more convenient to call. /// /// It also avoids having to make all of these expansion methods /// members of the `ExpansionOutput` trait. 
pub trait Expand { fn expand(&self, ctx: &Context, out: &mut O) -> syn::Result<()>; } /// Convenience trait providing `fn expand(self)`, infallible version /// /// Some of our `expand` functions always record errors /// within the output accumulator /// and therefore do not need to return them. pub trait ExpandInfallible { fn expand(&self, ctx: &Context, out: &mut O); } /// Accumulates tokens, or errors /// /// We collect all the errors, and if we get an error, don't write /// anything out. /// This is because `compile_error!` (from `into_compile_error`) /// only works in certain places in Rust syntax (!) #[derive(Debug)] pub struct TokenAccumulator(Result); impl<'c> Context<'c> { pub fn is_enum(&self) -> bool { matches!(self.top.data, syn::Data::Enum(_)) } /// Description of the whole expansion, suitable for `dbg` option, etc. pub fn expansion_description(&self) -> impl Display { let ident = &self.top.ident; if let Some(templ) = &self.template_name { format!( "derive-deftly expansion of {} for {}", templ.to_token_stream(), ident, ) } else { format!("derive-deftly expansion, for {}", ident,) } } } impl Default for TokenAccumulator { fn default() -> Self { TokenAccumulator(Ok(TokenStream::new())) } } impl TokenAccumulator { pub fn new() -> Self { Self::default() } pub fn with_tokens( &mut self, f: impl FnOnce(&mut TokenStream) -> R, ) -> Option { self.0.as_mut().ok().map(f) } pub fn append(&mut self, t: impl ToTokens) { self.with_tokens(|out| t.to_tokens(out)); } pub fn tokens(self) -> syn::Result { self.0 } /// Appends `val`, via [`ToTokensPunctComposable`] or [`ToTokens`] pub fn append_maybe_punct_composable( &mut self, val: &(impl ToTokens + ToTokensPunctComposable), composable: bool, ) { self.with_tokens(|out| { if composable { val.to_tokens_punct_composable(out); } else { val.to_tokens(out); } }); } } impl SubstParseContext for TokenAccumulator { type NotInPaste = (); type NotInBool = (); type DbgContent = Template; fn not_in_bool(_: &impl Spanned) -> 
syn::Result<()> { Ok(()) } fn not_in_paste(_: &impl Spanned) -> syn::Result<()> { Ok(()) } fn meta_recog_usage() -> meta::Usage { meta::Usage::Value } type BoolOnly = Void; type SpecialParseContext = (); } impl ExpansionOutput for TokenAccumulator { fn append_identfrag_toks( &mut self, ident: &I, ) -> Result<(), I::BadIdent> { self.with_tokens( |out| ident.frag_to_tokens(out), // ) .unwrap_or(Ok(())) } fn append_idpath( &mut self, _te_span: Span, pre: A, ident: &I, post: B, grouping: Grouping, ) -> Result<(), I::BadIdent> where A: FnOnce(&mut TokenAccumulator), B: FnOnce(&mut TokenAccumulator), I: IdentFrag, { let inner = match self.with_tokens(|_outer| { let mut inner = TokenAccumulator::new(); pre(&mut inner); inner.append_identfrag_toks(ident)?; post(&mut inner); Ok(inner) }) { None => return Ok(()), // earlier errors, didn't process Some(Err(e)) => return Err(e), Some(Ok(ta)) => ta, }; match inner.tokens() { Ok(ts) => self.append(grouping.surround(ts)), Err(e) => self.record_error(e), } Ok(()) } fn append_syn_litstr(&mut self, lit: &syn::LitStr) { self.append(lit); } fn append_syn_type_inner( &mut self, _te_span: Span, ty: syn::Type, grouping: Grouping, ) { self.append(grouping.surround(ty)); } fn append_tokens_with( &mut self, _not_in_paste: &(), f: impl FnOnce(&mut TokenAccumulator) -> syn::Result<()>, ) -> syn::Result<()> { f(self) } fn append_bool_only(&mut self, bool_only: &Self::BoolOnly) -> ! 
{ void::unreachable(*bool_only) } fn record_error(&mut self, err: syn::Error) { if let Err(before) = &mut self.0 { before.combine(err); } else { self.0 = Err(err) } } fn new_with_span(_kw_span: Span) -> Self { Self::new() } fn default_subst_meta_as(kw: Span) -> syn::Result> { Err(kw.error("missing `as ...` in meta expansion")) } fn ignore_impl(self) -> syn::Result<()> { self.0.map(|_: TokenStream| ()) } fn dbg_expand( &mut self, _kw_span: Span, ctx: &Context, msg: &mut String, content: &Template, ) { let mut child = TokenAccumulator::new(); content.expand(ctx, &mut child); let child = child.tokens(); match &child { Err(e) => write!(msg, "/* ERROR: {} */", e), Ok(y) => write!(msg, "{}", y), } .expect("write! failed"); match child { Ok(y) => self.append(y), Err(e) => self.record_error(e), } } } derive-deftly-macros-1.0.0/macros.rs000064400000000000000000000470721046102023000155130ustar 00000000000000#![allow(clippy::style, clippy::complexity)] #![doc=include_str!("README.md")] // // This is the actual proc-macro crate. // // All it exports (or can export) are the proc macros themselves. // Everything else that is `pub` could be written `pub(crate)`. 
mod prelude;
pub(crate) use prelude::*;

// Implementation - common parts
#[macro_use]
pub(crate) mod utils;
#[macro_use]
pub(crate) mod adviseable;
pub(crate) mod framework;

// Implementation - specific areas
pub(crate) mod accum;
pub(crate) mod approx_equal;
pub(crate) mod boolean;
pub(crate) mod dbg_allkw;
pub(crate) mod expand;
pub(crate) mod meta;
pub(crate) mod options;
pub(crate) mod paste;
pub(crate) mod repeat;
pub(crate) mod syntax;
// The real `beta` module is only built with the `beta` feature;
// otherwise a stub implementation is substituted via `path`.
#[cfg_attr(not(feature = "beta"), path = "beta_disabled.rs")]
pub(crate) mod beta;

// Implementations of each proc-macros
pub(crate) mod adhoc;
pub(crate) mod define;
pub(crate) mod derive;
pub(crate) mod engine;
pub(crate) mod semver;

pub(crate) mod compat_syn_2;
pub(crate) mod compat_syn_common;

#[doc=include_str!("HACKING.md")]
mod _doc_hacking {}
#[doc=include_str!("NOTES.md")]
mod _doc_notes {}

/// Dummy of proc_macro for use when compiling outside of proc macro context
#[cfg(not(proc_macro))]
pub(crate) mod proc_macro {
    pub(crate) use proc_macro2::TokenStream;
}

//========== `expect`, the `check` module (or dummy version) ==========

// "expect" feature; module named check.rs for tab completion reasons
#[cfg(feature = "expect")]
mod check;
// Stub used when the `expect` feature is disabled: `Target` is
// uninhabited, so the checking entry points are statically unreachable.
#[cfg(not(feature = "expect"))]
mod check {
    use super::prelude::*;
    #[derive(Debug, Clone, Copy, PartialEq)]
    pub struct Target(Void);
    impl FromStr for Target {
        type Err = Void;
        // NOTE(review): `Result` here looks garbled by extraction
        // (missing `<Self, Self::Err>` or similar) — confirm upstream.
        fn from_str(_: &str) -> Result {
            panic!("output syntax checking not supported, enable `expect` feature of `derive-deftly`")
        }
    }
    // NOTE(review): `DdOptVal` in the two signatures below presumably
    // carried a generic/inner-type parameter originally — confirm upstream.
    pub fn check_expected_target_syntax(
        _ctx: &framework::Context,
        _output: &mut TokenStream,
        target: DdOptVal,
    ) {
        // Target(Void) is uninhabited, so this can never be reached.
        void::unreachable(target.value.0)
    }
    pub fn check_expect_opcontext(
        op: &DdOptVal,
        _context: OpContext,
    ) -> syn::Result<()> {
        void::unreachable(op.value.0)
    }
}
impl DdOptValDescribable for check::Target {
    const DESCRIPTION: &'static str =
        "expected output syntax (`expect` option)";
}

//========== actual macro entrypoints ==========

/// Wraps an actual macro implementation
function that uses a proc_macro2 /// implementation to expose a proc_macro implementation instead. // // Clippy gives false positives for converting between proc_macro[2]::TokenStream. #[allow(clippy::useless_conversion)] fn wrap_macro_func( func: F, input: proc_macro::TokenStream, ) -> proc_macro::TokenStream where F: FnOnce( proc_macro2::TokenStream, ) -> Result, { let input = proc_macro2::TokenStream::from(input); let output = func(input).unwrap_or_else(|e| e.into_compile_error()); proc_macro::TokenStream::from(output) } /// Template expansion engine, internal /// /// /// /// /// Normally you do not need to mention this macro. /// /// derive-deftly does its work by /// (defining and then) invoking various interrelated macros /// including `macro_rules` macros and proc macros. /// These ultimately end up calling this macro, /// which takes a template and a data structure, /// and expands the template for that data structure. /// /// This macro's behvaiour is not currently stable or documented. /// If you invoke it yourself, you get to keep all the pieces. #[cfg_attr(proc_macro, proc_macro)] pub fn derive_deftly_engine( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { wrap_macro_func(engine::derive_deftly_engine_func_macro, input) } /// Expand an ad-hoc template, on a data structure decorated `#[derive_deftly_adhoc]` /// /// /// /// /// ``` // We're in the macro crate, where the facade crate is not available. // So we must do some namespace-swizzling. /// # use derive_deftly_macros as derive_deftly; // `proc-macro-crate` says `Itself` so generates ::derive_deftly_engine, // which is wrong for a doctest. Fudge that. We must also make sure // we're not inside main here, so we must define a main. 
/// # use derive_deftly::derive_deftly_engine; /// # fn main(){} /// use derive_deftly::{Deftly, derive_deftly_adhoc}; /// #[derive(Deftly)] /// #[derive_deftly_adhoc] /// struct DdtaStructureType { } /// // Smoke and mirrors so we can use metasyntactic OPTIONS and TEMPLATE. /// # macro_rules! derive_deftly_adhoc { { /// # $x:ident OPTIONS,..: TEMPLATE /// # } => { derive_deftly_macros::derive_deftly_adhoc! { /// # $x expect items: fn x(){} /// # } } } /// derive_deftly_adhoc! { /// DdtaStructureType OPTIONS,..: /// TEMPLATE /// } /// ``` /// /// Expands the template `TEMPLATE` for the type `DdtaStructureType`, /// /// `OPTIONS,..` is an optional comma-separated list of /// [expansion options](doc_reference/index.html#expansion-options). /// /// The definition of `DdtaStructureType` must have been decorated /// with [`#[derive(Deftly)]`](crate::Deftly), /// and `#[derive_deftly_adhoc]`, /// and the resulting `derive_deftly_driver_TYPE` macro must be /// available in scope. /// /// `derive_deftly_adhoc!` can be used in any context /// where the Rust language permits macro calls. /// For example, it can expand to expressions, statements, /// types, or patterns. #[cfg_attr(proc_macro, proc_macro)] pub fn derive_deftly_adhoc( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { wrap_macro_func(adhoc::derive_deftly_adhoc, input) } /// Define a reuseable template /// /// /// /// /// ```text /// define_derive_deftly! { /// [/// DOCS] /// [export] MyMacro OPTIONS,..: /// TEMPLATE /// } /// ``` /// /// Then, `MyMacro` can be used with /// [`#[derive(Deftly)]`](crate::Deftly) /// `#[derive_deftly(MyMacro)]`. /// /// `OPTIONS,..` /// is an optional comma-separated list of /// [expansion options](doc_reference/index.html#expansion-options), /// which will be applied whenever this template is expanded. /// /// `DOCS`, /// if supplied, are used as the rustdocs /// for the captured template macro `derive_deftly_template_MyMacro`. 
/// derive-deftly will then also append a note about /// how to invoke the template. /// /// ## Template definition macro `derive_deftly_template_MyMacro` /// /// The template is made into a `macro_rules` macro /// named `derive_deftly_template_MyMacro`, /// which is referenced when the template is applied. /// /// The template definition macro /// from `define_derive_deftly!` /// must be in scope at the point where you try to use it /// (with `#[derive(Deftly)] #[derive_deftly(MyMacro)]`). /// If the template definition is in another module, /// you may need to annotate that module with `#[macro_use]`. /// See the /// [documentation for `#[derive(Deftly)]`](derive.Deftly.html#scoping-and-ordering-within-the-same-crate). /// /// ## Exporting a template for use by other crates /// /// With `export MyMacro`, `define_derive_deftly!` exports the template /// for use by other crates. /// Then, it is referred to in other crates /// with `#[derive_ahdoc(this_crate::MyMacro)]`. /// /// I.e., `export MyMacro` causes the `derive_deftly_template_MyMacro` /// pattern macro to be exported with `#[macro_export]`. /// /// Note that a template is always exported at the crate top level, /// not in a sub-module, /// even if it is *defined* in a sub-module. /// Also, note that `export` does not have any effect on /// visibility of the template *within the same crate*. /// You may still need `#[macro_use]`. /// /// ### You must re-export `derive_deftly`; semver implications /// /// When exporting a template to other crates, you must also /// re-export `derive_deftly`, /// at the top level of your crate: /// /// ```ignore /// #[doc(hidden)] /// pub use derive_deftly; /// ``` /// This is used to find the template expansion engine, /// and will arrange that your template is expanded /// by the right version of derive-deftly. /// The template syntax is that for *your* version of `derive-deftly`, /// even if the depending crate uses a different version of derive-deftly. 
/// /// You should *not* treat a breaking change /// to derive-deftly's template syntax /// (which is a major change to derive-deftly), /// nor a requirement to use a newer template feature, /// as a breaking changes in the API of your crate. /// (You *should* use `#[doc(hidden)]`, or other approaches, /// to discourage downstream crates from using /// the derive-deftly version you re-export. /// Such use would be outside the semver guarantees.) /// /// You *should* call /// [`derive_deftly::template_export_semver_check!`](macro@template_export_semver_check) /// once in each crate that exports macros. /// This will notify you, by breaking your build, /// if you update to a derive-deftly version /// that has semver implications for other crates that use your macros. /// /// Changes that would require a semver bump /// for all libraries that export templates, /// will be rare, and specially marked in the derive-deftly changelog. /// Search for sections with titles containing "template export semver". /// /// ## Namespacing within a template /// /// Within the template, /// items within your crate can be referred to with /// [`$crate`](doc_reference/index.html#x:crate). /// /// For other items, /// including from the standard library e.g., `std::option::Option`, /// you may rely on the context which uses the template /// to have a reasonable namespace, /// or use a explicit paths starting with `std` or `::std` or `::core` /// or `$crate` (perhaps naming a re-export). /// /// Overall, the situation is similar to defining /// an exported `macro_rules` macro. #[cfg_attr(proc_macro, proc_macro)] pub fn define_derive_deftly( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { wrap_macro_func(define::define_derive_deftly_func_macro, input) } /// Perform ad-hoc templating driven by a data structure /// /// /// /// /// This macro does two things: /// /// 1. 
If `#[derive_deftly(MyMacro)]` attributes are also specified, /// they are taken to refer to reuseable templates /// defined with /// [`define_derive_deftly!`](macro@crate::define_derive_deftly). /// Each such `MyMacro` is applied to the data structure. /// /// You can specify /// [expansion options](doc_reference/index.html#expansion-options) /// for each such template application, by writing /// `#[derive_deftly(MyMacro[OPTIONS,..])]`, where /// `[OPTIONS,..]` is a comma-separated list of expansion options /// contained within `[ ]`. /// /// 2. If `#[derive_deftly_adhoc]` is specified, /// captures the data structure definition, /// so that it can be used with calls to /// [`derive_deftly_adhoc!`](macro@crate::derive_deftly_adhoc). /// /// ## `#[deftly]` attribute /// /// The contents of `#[deftly]` attributes are made available /// to templates via the /// [`${Xmeta}`](doc_reference/index.html#tmeta-vmeta-fmeta--deftly-attributes) /// expansions. /// /// If none of the template(s) recognise them, /// [it is an error](doc_reference/index.html#unrecognisedunused-deftly-attributes), /// (unless `#[derive_deftly_adhoc]` is specified). /// /// `derive-deftly` /// [does not impose any namespacing](doc_reference/index.html#attribute-namespacing) /// within `#[deftly]`: /// /// ## Scoping and ordering within the same crate /// /// **Summary of required ordering** /// /// 1. `define_derive_deftly! { MyMacro = ... }` /// 2. `#[derive(Deftly)] #[derive_deftly(MyMacro)] struct MyStruct { ... }` /// 3. `derive_deftly_adhoc! { MyStruct: ... }` /// /// Any reusable templates defined with /// `define_derive_deftly!` must lexically their precede /// uses with `#[derive(Deftly) #[derive_deftly(...)]`. /// /// And, for one-off templates (`derive_deftly_adhoc!`), /// the data structure with its `#[derive(Deftly)]` /// must lexically precede /// the references in `derive_deftly_adhoc!`, /// so that the data structure definition macro /// is in scope. 
/// /// In each case, /// if the definition is in another module /// in the same crate, /// the defining module's `mod` statement must come before /// the reference, /// and /// the `mod` statement will need `#[macro_use]`. /// So the placement and order of `mod` statements can matter. /// Alternatively, it is possible to use path-based scoping; /// there is /// [an example in the Guide](https://diziet.pages.torproject.net/rust-derive-deftly/latest/guide/templates-in-modules.html#path-scope). /// /// ## Applying a template (derive-deftly macro) from another crate /// /// `#[derive_deftly(some_crate::MyMacro)]` /// applies an exported template /// defined and exported by `some_crate`. /// /// You can import a template from another crate, /// so you can apply it with an unqualified name, /// with `use`, /// but the `use` must refer to /// the actual pattern macro name `derive_deftly_template_MyMacro`: /// ``` // See the doc comment for `derive_deftly_adhoc`. /// # use derive_deftly_macros as derive_deftly; /// # use derive_deftly::derive_deftly_engine; /// # fn main(){} // We can't make another crate. Fake up the macro definition /// # derive_deftly::define_derive_deftly! { TheirMacro: } /// use derive_deftly::Deftly; // and don't really try to import it, then /// # #[cfg(any())] /// use other_crate::derive_deftly_template_TheirMacro; /// #[derive(Deftly)] /// #[derive_deftly(TheirMacro)] /// struct MyStruct { // ... /// # } /// ``` /// /// ## Captured data structure definition `derive_deftly_driver_TYPE` /// /// With `#[derive_deftly_adhoc]`, /// the data structure is captured /// for use by /// [`derive_deftly_adhoc!`](macro@crate::derive_deftly_adhoc). /// /// Specifically, by defining /// a `macro_rules` macro called `derive_deftly_driver_TYPE`, /// where `TYPE` is the name of the type /// that `#[derive(Deftly)]` is applied to. 
/// /// /// /// ### Exporting the driver for downstream crates' templates /// // Really, the documentation about this in `pub-a.rs` and `pub-b.rs`, // should be somewhere in our rustdoc output. // But I don't want to put it *here* because it would completely // dominate this macro documentation. // So for now just reference the source tree docs. // (We can't really easily provide even a link.) // I think this is such a minority feature, // that hiding the docs like this is OK. // /// To cause the macro embodying the driver struct to be exported, /// write: /// `#[derive_deftly_adhoc(export)]`. /// The driver can then be derived from in other crates, /// with `derive_deftly_adhoc! { exporting_crate::DriverStruct: ... }`. /// /// #### Semver hazards /// /// This is a tricky feature, /// which should only be used by experts /// who fully understand the implications. /// It effectively turns the body of the struct into a macro, /// with a brittle API /// and very limited support for namespacing or hygiene. /// /// See `pub mod a_driver` in the example file `pub-a.rs`, /// in the source tree, /// for a fuller discussion of the implications, /// and some advice. /// /// If you do this, you must **pin your derive-deftly** to a minor version, /// as you may need to treat *minor* version updates in derive-deftly /// as semver breaks for your crate. /// And every time you update, you must read the `CHANGELOG.md`, /// since there is nothing that will warn you automatically /// about breaking changes. 
// // This is the implementation of #[derive(Deftly)] #[cfg_attr( proc_macro, proc_macro_derive( Deftly, attributes(deftly, derive_deftly, derive_deftly_adhoc) ) )] pub fn derive_deftly( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { wrap_macro_func(derive::derive_deftly, input) } /// Check semver compatibility, for a crate which exports macros /// /// /// /// /// Causes a compilation error /// if and only if the specified version of `derive-deftly` /// is prior to the last *relevant change*, /// compared to the currently-running one. /// /// A *relevant change* is one which has semver implications /// for the API of a crate which exports derive-deftly templates. /// /// ## When and how to call this /// /// If you export templates, with `define_derive_deftly! { export ... }`, /// call this macro too, once in your crate. /// /// Pass it the version of `derive-deftly` that was current, /// when you last read the `derive-deftly` changelog /// and considered breaking changes. /// /// (The argument must be a string literal, containing a /// 2- or 3-element version number. /// If the 3rd element is omitted, 0 is used.) /// /// ## Guarantee /// /// You can upgrade your derive-deftly version, /// even across a semver-breaking change to derive-deftly, /// without making any consequential update to your crate's own semver. /// /// If a new version of derive-adhoc means *your* crate's /// API has semver-relevant changes, this macro will throw an error. /// (Of course that will only happen across semver-breaking /// updates of derive-deftly.) /// /// (Exporting a *driver* struct for derivation in downstream crates, /// `#[derive_deftly_adhoc(export)]`, is not covered by this promise.) 
/// /// ## Example /// /// ``` /// # use derive_deftly_macros as derive_deftly; /// derive_deftly::template_export_semver_check!("0.13.0"); /// ``` #[cfg_attr(proc_macro, proc_macro)] pub fn template_export_semver_check( input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { wrap_macro_func(semver::template_export_semver_check_func_macro, input) } derive-deftly-macros-1.0.0/meta.rs000064400000000000000000001524601046102023000151530ustar 00000000000000//! `#[deftly(...)]` meta attributes //! //! # Used meta checking //! //! Most of this file is concerned with generating //! accurate and useful messages //! when a driver is decorated with `#[deftly(...)]` attributes //! which are not used by any template. //! //! We distinguish "used" metas from "recognised" ones. //! //! "Used" ones are those actually tested, and used, //! during the dynamic expansion of the template. //! They are recorded in the [`PreprocessedMetas`], //! which contains a `Cell` for each supplied node. //! //! "Recognised" ones are those which appear anywhere in the template. //! These are represented in a data structure [``Recognised`]. //! This is calculated by scanning the template, //! using the `FindRecogMetas` trait. //! //! Both of these sets are threaded through //! the ACCUM data in successive template expansions; //! in the final call (`EngineFinalInput`), //! they are combined together, //! and the driver's metas are checked against them. 
use super::framework::*;
use indexmap::IndexMap;
use Usage as U;

//---------- common definitions ----------

/// Indicates one of `fmeta`, `vmeta` or `tmeta`
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
#[derive(AsRefStr, EnumString, EnumIter)]
#[rustfmt::skip]
pub enum Scope {
    // NB these keywords are duplicated in SubstDetails
    #[strum(serialize = "tmeta")] T,
    #[strum(serialize = "vmeta")] V,
    #[strum(serialize = "fmeta")] F,
}

/// Scope of a *supplied* meta (`#[deftly(...)]`) attribute
///
/// Also encodes, for metas at the toplevel,
/// whether it's a struct or an enum.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] //
#[derive(strum::Display, EnumIter)]
#[strum(serialize_all = "snake_case")]
pub enum SuppliedScope {
    Struct,
    Enum,
    Variant,
    Field,
}

impl SuppliedScope {
    /// Which template scopes (`tmeta`/`vmeta`/`fmeta`) can see a meta
    /// supplied at this position in the driver.
    // NOTE(review): the item type of this `impl Iterator` return
    // (presumably `<Item = Scope>`) appears to have been lost in
    // packaging/extraction - TODO confirm against upstream.
    fn recog_search(self) -> impl Iterator {
        use Scope as S;
        use SuppliedScope as SS;
        match self {
            // Struct toplevel metas are reachable as both tmeta and vmeta
            SS::Struct => &[S::T, S::V] as &[_],
            SS::Enum => &[S::T],
            SS::Variant => &[S::V],
            SS::Field => &[S::F],
        }
        .iter()
        .copied()
    }
}

/// `(foo(bar))` in eg `fmeta(foo(bar))`
///
/// includes the parens
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Label {
    // Nonempty list, each with nonempty segments.
    // Outermost first.
pub lpaths: Vec, } /// Meta designator eg `fmeta(foo(bar))` // Field order must be the same as BorrowedDesig #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct Desig { pub scope: Scope, pub label: Label, } #[derive(Hash)] // Field order must be the same as meta::Desig struct BorrowedDesig<'p> { pub scope: Scope, pub lpaths: &'p [&'p syn::Path], } //---------- substitutions in a template ---------- #[derive(Debug)] pub struct SubstMeta { pub desig: Desig, pub as_: Option>, pub default: Option<(Argument, O::NotInBool, beta::Enabled)>, } #[derive(Debug, Clone, AsRefStr, Display)] #[allow(non_camel_case_types)] // clearer to use the exact ident pub enum SubstAs { expr(O::NotInBool, O::NotInPaste, SubstAsSupported), ident(O::NotInBool), items(O::NotInBool, O::NotInPaste, SubstAsSupported), path(O::NotInBool), str(O::NotInBool), token_stream(O::NotInBool, O::NotInPaste), ty(O::NotInBool), } //---------- meta attrs in a driver ---------- /// A part like `(foo,bar(baz),zonk="value")` #[derive(Debug)] pub struct PreprocessedValueList { pub content: Punctuated, } /// `#[deftly(...)]` helper attributes pub type PreprocessedMetas = Vec; /// An `#[deftly()]` attribute, or a sub-tree within one /// /// Has interior mutability, for tracking whether the value is used. /// (So should ideally not be Clone, to help avoid aliasing bugs.) #[derive(Debug)] pub struct PreprocessedTree { pub path: syn::Path, pub value: PreprocessedValue, pub used: Cell>, } /// Content of a meta attribute /// /// Examples in doc comments are for /// `PreprocessedMeta.path` of `foo`, /// ie the examples are for `#[deftly(foo ..)]`. 
#[derive(Debug)] pub enum PreprocessedValue { /// `#[deftly(foo)]` Unit, /// `#[deftly(foo = "lit")]` Value { value: syn::Lit }, /// `#[deftly(foo(...))]` List(PreprocessedValueList), } //---------- search and match results ---------- /// Node in tree structure found in driver `#[deftly(some(thing))]` #[derive(Debug)] pub struct FoundNode<'l> { kind: FoundNodeKind<'l>, path_span: Span, ptree: &'l PreprocessedTree, } /// Node in tree structure found in driver `#[deftly(some(thing))]` #[derive(Debug)] pub enum FoundNodeKind<'l> { Unit, Lit(&'l syn::Lit), } /// Information about a nearby meta node we found /// /// "Nearby" means that the node we found is a prefix (in tree descent) /// of the one we were looking for, or vice versa. #[derive(Debug)] pub struct FoundNearbyNode<'l> { pub kind: FoundNearbyNodeKind, /// Span of the identifier in the actual `#[deftly]` driver attribute pub path_span: Span, pub ptree: &'l PreprocessedTree, } /// How the nearby node relates to the one we were looking for #[derive(Debug)] pub enum FoundNearbyNodeKind { /// We were looking to go deeper, but found a unit in `#[deftly]` Unit, /// We were looking to go deeper, but found a `name = value` in `#[deftly]` Lit, /// We were looking for a leaf, but we found nested list in `#[deftly]` List, } pub use FoundNearbyNodeKind as FNNK; pub use FoundNodeKind as FNK; //---------- meta attr enumeration and checking ---------- /// Whether a meta node was used (or ought to be used) #[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, Eq, PartialEq)] // #[derive(EnumIter)] pub enum Usage { BoolOnly, Value, } /// One lot of used metas in accumulation - argument to a `_meta_used` accum #[derive(Debug)] pub struct UsedGroup { pub content: TokenStream, } /// Something representing possibly checking that meta attributes are used #[derive(Debug, Clone)] pub enum CheckUsed { /// Yes, check them, by/with/from/to `T` Check(T), /// No, don't check them. 
Unchecked, } /// Information for meta checking, found in accumulation #[derive(Debug, Default)] pub struct Accum { pub recog: Recognised, pub used: Vec, } #[derive(Default, Debug, Clone)] pub struct Recognised { map: IndexMap, } pub trait FindRecogMetas { /// Search for `fmeta(..)` etc. expansions /// /// Add to `acc` any that are /// (recusively) within `self`, syntactically, fn find_recog_metas(&self, acc: &mut Recognised); } //==================== implementations) ==================== //---------- template parsing ---------- impl SubstMeta { fn span_whole(&self, scope_span: Span) -> Span { spans_join(chain!( [scope_span], // self.desig.label.spans(), )) .unwrap() } } impl Label { /// Nonempty pub fn spans(&self) -> impl Iterator + '_ { self.lpaths.iter().map(|path| path.span()) } } impl SubstAs { fn parse(input: ParseStream, nb: O::NotInBool) -> syn::Result { let kw: IdentAny = input.parse()?; let from_sma = |sma: SubstAs<_>| Ok(sma); // See keyword_general! in utils.rs macro_rules! keyword { { $($args:tt)* } => { keyword_general! { kw from_sma SubstAs; $($args)* } } } let not_in_paste = || O::not_in_paste(&kw); fn supported

(kw: &IdentAny) -> syn::Result> where P: SubstAsSupportStatus, { SubstAsSupportStatus::new(&kw) } keyword! { expr(nb, not_in_paste()?, supported(&kw)?) } keyword! { ident(nb) } keyword! { items(nb, not_in_paste()?, supported(&kw)?) } keyword! { path(nb) } keyword! { str(nb) } keyword! { token_stream(nb, not_in_paste()?) } keyword! { ty(nb) } Err(kw.error("unknown derive-deftly 'as' syntax type keyword")) } } impl SubstMeta { pub fn parse( input: ParseStream, kw_span: Span, scope: Scope, ) -> syn::Result { if input.is_empty() { O::missing_keyword_arguments(kw_span)?; } let label: Label = input.parse()?; fn store( kw: Span, already: &mut Option<(Span, V)>, call: impl FnOnce() -> syn::Result, ) -> syn::Result<()> { if let Some((already, _)) = already { return Err([(*already, "first"), (kw, "second")] .error("`${Xmeta ..}` option repeated")); } let v = call()?; *already = Some((kw, v)); Ok(()) } let mut as_ = None::<(Span, SubstAs)>; let mut default = None; while !O::IS_BOOL && !input.is_empty() { let keyword = Ident::parse_any(input)?; let kw_span = keyword.span(); let nb = O::not_in_bool(&kw_span).expect("checked already"); let ue = || beta::Enabled::new_for_syntax(kw_span); if keyword == "as" { store(kw_span, &mut as_, || SubstAs::parse(input, nb))?; } else if keyword == "default" { store(kw_span, &mut default, || { Ok((input.parse()?, nb, ue()?)) })?; } else { return Err(keyword.error("unknown option in `${Xmeta }`")); } if input.is_empty() { break; } let _: Token![,] = input.parse()?; } macro_rules! ret { { $( $f:ident )* } => { SubstMeta { desig: Desig { label, scope }, $( $f: $f.map(|(_span, v)| v), )* } } } Ok(ret! 
{ as_ default }) } } //---------- driver parsing ---------- impl PreprocessedValueList { fn parse_outer(input: ParseStream) -> syn::Result { let meta; let _paren = parenthesized!(meta in input); Self::parse_inner(&meta) } } impl PreprocessedValueList { pub fn parse_inner(input: ParseStream) -> syn::Result { let content = Punctuated::parse_terminated(input)?; Ok(PreprocessedValueList { content }) } } impl Parse for PreprocessedTree { fn parse(input: ParseStream) -> syn::Result { use PreprocessedValue as PV; let path = input.call(syn::Path::parse_mod_style)?; let la = input.lookahead1(); let value = if la.peek(Token![=]) { let _: Token![=] = input.parse()?; let value = input.parse()?; PV::Value { value } } else if la.peek(token::Paren) { let list = input.call(PreprocessedValueList::parse_outer)?; PV::List(list) } else if la.peek(Token![,]) || input.is_empty() { PV::Unit } else { return Err(la.error()); }; let used = None.into(); // will be filled in later Ok(PreprocessedTree { path, value, used }) } } impl Parse for Label { fn parse(outer: ParseStream) -> syn::Result { fn recurse( lpaths: &mut Vec, outer: ParseStream, ) -> syn::Result<()> { let input; let paren = parenthesized!(input in outer); let path = input.call(syn::Path::parse_mod_style)?; if path.segments.is_empty() { return Err(paren .span .error("`deftly` attribute must have nonempty path")); } lpaths.push(path); if !input.is_empty() { recurse(lpaths, &input)?; } Ok(()) } let mut lpaths = vec![]; recurse(&mut lpaths, outer)?; Ok(Label { lpaths }) } } //---------- searching and matching ---------- impl Label { /// Caller must note meta attrs that end up being used! 
pub fn search<'a, F, G, E>( &self, pmetas: &'a [PreprocessedValueList], f: &mut F, g: &mut G, ) -> Result<(), E> where F: FnMut(FoundNode<'a>) -> Result<(), E>, G: FnMut(FoundNearbyNode<'a>) -> Result<(), E>, { for m in pmetas { for l in &m.content { Self::search_1(&self.lpaths, l, &mut *f, &mut *g)?; } } Ok(()) } fn search_1<'a, E, F, G>( // Nonempty lpaths: &[syn::Path], ptree: &'a PreprocessedTree, f: &mut F, g: &mut G, ) -> Result<(), E> where F: FnMut(FoundNode<'a>) -> Result<(), E>, G: FnMut(FoundNearbyNode<'a>) -> Result<(), E>, { use PreprocessedValue as PV; if ptree.path != lpaths[0] { return Ok(()); } let path_span = ptree.path.span(); let mut nearby = |kind| { g(FoundNearbyNode { kind, path_span, ptree, }) }; let deeper = if lpaths.len() <= 1 { None } else { Some(&lpaths[1..]) }; match (deeper, &ptree.value) { (None, PV::Unit) => f(FoundNode { path_span, kind: FNK::Unit, ptree, })?, (None, PV::List(_)) => nearby(FNNK::List)?, (None, PV::Value { value, .. }) => f(FoundNode { path_span, kind: FNK::Lit(value), ptree, })?, (Some(_), PV::Value { .. }) => nearby(FNNK::Lit)?, (Some(_), PV::Unit) => nearby(FNNK::Unit)?, (Some(d), PV::List(l)) => { for m in l.content.iter() { Self::search_1(d, m, &mut *f, &mut *g)?; } } } Ok(()) } } impl Label { pub fn search_eval_bool( &self, pmetas: &PreprocessedMetas, ) -> Result<(), Found> { let found = |ptree: &PreprocessedTree| { ptree.update_used(Usage::BoolOnly); Err(Found) }; self.search( pmetas, &mut |av| /* got it! 
*/ found(av.ptree), &mut |nearby| match nearby.kind { FNNK::List => found(nearby.ptree), FNNK::Unit => Ok(()), FNNK::Lit => Ok(()), }, ) } } //---------- scope and designator handling ---------- impl SubstMeta where O: SubstParseContext, { pub fn repeat_over(&self) -> Option { match self.desig.scope { Scope::T => None, Scope::V => Some(RO::Variants), Scope::F => Some(RO::Fields), } } } impl SubstMeta where O: SubstParseContext, { pub fn pmetas<'c>( &self, ctx: &'c Context<'c>, kw_span: Span, ) -> syn::Result<&'c PreprocessedMetas> { Ok(match self.desig.scope { Scope::T => &ctx.pmetas, Scope::V => &ctx.variant(&kw_span)?.pmetas, Scope::F => &ctx.field(&kw_span)?.pfield.pmetas, }) } } impl ToTokens for Label { fn to_tokens(&self, out: &mut TokenStream) { let mut lpaths = self.lpaths.iter().rev(); let mut current = lpaths.next().expect("empty path!").to_token_stream(); let r = loop { let span = current.span(); let mut group = proc_macro2::Group::new(Delimiter::Parenthesis, current); group.set_span(span); let wrap = if let Some(y) = lpaths.next() { y } else { break group; }; current = quote! 
{ #wrap #group }; }; r.to_tokens(out); } } impl Desig { fn to_tokens(&self, scope_span: Span, out: &mut TokenStream) { let scope: &str = self.scope.as_ref(); Ident::new(scope, scope_span).to_tokens(out); self.label.to_tokens(out); } } impl Parse for Desig { fn parse(input: ParseStream) -> syn::Result { let scope: syn::Ident = input.parse()?; let scope = scope .to_string() .parse() .map_err(|_| scope.error("invalid meta keyword/level"))?; let label = input.parse()?; Ok(Self { scope, label }) } } impl indexmap::Equivalent for BorrowedDesig<'_> { fn equivalent(&self, desig: &Desig) -> bool { let BorrowedDesig { scope, lpaths } = self; *scope == desig.scope && itertools::equal(lpaths.iter().copied(), &desig.label.lpaths) } } /// `Display`s as a `#[deftly(...)]` as the user might write it struct DisplayAsIfSpecified<'r> { lpaths: &'r [&'r syn::Path], /// Included after the innermost lpath, inside the parens inside_after: &'r str, } impl Display for DisplayAsIfSpecified<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "#[deftly")?; for p in self.lpaths { write!(f, "({}", p.to_token_stream())?; } write!(f, "{}", self.inside_after)?; for _ in self.lpaths { write!(f, ")")?; } Ok(()) } } // Tests that our `BorrowedDesig` `equivalent` impl is justified. 
#[test] fn check_borrowed_desig() { use super::*; use indexmap::Equivalent; use itertools::iproduct; use std::hash::{Hash, Hasher}; #[derive(PartialEq, Eq, Debug, Default)] struct TrackingHasher(Vec>); impl Hasher for TrackingHasher { fn write(&mut self, bytes: &[u8]) { self.0.push(bytes.to_owned()); } fn finish(&self) -> u64 { unreachable!() } } impl TrackingHasher { fn hash(t: impl Hash) -> Self { let mut self_ = Self::default(); t.hash(&mut self_); self_ } } type Case = (Scope, &'static [&'static str]); const TEST_CASES: &[Case] = &[ (Scope::T, &["path"]), (Scope::T, &["r#path"]), (Scope::V, &["path", "some::path"]), (Scope::V, &["r#struct", "with_generics::<()>"]), (Scope::F, &[]), // illegal Desig, but test anyway ]; struct Desigs<'b> { owned: Desig, borrowed: BorrowedDesig<'b>, } impl Desigs<'_> { fn with((scope, lpaths): &Case, f: impl FnOnce(Desigs<'_>)) { let scope = *scope; let lpaths = lpaths .iter() .map(|l| syn::parse_str(l).expect(l)) .collect_vec(); let owned = { let label = Label { lpaths: lpaths.clone(), }; Desig { scope, label } }; let lpaths_borrowed; let borrowed = { lpaths_borrowed = lpaths.iter().collect_vec(); BorrowedDesig { scope, lpaths: &*lpaths_borrowed, } }; f(Desigs { owned, borrowed }) } } // Test that for each entry in TEST_CASES, when parsed into Paths, etc., // BorrowedDesig is `equivalent` to, and hashes the same as, Desig. for case in TEST_CASES { Desigs::with(case, |d| { assert!(d.borrowed.equivalent(&d.owned)); assert_eq!( TrackingHasher::hash(&d.owned), TrackingHasher::hash(&d.borrowed), ); }); } // Compare every TEST_CASES entry with every other entry. // See if the owned forms are equal (according to `PartialEq`). // Insist that the Borrowed vs owned `equivalent` relation agrees, // in both directions. // And, if they are equal, insist that the hashes all agree. 
for (case0, case1) in iproduct!(TEST_CASES, TEST_CASES) { Desigs::with(case0, |d0| { Desigs::with(case1, |d1| { let equal = d0.owned == d1.owned; assert_eq!(equal, d0.borrowed.equivalent(&d1.owned)); assert_eq!(equal, d1.borrowed.equivalent(&d0.owned)); if equal { let hash = TrackingHasher::hash(&d0.owned); assert_eq!(hash, TrackingHasher::hash(&d1.owned)); assert_eq!(hash, TrackingHasher::hash(&d0.borrowed)); assert_eq!(hash, TrackingHasher::hash(&d1.borrowed)); } }); }); } } //---------- conditional support for `Xmeta as items` ---------- #[cfg(feature = "meta-as-expr")] pub type ValueExpr = syn::Expr; #[cfg(not(feature = "meta-as-expr"))] pub type ValueExpr = MetaUnsupported; #[cfg(feature = "meta-as-items")] pub type ValueItems = Concatenated; #[cfg(not(feature = "meta-as-items"))] pub type ValueItems = MetaUnsupported; /// newtype to avoid coherence - it doesn't impl `Parse + ToTokens` #[derive(Debug, Copy, Clone)] pub struct MetaUnsupported(Void); #[derive(Debug, Copy, Clone)] pub struct SubstAsSupported(P::Marker); /// Implemented for syn types supported in this build, and `MetaUnsupported` pub trait SubstAsSupportStatus: Sized { type Marker; type Parsed: Parse + ToTokens; fn new(kw: &IdentAny) -> syn::Result>; } impl SubstAsSupported

{ fn infer_type(&self, _parsed: &P::Parsed) {} } impl SubstAsSupportStatus for T { type Marker = (); type Parsed = T; fn new(_kw: &IdentAny) -> syn::Result> { Ok(SubstAsSupported(())) } } impl SubstAsSupportStatus for MetaUnsupported { type Marker = MetaUnsupported; type Parsed = TokenStream; fn new(kw: &IdentAny) -> syn::Result> { Err(kw.error(format_args!( // We're a bit fast and loose here: if kw contained `_`, // or there were aliases, this message would be a bit wrong. "${{Xmeta as {}}} used but cargo feature meta-as-{} disabled", **kw, **kw, ))) } } impl ToTokens for MetaUnsupported { fn to_tokens(&self, _out: &mut TokenStream) { void::unreachable(self.0) } } //---------- template expansion ---------- impl SubstMeta where O: ExpansionOutput, TemplateElement: Expand, { pub fn expand( &self, ctx: &Context, kw_span: Span, out: &mut O, pmetas: &PreprocessedMetas, ) -> syn::Result<()> { let SubstMeta { desig, as_, default, } = self; let mut found = None::; let mut hint = None::; let span_whole = self.span_whole(kw_span); let self_loc = || (span_whole, "expansion"); let error_loc = || [ctx.error_loc(), self_loc()]; desig.label.search( pmetas, &mut |av: FoundNode| { if let Some(first) = &found { return Err([(first.path_span, "first occurrence"), (av.path_span, "second occurrence"), self_loc()].error( "tried to expand just attribute value, but it was specified multiple times" )); } found = Some(av); Ok(()) }, &mut |nearby| { hint.get_or_insert(nearby); Ok(()) }, )?; if let (None, Some((def, ..))) = (&found, default) { return Ok(def.expand(ctx, out)); } let found = found.ok_or_else(|| { if let Some(hint) = hint { let hint_msg = match hint.kind { FNNK::Unit => "expected a list with sub-attributes, found a unit", FNNK::Lit => "expected a list with sub-attributes, found a simple value", FNNK::List => "expected a leaf node, found a list with sub-attributes", }; let mut err = hint.path_span.error(hint_msg); err.combine(error_loc().error( "attribute value expanded, but no 
suitable value in data structure definition" )); err } else { error_loc().error( "attribute value expanded, but no value in data structure definition" ) } })?; found.ptree.update_used(Usage::Value); found.expand(span_whole, as_, out)?; Ok(()) } } fn metavalue_spans(tspan: Span, vspan: Span) -> [ErrorLoc<'static>; 2] { [(vspan, "attribute value"), (tspan, "template")] } /// Obtain the `LiStr` from a meta node value (ie, a `Lit`) /// /// This is the thing we actually use. /// Non-string-literal values are not allowed. fn metavalue_litstr<'l>( lit: &'l syn::Lit, tspan: Span, msg: fmt::Arguments<'_>, ) -> syn::Result<&'l syn::LitStr> { match lit { syn::Lit::Str(s) => Ok(s), // having checked derive_builder, it doesn't handle // Lit::Verbatim so I guess we don't need to either. _ => Err(metavalue_spans(tspan, lit.span()).error(msg)), } } /// Convert a literal found in a meta item into `T` /// /// `into_what` is used only for error reporting pub fn metavalue_lit_as( lit: &syn::Lit, tspan: Span, into_what: &dyn Display, ) -> syn::Result where T: Parse + ToTokens, { let s = metavalue_litstr( lit, tspan, format_args!( "expected string literal, for conversion to {}", into_what, ), )?; let t: TokenStream = s.parse().map_err(|e| { // Empirically, parsing a LitStr in actual proc macro context, with // proc_macro2, into tokens, can generate a lexical error with a // "fallback" Span. Then, attempting to render the results, // including the eventual compiler_error! invocation, back to // a compiler proc_ma cor::TokenStream can panic with // "compiler/fallback mismatch". // // https://github.com/dtolnay/syn/issues/1504 // // Attempt to detect this situation. 
match (|| { let _: String = (&e).into_iter().next()?.span().source_text()?; Some(()) })() { Some(()) => e, None => lit.span().error(e.to_string()), } })?; let thing: T = syn::parse2(t)?; Ok(thing) } impl<'l> FoundNode<'l> { fn expand( &self, tspan: Span, as_: &Option>, out: &mut O, ) -> syn::Result<()> where O: ExpansionOutput, { let spans = |vspan| metavalue_spans(tspan, vspan); let lit = match self.kind { FNK::Unit => return Err(spans(self.path_span).error( "tried to expand attribute which is just a unit, not a literal" )), FNK::Lit(lit) => lit, }; use SubstAs as SA; let default_buf; let as_ = match as_ { Some(as_) => as_, None => { default_buf = O::default_subst_meta_as(tspan)?; &default_buf } }; match as_ { as_ @ SA::expr(.., np, supported) => { let expr = metavalue_lit_as(lit, tspan, as_)?; supported.infer_type(&expr); out.append_tokens(np, Grouping::Parens.surround(expr))?; } as_ @ SA::ident(..) => { let ident: IdentAny = metavalue_lit_as(lit, tspan, as_)?; out.append_identfrag_toks(&*ident)?; } SA::items(_, np, supported) => { let items = metavalue_lit_as(lit, tspan, &"items")?; supported.infer_type(&items); out.append_tokens(np, items)?; } as_ @ SA::path(..) => out.append_syn_type( tspan, syn::Type::Path(metavalue_lit_as(lit, tspan, as_)?), Grouping::Invisible, ), SA::str(..) => { let s = metavalue_litstr( lit, tspan, format_args!("expected string literal, for meta value",), )?; out.append_syn_litstr(s); } as_ @ SA::ty(..) 
=> out.append_syn_type( tspan, metavalue_lit_as(lit, tspan, as_)?, Grouping::Invisible, ), SA::token_stream(_, np) => { let tokens: TokenStream = metavalue_lit_as(lit, tspan, &"tokens")?; out.append_tokens(np, tokens)?; } } Ok(()) } } //==================== implementations - usage checking ==================== impl Parse for CheckUsed { fn parse(input: ParseStream) -> syn::Result { let la = input.lookahead1(); Ok(if la.peek(Token![*]) { let _star: Token![*] = input.parse()?; mCU::Unchecked } else if la.peek(token::Bracket) { let group: proc_macro2::Group = input.parse()?; let content = group.stream(); mCU::Check(UsedGroup { content }) } else { return Err(la.error()); }) } } impl Recognised { /// Ensures that `self[k] >= v` pub fn update(&mut self, k: Desig, v: Usage) { let ent = self.map.entry(k).or_insert(v); *ent = cmp::max(*ent, v); } } impl ToTokens for Recognised { fn to_tokens(&self, out: &mut TokenStream) { for (desig, allow) in &self.map { match allow { U::BoolOnly => { out.extend(quote! { ? }); } U::Value => {} } desig.to_tokens(Span::call_site(), out); } } } impl PreprocessedTree { pub fn update_used(&self, ra: Usage) { self.used.set(cmp::max(self.used.get(), Some(ra))); } } //---------- decoding used metas ---------- impl PreprocessedValueList { fn decode_update_used(&self, input: ParseStream) -> syn::Result<()> { use PreprocessedValue as PV; for ptree in &self.content { if input.is_empty() { return Ok(()); } if !input.peek(Token![,]) { let path = input.call(syn::Path::parse_mod_style)?; if path != ptree.path { return Err([ (path.span(), "found"), (ptree.path.span(), "expected"), ].error( "mismatch (desynchronised) incorporating previous expansions' used metas" )); } let used = if input.peek(Token![=]) { let _: Token![=] = input.parse()?; Some(U::Value) } else if input.peek(Token![?]) { let _: Token![?] 
= input.parse()?; Some(U::BoolOnly) } else { None }; if let Some(used) = used { ptree.update_used(used); } if input.peek(token::Paren) { let inner; let paren = parenthesized!(inner in input); let sub_list = match &ptree.value { PV::Unit | PV::Value { .. } => return Err([ (paren.span.open(), "found"), (ptree.path.span(), "defined"), ].error( "mismatch (tree vs terminal) incorporating previous expansions' used metas" )), PV::List(l) => l, }; sub_list.decode_update_used(&inner)?; } } if input.is_empty() { return Ok(()); } let _: Token![,] = input.parse()?; } Ok(()) } } impl<'c> Context<'c> { pub fn decode_update_metas_used( &self, input: /* group content */ ParseStream, ) -> syn::Result<()> { #[derive(Default)] struct Intended { variant: Option, field: Option>, attr_i: usize, } let mut intended = Intended::default(); let mut visit = |pmetas: &PreprocessedMetas, current_variant: Option<&syn::Ident>, current_field: Option>| { loop { let la = input.lookahead1(); if input.is_empty() { // keep visiting until we exit all the loops return Ok(()); } else if la.peek(Token![::]) { let _: Token![::] = input.parse()?; intended = Intended { variant: Some(input.parse()?), field: None, attr_i: 0, }; } else if la.peek(Token![.]) { let _: Token![.] = input.parse()?; intended.field = Some(match input.parse()? 
{ syn::Member::Named(n) => Either::Left(n), syn::Member::Unnamed(i) => Either::Right(i.index), }); intended.attr_i = 0; } else if { let intended_field_refish = intended .field .as_ref() .map(|some: &Either<_, _>| some.as_ref()); !(current_variant == intended.variant.as_ref() && current_field == intended_field_refish) } { // visit subsequent things, hopefully one will match return Ok(()); } else if la.peek(token::Paren) { // we're in the right place and have found a #[deftly()] let i = intended.attr_i; intended.attr_i += 1; let m = pmetas.get(i).ok_or_else(|| { input.error("more used metas, out of range!") })?; let r; let _ = parenthesized!(r in input); m.decode_update_used(&r)?; } else { return Err(la.error()); } } }; visit(&self.pmetas, None, None)?; WithinVariant::for_each(self, |ctx, wv| { let current_variant = wv.variant.map(|wv| &wv.ident); if !wv.is_struct_toplevel_as_variant() { visit(&wv.pmetas, current_variant, None)?; } WithinField::for_each(ctx, |_ctx, wf| { let current_field = if let Some(ref ident) = wf.field.ident { Either::Left(ident) } else { Either::Right(&wf.index) }; visit(&wf.pfield.pmetas, current_variant, Some(current_field)) }) }) // if we didn't consume all of the input, due to mismatches/ // misordering, then syn will give an error for us } } //---------- encoding used metas --------- impl PreprocessedTree { /// Returns `(....)` fn encode_useds( list: &PreprocessedValueList, ) -> Option { let preamble = syn::parse::Nothing; let sep = Token![,](Span::call_site()); let mut ts = TokenStream::new(); let mut ot = TokenOutputTrimmer::new(&mut ts, &preamble, &sep); for t in &list.content { t.encode_used(&mut ot); ot.push_sep(); } if ts.is_empty() { None } else { Some(proc_macro2::Group::new(Delimiter::Parenthesis, ts)) } } /// Writes `path?=(...)` (or, rather, the parts of it that are needed) fn encode_used(&self, out: &mut TokenOutputTrimmer) { use PreprocessedValue as PV; struct OutputTrimmerWrapper<'or, 'o, 't, 'p> { // None if we have written 
the path already path: Option<&'p syn::Path>, out: &'or mut TokenOutputTrimmer<'t, 'o>, } let mut out = OutputTrimmerWrapper { path: Some(&self.path), out, }; impl OutputTrimmerWrapper<'_, '_, '_, '_> { fn push_reified(&mut self, t: &dyn ToTokens) { if let Some(path) = self.path.take() { self.out.push_reified(path); } self.out.push_reified(t); } } let tspan = Span::call_site(); if let Some(used) = self.used.get() { match used { U::BoolOnly => out.push_reified(&Token![?](tspan)), U::Value => out.push_reified(&Token![=](tspan)), } } match &self.value { PV::Unit | PV::Value { .. } => {} PV::List(l) => { if let Some(group) = PreprocessedTree::encode_useds(l) { out.push_reified(&group); } } } } } impl<'c> Context<'c> { /// Returns `[::Variant .field () ...]` pub fn encode_metas_used(&self) -> proc_macro2::Group { let parenthesize = |ts| proc_macro2::Group::new(Delimiter::Parenthesis, ts); let an_empty = parenthesize(TokenStream::new()); let mut ts = TokenStream::new(); struct Preamble<'p> { variant: Option<&'p syn::Variant>, field: Option<&'p WithinField<'p>>, } impl ToTokens for Preamble<'_> { fn to_tokens(&self, out: &mut TokenStream) { let span = Span::call_site(); if let Some(v) = self.variant { Token![::](span).to_tokens(out); v.ident.to_tokens(out); } if let Some(f) = self.field { Token![.](span).to_tokens(out); f.fname(span).to_tokens(out); } } } let mut last_variant: *const syn::Variant = ptr::null(); let mut last_field: *const syn::Field = ptr::null(); fn ptr_of_ref<'i, InDi>(r: Option<&'i InDi>) -> *const InDi { r.map(|r| r as _).unwrap_or_else(ptr::null) } let mut encode = |pmetas: &PreprocessedMetas, wv: Option<&WithinVariant>, wf: Option<&WithinField>| { let now_variant: *const syn::Variant = ptr_of_ref(wv.map(|wv| wv.variant).flatten()); let now_field: *const syn::Field = ptr_of_ref(wf.map(|wf| wf.field)); let preamble = Preamble { variant: (!ptr::eq(last_variant, now_variant)).then(|| { last_field = ptr::null(); let v = wv.expect("had WithinVariant, now 
not"); v.variant.expect("variant was syn::Variant, now not") }), field: (!ptr::eq(last_field, now_field)).then(|| { wf.expect("had WithinField (Field), now not") // }), }; let mut ot = TokenOutputTrimmer::new(&mut ts, &preamble, &an_empty); for m in pmetas { if let Some(group) = PreprocessedTree::encode_useds(m) { ot.push_reified(group); } else { ot.push_sep(); } } if ot.did_preamble().is_some() { last_variant = now_variant; last_field = now_field; } Ok::<_, Void>(()) }; encode(&self.pmetas, None, None).void_unwrap(); WithinVariant::for_each(self, |ctx, wv| { if !wv.is_struct_toplevel_as_variant() { encode(&wv.pmetas, Some(wv), None)?; } WithinField::for_each(ctx, |_ctx, wf| { encode(&wf.pfield.pmetas, Some(wv), Some(wf)) }) }) .void_unwrap(); proc_macro2::Group::new(Delimiter::Bracket, ts) } } //---------- checking used metas ---------- struct UsedChecker<'c, 'e> { current: Vec<&'c syn::Path>, reported: &'e mut HashSet

ToTokensPunctComposable for Option<&&P>
where
    P: ToTokens,
    P: Default,
{
    fn to_tokens_punct_composable(&self, out: &mut TokenStream) {
        // Emit the value if present; otherwise a default-constructed P
        // stands in, so the output stays punctuation-composable.
        if let Some(self_) = self {
            self_.to_tokens(out)
        } else {
            P::default().to_tokens(out)
        }
    }
}

//---------- ErrorAccumulator ----------

/// Contains zero or more `syn::Error`
///
/// # Panics
///
/// Panics if dropped.
///
/// You must call one of the consuming methods, eg `finish`
#[derive(Debug, Default)]
pub struct ErrorAccumulator {
    /// The errors so far, merged into one via `syn::Error::combine`
    bad: Option<syn::Error>,
    /// Set by the consuming methods; checked by `Drop`
    defused: bool,
}

impl ErrorAccumulator {
    /// Run `f`, accumulate any error, and return an `Ok`
    pub fn handle_in<T, F>(&mut self, f: F) -> Option<T>
    where
        F: FnOnce() -> syn::Result<T>,
    {
        self.handle(f())
    }

    /// Handle a `Result`: accumulate any error, and return an `Ok`
    pub fn handle<T>(&mut self, result: syn::Result<T>) -> Option<T> {
        match result {
            Ok(y) => Some(y),
            Err(e) => {
                self.push(e);
                None
            }
        }
    }

    /// Accumulate an error
    pub fn push(&mut self, err: syn::Error) {
        if let Some(bad) = &mut self.bad {
            bad.combine(err)
        } else {
            self.bad = Some(err);
        }
    }

    /// If there were any errors, return a single error that combines them
    #[allow(dead_code)]
    pub fn finish(self) -> syn::Result<()> {
        self.finish_with(())
    }

    /// If there were any errors, return `Err`, otherwise `Ok(success)`
    pub fn finish_with<T>(self, success: T) -> syn::Result<T> {
        match self.into_inner() {
            None => Ok(success),
            Some(bad) => Err(bad),
        }
    }

    /// If there were any errors, return a single error that combines them
    pub fn into_inner(mut self) -> Option<syn::Error> {
        self.defused = true;
        self.bad.take()
    }
}

impl Drop for ErrorAccumulator {
    fn drop(&mut self) {
        // Dropping without consuming is a bug in our code - unless we
        // are already unwinding from some other panic.
        assert!(panicking() || self.defused);
    }
}

//---------- Template and driver export ----------

/// Token `export` (or `pub`), indicating that a macro should be exported
///
/// Usually found in `Option<MacroExport>`.
#[derive(Debug, Clone)] pub struct MacroExport(Span); impl Spanned for MacroExport { fn span(&self) -> Span { self.0 } } impl MacroExport { pub fn parse_option(input: ParseStream) -> syn::Result> { let span = if let Some(vis) = input.parse::>()? { return Err(vis.error( "You must now write `define_derive_deftly! { export Template: ... }`, not `puib Template:`, since derive-deftly version 0.14.0" )); } else if let Some(export) = (|| { use syn::parse::discouraged::Speculative; input.peek(syn::Ident).then(|| ())?; let forked = input.fork(); let ident: syn::Ident = forked.parse().expect("it *was*"); (ident == "export").then(|| ())?; input.advance_to(&forked); Some(ident) })() { Some(export.span()) } else { None }; Ok(span.map(MacroExport)) } } //---------- Grouping ---------- /// Whether an expansion should be surrounded by a `None`-delimited `Group` /// /// `Ord` is valid for composition with `cmp::max` #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq)] pub enum Grouping { Ungrouped, Invisible, Parens, } impl Grouping { pub fn surround(&self, ts: impl ToTokens) -> TokenStream { let ts = ts.to_token_stream(); match self { Grouping::Ungrouped => ts, Grouping::Invisible => { proc_macro2::Group::new(Delimiter::None, ts).to_token_stream() } Grouping::Parens => { proc_macro2::Group::new(Delimiter::Parenthesis, ts) .to_token_stream() } } } } //---------- IdentAny ---------- /// Like `syn::Ident` but parses using `parse_any`, accepting keywords /// /// Used for derive-deftly's own keywords, which can be Rust keywords, /// or identifiers. /// /// Needs care when used with user data, since it might be a keyword, /// in which case it's not really an *identifier*. 
pub struct IdentAny(pub syn::Ident); impl Parse for IdentAny { fn parse(input: ParseStream) -> syn::Result { Ok(IdentAny(Ident::parse_any(input)?)) } } impl Deref for IdentAny { type Target = syn::Ident; fn deref(&self) -> &syn::Ident { &self.0 } } impl ToTokens for IdentAny { fn to_tokens(&self, out: &mut TokenStream) { self.0.to_tokens(out) } } impl + ?Sized> PartialEq for IdentAny { fn eq(&self, rhs: &T) -> bool { self.0.eq(rhs) } } //---------- OutputTrimmer ---------- /// For making an output TokenStream, but eliding an unnecessary tail /// /// This construction will write, to an output [`TokenStream`], /// /// * `preamble` /// * zero or more optional `impl ToTokens`, called "reified" /// * interleaved with zero or more optional separators `sep` /// /// But it will avoid writing trailing unnecessary content: /// that is, trailing calls to `push_sep` are ignored, /// and if `push_reified` is never called, /// the preamble is also omitted. pub struct TokenOutputTrimmer<'t, 'o> { preamble: Option<&'t dyn ToTokens>, sep: &'t dyn ToTokens, sep_count: usize, out: &'o mut TokenStream, } impl<'t, 'o> TokenOutputTrimmer<'t, 'o> { pub fn new( out: &'o mut TokenStream, preamble: &'t dyn ToTokens, sep: &'t dyn ToTokens, ) -> Self { TokenOutputTrimmer { preamble: Some(preamble), sep, sep_count: 0, out, } } pub fn push_sep(&mut self) { self.sep_count += 1; } fn reify(&mut self) { if let Some(preamble) = self.preamble.take() { preamble.to_tokens(&mut self.out); } for _ in 0..mem::take(&mut self.sep_count) { self.sep.to_tokens(&mut self.out); } } pub fn push_reified(&mut self, t: impl ToTokens) { self.reify(); t.to_tokens(&mut self.out); } /// Did we output the preamble at all? 
pub fn did_preamble(self) -> Option<()> { if self.preamble.is_some() { None } else { Some(()) } } } //---------- TemplateName ---------- #[derive(Debug, Clone)] pub struct TemplateName(syn::Ident); impl TemplateName { pub fn macro_name(&self) -> syn::Ident { format_ident!("derive_deftly_template_{}", &self.0) } } impl Parse for TemplateName { fn parse(input: ParseStream) -> syn::Result { let ident: syn::Ident = input.parse()?; ident.try_into() } } impl TryFrom for TemplateName { type Error = syn::Error; fn try_from(ident: syn::Ident) -> syn::Result { let s = ident.to_string(); match s.chars().find(|&c| c != '_') { None => { Err("template name cannot consist entirely of underscores") } Some(c) => { if c.is_lowercase() { Err( "template name may not start with a lowercase letter (after any underscores)" ) } else { Ok(()) } } } .map_err(|emsg| ident.error(emsg))?; Ok(TemplateName(ident)) } } impl ToTokens for TemplateName { fn to_tokens(&self, out: &mut TokenStream) { self.0.to_tokens(out) } } impl Display for TemplateName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.0, f) } } impl quote::IdentFragment for TemplateName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { quote::IdentFragment::fmt(&self.0, f) } fn span(&self) -> Option { quote::IdentFragment::span(&self.0) } } //---------- engine_macro_name ---------- /// Return a full path to the location of `derive_deftly_engine`. /// /// (This may not work properly if the user /// imports the crate under a different name. /// This is a problem with the way cargo and rustc /// handle imports and proc-macro crates, /// which I think we can't properly solve here.) pub fn engine_macro_name() -> Result { let name = crate_name("derive-deftly-macros") .or_else(|_| crate_name("derive-deftly")); // See `tests/pub-export/pub-b/pub-b.rs`. (The bizarre version // has a different crate name, which we must handle heree.) 
#[cfg(feature = "bizarre")] let name = name.or_else(|_| crate_name("bizarre-derive-deftly")); match name { Ok(FoundCrate::Itself) => Ok(quote!( crate::derive_deftly_engine )), Ok(FoundCrate::Name(name)) => { let ident = Ident::new(&name, Span::call_site()); Ok(quote!( ::#ident::derive_deftly_engine )) } Err(e) => Err(Span::call_site().error( format_args!("Expected derive-deftly or derive-deftly-macro to be present in Cargo.toml: {}", e) )), } } //---------- general keyword enum parsing ---------- /// General-purpose keyword parser /// /// ```ignore /// keyword_general!{ /// KW_VAR FROM_ENUM ENUM; /// KEYWORD [ {BINDINGS} ] [ CONSTRUCTOR-ARGS ] } /// ``` /// Expands to: /// ```ignore /// if KW_VAR = ... { /// BINDINGS /// return FROM_ENUM(ENUM::CONSTRUCTOR CONSTRUCTOR-ARGS) /// } /// ``` /// /// `KEYWORD` can be `"KEYWORD_STRING": CONSTRUCTOR` /// /// `CONSTRUCTOR-ARGS`, if present, should be in the `( )` or `{ }` /// as required by the variant's CONSTRUCTOR. macro_rules! keyword_general { { $kw_var:ident $from_enum:ident $Enum:ident; $kw:ident $( $rest:tt )* } => { keyword_general!{ $kw_var $from_enum $Enum; @ 1 stringify!($kw), $kw, $($rest)* } }; { $kw_var:ident $from_enum:ident $Enum:ident; $kw:literal: $constr:ident $( $rest:tt )* } => { keyword_general!{ $kw_var $from_enum $Enum; @ 1 $kw, $constr, $($rest)* } }; { $kw_var:ident $from_enum:ident $Enum:ident; @ 1 $kw:expr, $constr:ident, $( $ca:tt )? } => { keyword_general!{ $kw_var $from_enum $Enum; @ 2 $kw, $constr, { } $( $ca )? } }; { $kw_var:ident $from_enum:ident $Enum:ident; @ 1 $kw:expr, $constr:ident, { $( $bindings:tt )* } $ca:tt } => { keyword_general!{ $kw_var $from_enum $Enum; @ 2 $kw, $constr, { $( $bindings )* } $ca } }; { $kw_var:ident $from_enum:ident $Enum:ident; @ 2 $kw:expr, $constr:ident, { $( $bindings:tt )* } $( $constr_args:tt )? 
} => { let _: &IdentAny = &$kw_var; if $kw_var == $kw { $( $bindings )* return $from_enum($Enum::$constr $( $constr_args )*); } }; { $($x:tt)* } => { compile_error!(stringify!($($x)*)) }; }