diff --git a/.clippy.toml b/.clippy.toml
new file mode 100644
index 0000000000..0d369b50fa
--- /dev/null
+++ b/.clippy.toml
@@ -0,0 +1 @@
+msrv = "1.56.0"
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000..d14b9d4885
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,3 @@
+src/gen/** linguist-generated
+syn.json linguist-generated
+tests/debug/gen.rs linguist-generated
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000000..750707701c
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1 @@
+github: dtolnay
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000000..789ef2c906
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,180 @@
+name: CI
+
+on:
+  push:
+  pull_request:
+  workflow_dispatch:
+  schedule: [cron: "40 1 * * *"]
+
+permissions:
+  contents: read
+
+env:
+  RUSTFLAGS: -Dwarnings
+
+jobs:
+  pre_ci:
+    uses: dtolnay/.github/.github/workflows/pre_ci.yml@master
+
+  test:
+    name: Tests
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+        with:
+          components: llvm-tools, rustc-dev
+      - run: cargo test --all-features --release --tests
+
+  build:
+    name: ${{matrix.name || format('Rust {0}', matrix.rust)}}
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ${{matrix.os || 'ubuntu'}}-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        rust: [stable, beta, 1.56.0]
+        include:
+          - rust: nightly
+            components: rustc-dev
+          - rust: nightly
+            name: WebAssembly
+            target: wasm32-unknown-unknown
+          - rust: nightly
+            name: WASI
+            target: wasm32-wasi
+          - rust: nightly
+            name: Windows
+            os: windows
+    env:
+      target: ${{matrix.target && format('--target={0}', matrix.target)}}
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@master
+        with:
+          toolchain: ${{matrix.rust}}
+          targets: ${{matrix.target}}
+          components: ${{matrix.components}}
+      - run: cargo check ${{env.target}} --no-default-features
+      - run: cargo check ${{env.target}}
+      - run: cargo check ${{env.target}} --features full
+      - run: cargo check ${{env.target}} --features 'fold visit visit-mut'
+      - run: cargo check ${{env.target}} --features 'full fold visit visit-mut'
+      - run: cargo check ${{env.target}} --no-default-features --features derive
+      - run: cargo check ${{env.target}} --no-default-features --features 'derive parsing'
+      - run: cargo check ${{env.target}} --no-default-features --features 'derive printing'
+      - run: cargo check ${{env.target}} --no-default-features --features 'proc-macro parsing printing'
+      - run: cargo check ${{env.target}} --no-default-features --features full
+      - run: cargo check ${{env.target}} --no-default-features --features 'full parsing'
+      - run: cargo check ${{env.target}} --no-default-features --features 'full printing'
+      - run: cargo check ${{env.target}} --no-default-features --features 'full parsing printing'
+      - run: cargo check ${{env.target}} --no-default-features --features 'fold visit visit-mut parsing printing'
+      - run: cargo check ${{env.target}} --no-default-features --features 'full fold visit visit-mut parsing printing'
+      - if: matrix.components == 'rustc-dev'
+        run: cargo check --benches --all-features --release
+
+  examples:
+    name: Examples
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - run: cargo check --manifest-path examples/dump-syntax/Cargo.toml
+      - run: cargo check --manifest-path examples/heapsize/example/Cargo.toml
+      - run: cargo check --manifest-path examples/lazy-static/example/Cargo.toml
+      - run: cargo check --manifest-path examples/trace-var/example/Cargo.toml
+
+  docs:
+    name: Docs
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    env:
+      RUSTDOCFLAGS: --cfg=doc_cfg -Dbroken_intra_doc_links
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - run: cargo test --all-features --doc
+      - run: cargo doc --all-features
+
+  codegen:
+    name: Codegen
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@stable
+      - run: cargo run --manifest-path codegen/Cargo.toml
+      - run: git diff --exit-code
+
+  msrv:
+    name: Minimal versions
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - run: cargo update -Z minimal-versions
+      - run: cargo check --all-features
+
+  fuzz:
+    name: Fuzz
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - uses: dtolnay/install@cargo-fuzz
+      - run: cargo fuzz check
+
+  miri:
+    name: Miri
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@miri
+      - run: cargo miri test --all-features
+        env:
+          MIRIFLAGS: -Zmiri-strict-provenance
+
+  clippy:
+    name: Clippy
+    runs-on: ubuntu-latest
+    if: github.event_name != 'pull_request'
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+        with:
+          components: clippy,rustc-dev
+      - run: cargo clippy --all-features --tests --benches -- -Dclippy::all -Dclippy::pedantic
+      - run: cargo clippy --manifest-path codegen/Cargo.toml -- -Dclippy::all -Dclippy::pedantic
+
+  outdated:
+    name: Outdated
+    runs-on: ubuntu-latest
+    if: github.event_name != 'pull_request'
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/install@cargo-outdated
+      - run: cargo outdated --workspace --exit-code 1
+      - run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1
diff --git a/.gitignore b/.gitignore
index aba0fb5d2d..cfaea94a7e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
-target
+target/
 Cargo.lock
-tests/rust/*
-!tests/rust/clone.sh
+/tests/rust/*
+/tests/*.pending-snap
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 3ee37829cc..0000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-sudo: false
-language: rust
-
-rust:
-  - nightly
-  - stable
-  - beta
-  - 1.31.0
-
-before_script:
-  - set -o errexit
-
-script:
-  - shopt -s expand_aliases
-  - alias build="cargo build ${TARGET+--target=$TARGET}"
-  - build --no-default-features
-  - build
-  - build --features full
-  - build --features 'fold visit visit-mut'
-  - build --features 'full fold visit visit-mut'
-  - build --no-default-features --features derive
-  - build --no-default-features --features 'derive parsing'
-  - build --no-default-features --features 'derive printing'
-  - build --no-default-features --features 'proc-macro parsing printing'
-  - build --no-default-features --features full
-  - build --no-default-features --features 'full parsing'
-  - build --no-default-features --features 'full printing'
-  - build --no-default-features --features 'full parsing printing'
-  - build --no-default-features --features 'fold visit visit-mut parsing printing'
-  - build --no-default-features --features 'full fold visit visit-mut parsing printing'
-
-matrix:
-  include:
-    - rust: nightly
-      name: Tests
-      script:
-        - cargo test --all-features --release
-    - rust: nightly
-      name: Examples
-      script:
-        - cargo check --manifest-path examples/dump-syntax/Cargo.toml
-        - cargo check --manifest-path examples/heapsize/example/Cargo.toml
-        - cargo check --manifest-path examples/lazy-static/example/Cargo.toml
-        - cargo check --manifest-path examples/trace-var/example/Cargo.toml
-    # Temporarily pinned until rustfmt is unbroken.
-    - rust: nightly-2019-07-28
-      name: Codegen
-      script:
-        - (cd codegen && cargo run)
-        - git diff --exit-code
-    - rust: nightly
-      name: Minimal versions
-      script:
-        - cargo update -Z minimal-versions
-        - cargo build --all-features
-    - rust: nightly
-      name: Clippy
-      script:
-        - rustup component add clippy || travis_terminate 0
-        - cargo clippy --all-features
-    - rust: nightly
-      name: WebAssembly
-      env: TARGET=wasm32-unknown-unknown
-      install:
-        - rustup target add "${TARGET}"
-    - rust: nightly
-      name: WASI
-      env: TARGET=wasm32-wasi
-      install:
-        - rustup target add "${TARGET}"
-  allow_failures:
-    - rust: nightly
-      name: Clippy
-  fast_finish: true
-
-env:
-  global:
-    - RUST_MIN_STACK=20000000
diff --git a/Cargo.toml b/Cargo.toml
index 3d2d432546..706820ecbe 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,16 +1,13 @@
 [package]
 name = "syn"
-version = "1.0.0" # don't forget to update html_root_url and syn.json
+version = "2.0.3" # don't forget to update html_root_url and syn.json
 authors = ["David Tolnay <dtolnay@gmail.com>"]
-license = "MIT OR Apache-2.0"
+categories = ["development-tools::procedural-macro-helpers", "parser-implementations"]
 description = "Parser for Rust source code"
-repository = "https://github.com/dtolnay/syn"
 documentation = "https://docs.rs/syn"
-categories = ["development-tools::procedural-macro-helpers"]
-readme = "README.md"
+edition = "2021"
 include = [
     "/benches/**",
-    "/build.rs",
     "/Cargo.toml",
     "/LICENSE-APACHE",
     "/LICENSE-MIT",
@@ -18,10 +15,10 @@ include = [
     "/src/**",
     "/tests/**",
 ]
-edition = "2018"
-
-[lib]
-name = "syn"
+keywords = ["macros", "syn"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/syn"
+rust-version = "1.56"

 [features]
 default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
@@ -35,39 +32,60 @@ fold = []
 clone-impls = []
 extra-traits = []
 proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
+test = ["syn-test-suite/all-features"]

 [dependencies]
-proc-macro2 = { version = "1.0", default-features = false }
-quote = { version = "1.0", optional = true, default-features = false }
-unicode-xid = "0.2"
+proc-macro2 = { version = "1.0.52", default-features = false }
+quote = { version = "1.0.25", optional = true, default-features = false }
+unicode-ident = "1"

 [dev-dependencies]
-insta = "0.9"
-rayon = "1.0"
-ref-cast = "0.2"
-regex = "1.0"
-termcolor = "1.0"
+anyhow = "1"
+automod = "1"
+flate2 = "1"
+insta = "1"
+rayon = "1"
+ref-cast = "1"
+regex = "1"
+reqwest = { version = "0.11", features = ["blocking"] }
+rustversion = "1"
+syn-test-suite = { version = "0", path = "tests/features" }
+tar = "0.4.16"
+termcolor = "1"
 walkdir = "2.1"

+[lib]
+doc-scrape-examples = false
+
 [[bench]]
 name = "rust"
-edition = "2018"
 harness = false
 required-features = ["full", "parsing"]

 [[bench]]
 name = "file"
-edition = "2018"
 required-features = ["full", "parsing"]

 [package.metadata.docs.rs]
 all-features = true
+targets = ["x86_64-unknown-linux-gnu"]
+rustdoc-args = ["--cfg", "doc_cfg"]

 [package.metadata.playground]
-all-features = true
-
-[badges]
-travis-ci = { repository = "dtolnay/syn" }
+features = ["full", "visit", "visit-mut", "fold", "extra-traits"]

 [workspace]
-members = ["dev", "json"]
+members = [
+    "dev",
+    "examples/dump-syntax",
+    "examples/heapsize/example",
+    "examples/heapsize/heapsize",
+    "examples/heapsize/heapsize_derive",
+    "examples/lazy-static/example",
+    "examples/lazy-static/lazy-static",
+    "examples/trace-var/example",
+    "examples/trace-var/trace-var",
+    "json",
+    "tests/crates",
+    "tests/features",
+]
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
index 16fe87b06e..1b5ec8b78e 100644
--- a/LICENSE-APACHE
+++ b/LICENSE-APACHE
@@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 of your accepting any such warranty or additional liability.

 END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-	http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/README.md b/README.md
index 4057eb3762..24aea17007 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,10 @@
 Parser for Rust source code
 ===========================

-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/0.15/syn/)
-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
+[github](https://github.com/dtolnay/syn)
+[crates.io](https://crates.io/crates/syn)
+[docs.rs](https://docs.rs/syn)
+[build status](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)

 Syn is a parsing library for parsing a stream of Rust tokens into a syntax
 tree of Rust source code.
@@ -39,18 +39,14 @@ contains some APIs that may be useful more generally.
 procedural macros enable only what they need, and do not pay in compile time
 for all the rest.

-[`syn::File`]: https://docs.rs/syn/0.15/syn/struct.File.html
-[`syn::Item`]: https://docs.rs/syn/0.15/syn/enum.Item.html
-[`syn::Expr`]: https://docs.rs/syn/0.15/syn/enum.Expr.html
-[`syn::Type`]: https://docs.rs/syn/0.15/syn/enum.Type.html
-[`syn::DeriveInput`]: https://docs.rs/syn/0.15/syn/struct.DeriveInput.html
-[parser functions]: https://docs.rs/syn/0.15/syn/parse/index.html
+[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
+[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
+[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
+[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
+[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html

-If you get stuck with anything involving procedural macros in Rust I am happy to
-provide help even if the issue is not related to Syn. Please file a ticket in
-this repo.
-
-*Version requirement: Syn supports rustc 1.31 and up.*
+*Version requirement: Syn supports rustc 1.56 and up.*

 [*Release notes*](https://github.com/dtolnay/syn/releases)
@@ -80,16 +76,14 @@ tokens back to the compiler to compile into the user's crate.

 ```toml
 [dependencies]
-syn = "0.15"
-quote = "0.6"
+syn = "2.0"
+quote = "1.0"

 [lib]
 proc-macro = true
 ```

 ```rust
-extern crate proc_macro;
-
 use proc_macro::TokenStream;
 use quote::quote;
 use syn::{parse_macro_input, DeriveInput};
@@ -110,9 +104,8 @@ pub fn my_macro(input: TokenStream) -> TokenStream {
 ```

 The [`heapsize`] example directory shows a complete working implementation of a
-derive macro. It works on any Rust compiler 1.31+. The example derives a
-`HeapSize` trait which computes an estimate of the amount of heap memory owned
-by a value.
+derive macro. The example derives a `HeapSize` trait which computes an estimate
+of the amount of heap memory owned by a value.

 [`heapsize`]: examples/heapsize
@@ -156,7 +149,7 @@ By tracking span information all the way through the expansion of a procedural
 macro as shown in the `heapsize` example, token-based macros in Syn are able to
 trigger errors that directly pinpoint the source of the problem.

-```
+```console
 error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
  --> src/main.rs:7:5
   |
@@ -177,7 +170,7 @@ Syn's parsing API.
 The example reimplements the popular `lazy_static` crate from crates.io as a
 procedural macro.

-```
+```rust
 lazy_static! {
     static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
 }
@@ -186,7 +179,7 @@ lazy_static! {
 The implementation shows how to trigger custom warnings and error messages on
 the macro input.

-```
+```console
 warning: come on, pick a more creative name
   --> src/main.rs:10:16
    |
@@ -271,7 +264,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
 The proc-macro2 crate will automatically detect and use the compiler's data
 structures when a procedural macro is active.

-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
+[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
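
> The README hunks above keep the claim that token-based macros "trigger errors that directly pinpoint the source of the problem." That behavior comes from attaching the offending tokens' span to a `syn::Error` and emitting it as a compile error. A minimal sketch using syn's public API — the `heapsize::HeapSize` trait path and the error message are illustrative assumptions, not code taken from the example directory:
>
> ```rust
> use proc_macro::TokenStream;
> use quote::quote;
> use syn::{parse_macro_input, Data, DeriveInput};
>
> #[proc_macro_derive(HeapSize)]
> pub fn derive_heap_size(input: TokenStream) -> TokenStream {
>     let input = parse_macro_input!(input as DeriveInput);
>
>     // Point the error at the type's name, not at the macro invocation site.
>     if !matches!(input.data, Data::Struct(_)) {
>         return syn::Error::new_spanned(&input.ident, "HeapSize only supports structs")
>             .to_compile_error()
>             .into();
>     }
>
>     let name = input.ident;
>     TokenStream::from(quote! {
>         impl heapsize::HeapSize for #name {
>             fn heap_size_of_children(&self) -> usize {
>                 0 // placeholder body for this sketch
>             }
>         }
>     })
> }
> ```
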
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 020c8ac651..0000000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-environment:
-  matrix:
-    - TARGET: x86_64-pc-windows-msvc
-
-install:
-  - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe"
-  - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
-  - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin
-  - SET PATH=%PATH%;C:\MinGW\bin
-  - rustc -V
-  - cargo -V
-
-build: false
-
-test_script:
-  - cargo build --all-features
diff --git a/benches/file.rs b/benches/file.rs
index 08ecd90960..b424723966 100644
--- a/benches/file.rs
+++ b/benches/file.rs
@@ -1,23 +1,57 @@
-// $ cargo bench --features full --bench file
+// $ cargo bench --features full,test --bench file

 #![feature(rustc_private, test)]
+#![recursion_limit = "1024"]
+#![allow(
+    clippy::items_after_statements,
+    clippy::manual_let_else,
+    clippy::match_like_matches_macro,
+    clippy::missing_panics_doc,
+    clippy::must_use_candidate,
+    clippy::uninlined_format_args
+)]

 extern crate test;

+#[macro_use]
+#[path = "../tests/macros/mod.rs"]
+mod macros;
+
+#[allow(dead_code)]
 #[path = "../tests/repo/mod.rs"]
-pub mod repo;
+mod repo;

-use proc_macro2::TokenStream;
+use proc_macro2::{Span, TokenStream};
 use std::fs;
 use std::str::FromStr;
+use syn::parse::{ParseStream, Parser};
 use test::Bencher;

-const FILE: &str = "tests/rust/src/libcore/str/mod.rs";
+const FILE: &str = "tests/rust/library/core/src/str/mod.rs";

-#[bench]
-fn parse_file(b: &mut Bencher) {
+fn get_tokens() -> TokenStream {
     repo::clone_rust();
     let content = fs::read_to_string(FILE).unwrap();
-    let tokens = TokenStream::from_str(&content).unwrap();
+    TokenStream::from_str(&content).unwrap()
+}
+
+#[bench]
+fn baseline(b: &mut Bencher) {
+    let tokens = get_tokens();
+    b.iter(|| drop(tokens.clone()));
+}
+
+#[bench]
+fn create_token_buffer(b: &mut Bencher) {
+    let tokens = get_tokens();
+    fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
+        Err(syn::Error::new(Span::call_site(), ""))
+    }
+    b.iter(|| immediate_fail.parse2(tokens.clone()));
+}
+
+#[bench]
+fn parse_file(b: &mut Bencher) {
+    let tokens = get_tokens();
     b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
 }
diff --git a/benches/rust.rs b/benches/rust.rs
index e3d9cd29ba..fa72cc94bf 100644
--- a/benches/rust.rs
+++ b/benches/rust.rs
@@ -1,9 +1,22 @@
-// $ cargo bench --features full --bench rust
+// $ cargo bench --features full,test --bench rust
 //
 // Syn only, useful for profiling:
-// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust

 #![cfg_attr(not(syn_only), feature(rustc_private))]
+#![recursion_limit = "1024"]
+#![allow(
+    clippy::cast_lossless,
+    clippy::let_underscore_untyped,
+    clippy::manual_let_else,
+    clippy::match_like_matches_macro,
+    clippy::uninlined_format_args,
+    clippy::unnecessary_wraps
+)]
+
+#[macro_use]
+#[path = "../tests/macros/mod.rs"]
+mod macros;

 #[path = "../tests/repo/mod.rs"]
 mod repo;
@@ -28,31 +41,47 @@ mod syn_parse {
 }

 #[cfg(not(syn_only))]
-mod libsyntax_parse {
+mod librustc_parse {
     extern crate rustc_data_structures;
-    extern crate syntax;
-    extern crate syntax_pos;
+    extern crate rustc_driver;
+    extern crate rustc_error_messages;
+    extern crate rustc_errors;
+    extern crate rustc_parse;
+    extern crate rustc_session;
+    extern crate rustc_span;

     use rustc_data_structures::sync::Lrc;
-    use syntax::edition::Edition;
-    use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
-    use syntax::parse::ParseSess;
-    use syntax::source_map::{FilePathMapping, SourceMap};
-    use syntax_pos::FileName;
+    use rustc_error_messages::FluentBundle;
+    use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler};
+    use rustc_session::parse::ParseSess;
+    use rustc_span::source_map::{FilePathMapping, SourceMap};
+    use rustc_span::{edition::Edition, FileName};

     pub fn bench(content: &str) -> Result<(), ()> {
         struct SilentEmitter;

         impl Emitter for SilentEmitter {
-            fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
+            fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
+            fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+                None
+            }
+        }
+
+        impl Translate for SilentEmitter {
+            fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> {
+                None
+            }
+            fn fallback_fluent_bundle(&self) -> &FluentBundle {
+                panic!("silent emitter attempted to translate a diagnostic");
+            }
         }

-        syntax::with_globals(Edition::Edition2018, || {
+        rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
             let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let emitter = Box::new(SilentEmitter);
             let handler = Handler::with_emitter(false, None, emitter);
             let sess = ParseSess::with_span_handler(handler, cm);
-            if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
+            if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
                 FileName::Custom("bench".to_owned()),
                 content.to_owned(),
                 &sess,
@@ -104,11 +133,11 @@ fn main() {
     repo::clone_rust();

     macro_rules! testcases {
-        ($($(#[$cfg:meta])* $name:path,)*) => {
-            vec![
+        ($($(#[$cfg:meta])* $name:ident,)*) => {
+            [
                 $(
                     $(#[$cfg])*
-                    (stringify!($name), $name as fn(&str) -> Result<(), ()>),
+                    (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
                 )*
             ]
         };
@@ -128,12 +157,12 @@ fn main() {

     for (name, f) in testcases!(
         #[cfg(not(syn_only))]
-        read_from_disk::bench,
+        read_from_disk,
         #[cfg(not(syn_only))]
-        tokenstream_parse::bench,
-        syn_parse::bench,
+        tokenstream_parse,
+        syn_parse,
         #[cfg(not(syn_only))]
-        libsyntax_parse::bench,
+        librustc_parse,
     ) {
         eprint!("{:20}", format!("{}:", name));
         let elapsed = exec(f);
diff --git a/build.rs b/build.rs
index c0f9ed3406..c550dddb28 100644
--- a/build.rs
+++ b/build.rs
@@ -1,19 +1,23 @@
+#![allow(clippy::let_underscore_untyped, clippy::manual_let_else)]
+
 use std::env;
 use std::process::Command;
-use std::str::{self, FromStr};
+use std::str;

 // The rustc-cfg strings below are *not* public API. Please let us know by
 // opening a GitHub issue if your build environment requires some way to enable
 // these cfgs other than by executing our build script.
 fn main() {
+    println!("cargo:rerun-if-changed=build.rs");
+
     let compiler = match rustc_version() {
         Some(compiler) => compiler,
         None => return,
     };

-    if compiler.minor < 36 {
-        println!("cargo:rustc-cfg=syn_omit_await_from_token_macro");
-    }
+    // Note: add "/build.rs" to package.include in Cargo.toml if adding any
+    // conditional compilation within the library.
+    let _ = compiler.minor;

     if !compiler.nightly {
         println!("cargo:rustc-cfg=syn_disable_nightly_tests");
@@ -26,38 +30,14 @@ struct Compiler {
 }

 fn rustc_version() -> Option<Compiler> {
-    let rustc = match env::var_os("RUSTC") {
-        Some(rustc) => rustc,
-        None => return None,
-    };
-
-    let output = match Command::new(rustc).arg("--version").output() {
-        Ok(output) => output,
-        Err(_) => return None,
-    };
-
-    let version = match str::from_utf8(&output.stdout) {
-        Ok(version) => version,
-        Err(_) => return None,
-    };
-
+    let rustc = env::var_os("RUSTC")?;
+    let output = Command::new(rustc).arg("--version").output().ok()?;
+    let version = str::from_utf8(&output.stdout).ok()?;
     let mut pieces = version.split('.');
     if pieces.next() != Some("rustc 1") {
         return None;
     }
-
-    let next = match pieces.next() {
-        Some(next) => next,
-        None => return None,
-    };
-
-    let minor = match u32::from_str(next) {
-        Ok(minor) => minor,
-        Err(_) => return None,
-    };
-
-    Some(Compiler {
-        minor: minor,
-        nightly: version.contains("nightly"),
-    })
+    let minor = pieces.next()?.parse().ok()?;
+    let nightly = version.contains("nightly") || version.ends_with("-dev");
+    Some(Compiler { minor, nightly })
 }
diff --git a/codegen/Cargo.toml b/codegen/Cargo.toml
index 051e8505e7..e2f892fd1c 100644
--- a/codegen/Cargo.toml
+++ b/codegen/Cargo.toml
@@ -2,25 +2,24 @@
 name = "syn-internal-codegen"
 version = "0.0.0"
 authors = ["David Tolnay <dtolnay@gmail.com>", "Nika Layzell <nika@thelayzells.com>"]
-edition = "2018"
+edition = "2021"

 publish = false # this is an internal crate which should never be published

 [dependencies]
-color-backtrace = "0.2"
-failure = "0.1"
-indexmap = { version = "1.0", features = ["serde-1"] }
+anyhow = "1"
+color-backtrace = "0.4"
+indexmap = { version = "1", features = ["serde-1"] }
 inflections = "1.1"
-proc-macro2 = "1.0"
-quote = "1.0"
-rustfmt = { package = "rustfmt-nightly", git = "https://github.com/rust-lang-nursery/rustfmt" }
-semver = { version = "0.9", features = ["serde"] }
+prettyplease = "0.1"
+proc-macro2 = { version = "1.0.20", features = ["span-locations"] }
+quote = "1"
+semver = { version = "1", features = ["serde"] }
 serde = { version = "1.0.88", features = ["derive"] }
 serde_json = "1.0.38"
 syn-codegen = { path = "../json" }
-syn = { path = "..", features = ["full", "extra-traits"] }
-toml = "0.4.10"
+syn = { version = "1", features = ["derive", "parsing", "printing", "full"], default-features = false }
+thiserror = "1"
+toml = "0.5"

 [workspace]
-# Prefer that `cargo clean` in syn's directory does not require a rebuild of
-# rustfmt in the codegen directory.
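
> Worth noting on the codegen manifest above: dropping the git dependency on `rustfmt-nightly` in favor of `prettyplease` is what the `codegen/src/file.rs` hunk further down builds on, and it lets the Codegen CI job run on stable. The whole formatting step reduces to parsing the generated tokens back into a `syn::File` and unparsing it. A self-contained sketch of that pattern, assuming `syn` (with the `full` feature), `quote`, and `prettyplease` as dependencies — the `render` helper and the example tokens are made up for illustration:
>
> ```rust
> use proc_macro2::TokenStream;
> use quote::quote;
>
> // Parse generated tokens into a syntax tree, then pretty-print them as
> // rustfmt-like Rust source. Panicking on invalid tokens is the behavior
> // you want in a code generator: bad output should fail loudly.
> fn render(tokens: TokenStream) -> String {
>     let syntax_tree: syn::File = syn::parse2(tokens).expect("codegen emitted invalid Rust");
>     prettyplease::unparse(&syntax_tree)
> }
>
> fn main() {
>     let tokens = quote! {
>         impl Clone for Unit { fn clone(&self) -> Self { Unit } }
>     };
>     print!("{}", render(tokens));
> }
> ```
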
diff --git a/codegen/src/cfg.rs b/codegen/src/cfg.rs
new file mode 100644
index 0000000000..5932860f59
--- /dev/null
+++ b/codegen/src/cfg.rs
@@ -0,0 +1,12 @@
+use proc_macro2::TokenStream;
+use quote::quote;
+use syn_codegen::Features;
+
+pub fn features(features: &Features) -> TokenStream {
+    let features = &features.any;
+    match features.len() {
+        0 => quote!(),
+        1 => quote!(#[cfg(feature = #(#features)*)]),
+        _ => quote!(#[cfg(any(#(feature = #features),*))]),
+    }
+}
diff --git a/codegen/src/clone.rs b/codegen/src/clone.rs
new file mode 100644
index 0000000000..117732bbb4
--- /dev/null
+++ b/codegen/src/clone.rs
@@ -0,0 +1,132 @@
+use crate::{cfg, file, lookup};
+use anyhow::Result;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::{format_ident, quote};
+use syn_codegen::{Data, Definitions, Node, Type};
+
+const CLONE_SRC: &str = "src/gen/clone.rs";
+
+fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
+    let type_name = &node.ident;
+    let ident = Ident::new(type_name, Span::call_site());
+
+    match &node.data {
+        Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
+        Data::Enum(variants) => {
+            let arms = variants.iter().map(|(variant_name, fields)| {
+                let variant = Ident::new(variant_name, Span::call_site());
+                if fields.is_empty() {
+                    quote! {
+                        #ident::#variant => #ident::#variant,
+                    }
+                } else {
+                    let mut pats = Vec::new();
+                    let mut clones = Vec::new();
+                    for i in 0..fields.len() {
+                        let pat = format_ident!("v{}", i);
+                        clones.push(quote!(#pat.clone()));
+                        pats.push(pat);
+                    }
+                    let mut cfg = None;
+                    if node.ident == "Expr" {
+                        if let Type::Syn(ty) = &fields[0] {
+                            if !lookup::node(defs, ty).features.any.contains("derive") {
+                                cfg = Some(quote!(#[cfg(feature = "full")]));
+                            }
+                        }
+                    }
+                    quote! {
+                        #cfg
+                        #ident::#variant(#(#pats),*) => #ident::#variant(#(#clones),*),
+                    }
+                }
+            });
+            let nonexhaustive = if node.ident == "Expr" {
+                Some(quote! {
+                    #[cfg(not(feature = "full"))]
+                    _ => unreachable!(),
+                })
+            } else {
+                None
+            };
+            quote! {
+                match self {
+                    #(#arms)*
+                    #nonexhaustive
+                }
+            }
+        }
+        Data::Struct(fields) => {
+            let fields = fields.keys().map(|f| {
+                let ident = Ident::new(f, Span::call_site());
+                quote! {
+                    #ident: self.#ident.clone(),
+                }
+            });
+            quote!(#ident { #(#fields)* })
+        }
+        Data::Private => unreachable!(),
+    }
+}
+
+fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
+    let manual_clone = node.data == Data::Private || node.ident == "Lifetime";
+    if manual_clone {
+        return TokenStream::new();
+    }
+
+    let ident = Ident::new(&node.ident, Span::call_site());
+    let cfg_features = cfg::features(&node.features);
+
+    let copy = node.ident == "AttrStyle"
+        || node.ident == "BinOp"
+        || node.ident == "RangeLimits"
+        || node.ident == "TraitBoundModifier"
+        || node.ident == "UnOp";
+    if copy {
+        return quote! {
+            #cfg_features
+            #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
+            impl Copy for #ident {}
+            #cfg_features
+            #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
+            impl Clone for #ident {
+                fn clone(&self) -> Self {
+                    *self
+                }
+            }
+        };
+    }
+
+    let body = expand_impl_body(defs, node);
+
+    quote! {
+        #cfg_features
+        #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
+        impl Clone for #ident {
+            fn clone(&self) -> Self {
+                #body
+            }
+        }
+    }
+}
+
+pub fn generate(defs: &Definitions) -> Result<()> {
+    let mut impls = TokenStream::new();
+    for node in &defs.types {
+        impls.extend(expand_impl(defs, node));
+    }
+
+    file::write(
+        CLONE_SRC,
+        quote! {
+            #![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
+
+            use crate::*;
+
+            #impls
+        },
+    )?;
+
+    Ok(())
+}
diff --git a/codegen/src/debug.rs b/codegen/src/debug.rs
index 5eb6a9ce92..005d4a05b0 100644
--- a/codegen/src/debug.rs
+++ b/codegen/src/debug.rs
@@ -1,287 +1,164 @@
-use crate::error::Result;
-use crate::file;
+use crate::{cfg, file, lookup};
+use anyhow::Result;
 use proc_macro2::{Ident, Span, TokenStream};
-use quote::quote;
-use syn::Index;
+use quote::{format_ident, quote};
+use std::collections::BTreeSet as Set;
 use syn_codegen::{Data, Definitions, Node, Type};

-const DEBUG_SRC: &str = "../tests/debug/gen.rs";
+const DEBUG_SRC: &str = "src/gen/debug.rs";

-fn rust_type(ty: &Type) -> TokenStream {
-    match ty {
-        Type::Syn(ty) => {
-            let ident = Ident::new(ty, Span::call_site());
-            quote!(syn::#ident)
-        }
-        Type::Std(ty) => {
-            let ident = Ident::new(ty, Span::call_site());
-            quote!(#ident)
-        }
-        Type::Ext(ty) => {
-            let ident = Ident::new(ty, Span::call_site());
-            quote!(proc_macro2::#ident)
-        }
-        Type::Token(ty) | Type::Group(ty) => {
-            let ident = Ident::new(ty, Span::call_site());
-            quote!(syn::token::#ident)
-        }
-        Type::Punctuated(ty) => {
-            let element = rust_type(&ty.element);
-            let punct = Ident::new(&ty.punct, Span::call_site());
-            quote!(syn::punctuated::Punctuated<#element, #punct>)
-        }
-        Type::Option(ty) => {
-            let inner = rust_type(ty);
-            quote!(Option<#inner>)
-        }
-        Type::Box(ty) => {
-            let inner = rust_type(ty);
-            quote!(Box<#inner>)
-        }
-        Type::Vec(ty) => {
-            let inner = rust_type(ty);
-            quote!(Vec<#inner>)
-        }
-        Type::Tuple(ty) => {
-            let inner = ty.iter().map(rust_type);
-            quote!((#(#inner,)*))
-        }
-    }
-}
-
-fn is_printable(ty: &Type) -> bool {
-    match ty {
-        Type::Ext(name) => name != "Span",
-        Type::Box(ty) => is_printable(ty),
-        Type::Tuple(ty) => ty.iter().any(is_printable),
-        Type::Token(_) | Type::Group(_) => false,
-        Type::Syn(name) => name != "Reserved",
-        Type::Std(_) | Type::Punctuated(_) | Type::Option(_) | Type::Vec(_) => true,
-    }
-}
-
-fn format_field(val: &TokenStream, ty: &Type) -> Option<TokenStream> {
-    if !is_printable(ty) {
-        return None;
-    }
-    let format = match ty {
-        Type::Option(ty) => {
-            let inner = quote!(_val);
-            let format = format_field(&inner, ty).map(|format| {
-                quote! {
-                    formatter.write_str("(")?;
-                    Debug::fmt(#format, formatter)?;
-                    formatter.write_str(")")?;
-                }
-            });
-            let ty = rust_type(ty);
-            quote!({
-                #[derive(RefCast)]
-                #[repr(transparent)]
-                struct Print(Option<#ty>);
-                impl Debug for Print {
-                    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-                        match &self.0 {
-                            Some(#inner) => {
-                                formatter.write_str("Some")?;
-                                #format
-                                Ok(())
-                            }
-                            None => formatter.write_str("None"),
-                        }
-                    }
-                }
-                Print::ref_cast(#val)
-            })
-        }
-        Type::Tuple(ty) => {
-            let printable: Vec<TokenStream> = ty
-                .iter()
-                .enumerate()
-                .filter_map(|(i, ty)| {
-                    let index = Index::from(i);
-                    let val = quote!(&#val.#index);
-                    format_field(&val, ty)
-                })
-                .collect();
-            if printable.len() == 1 {
-                printable.into_iter().next().unwrap()
-            } else {
-                quote! {
-                    &(#(#printable),*)
-                }
-            }
-        }
-        _ => quote! { Lite(#val) },
-    };
-    Some(format)
-}
-
-fn syntax_tree_enum<'a>(outer: &str, inner: &str, fields: &'a [Type]) -> Option<&'a str> {
+fn syntax_tree_enum<'a>(
+    enum_name: &str,
+    variant_name: &str,
+    fields: &'a [Type],
+) -> Option<&'a str> {
     if fields.len() != 1 {
         return None;
     }
-    const WHITELIST: &[&str] = &["PathArguments", "Visibility"];
+    const WHITELIST: &[(&str, &str)] = &[
+        ("Meta", "Path"),
+        ("Pat", "Const"),
+        ("Pat", "Lit"),
+        ("Pat", "Macro"),
+        ("Pat", "Path"),
+        ("Pat", "Range"),
+        ("PathArguments", "AngleBracketed"),
+        ("PathArguments", "Parenthesized"),
+        ("Stmt", "Local"),
+        ("TypeParamBound", "Lifetime"),
+        ("Visibility", "Public"),
+        ("Visibility", "Restricted"),
+    ];
     match &fields[0] {
-        Type::Syn(ty) if WHITELIST.contains(&outer) || outer.to_owned() + inner == *ty => Some(ty),
-        _ => None,
-    }
-}
-
-fn lookup<'a>(defs: &'a Definitions, name: &str) -> &'a Node {
-    for node in &defs.types {
-        if node.ident == name {
-            return node;
+        Type::Syn(ty)
+            if WHITELIST.contains(&(enum_name, variant_name))
+                || enum_name.to_owned() + variant_name == *ty =>
+        {
+            Some(ty)
         }
+        _ => None,
     }
-    panic!("not found: {}", name)
 }

-fn expand_impl_body(defs: &Definitions, node: &Node, name: &str) -> TokenStream {
-    let ident = Ident::new(&node.ident, Span::call_site());
+fn expand_impl_body(
+    defs: &Definitions,
+    node: &Node,
+    syntax_tree_variants: &Set<&str>,
+) -> TokenStream {
+    let type_name = &node.ident;
+    let ident = Ident::new(type_name, Span::call_site());
+    let is_syntax_tree_variant = syntax_tree_variants.contains(type_name.as_str());

-    match &node.data {
+    let body = match &node.data {
+        Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
         Data::Enum(variants) => {
-            let arms = variants.iter().map(|(v, fields)| {
-                let variant = Ident::new(v, Span::call_site());
+            assert!(!is_syntax_tree_variant);
+            let arms = variants.iter().map(|(variant_name, fields)| {
+                let variant = Ident::new(variant_name, Span::call_site());
                 if fields.is_empty() {
                     quote! {
-                        syn::#ident::#variant => formatter.write_str(#v),
+                        #ident::#variant => formatter.write_str(#variant_name),
                     }
-                } else if let Some(inner) = syntax_tree_enum(name, v, fields) {
-                    let path = format!("{}::{}", name, v);
-                    let format = expand_impl_body(defs, lookup(defs, inner), &path);
-                    quote! {
-                        syn::#ident::#variant(_val) => {
-                            #format
+                } else {
+                    let mut cfg = None;
+                    if node.ident == "Expr" {
+                        if let Type::Syn(ty) = &fields[0] {
+                            if !lookup::node(defs, ty).features.any.contains("derive") {
+                                cfg = Some(quote!(#[cfg(feature = "full")]));
+                            }
                         }
                     }
-                } else if fields.len() == 1 {
-                    let ty = &fields[0];
-                    let val = quote!(_val);
-                    let format = format_field(&val, ty).map(|format| {
+                    if syntax_tree_enum(type_name, variant_name, fields).is_some() {
                         quote! {
-                            formatter.write_str("(")?;
-                            Debug::fmt(#format, formatter)?;
-                            formatter.write_str(")")?;
+                            #cfg
+                            #ident::#variant(v0) => v0.debug(formatter, #variant_name),
                         }
-                    });
-                    quote! {
-                        syn::#ident::#variant(_val) => {
-                            formatter.write_str(#v)?;
-                            #format
-                            Ok(())
-                        }
-                    }
-                } else {
-                    let pats = (0..fields.len())
-                        .map(|i| Ident::new(&format!("_v{}", i), Span::call_site()));
-                    let fields = fields.iter().enumerate().filter_map(|(i, ty)| {
-                        let index = Ident::new(&format!("_v{}", i), Span::call_site());
-                        let val = quote!(#index);
-                        let format = format_field(&val, ty)?;
-                        Some(quote! {
-                            formatter.field(#format);
-                        })
-                    });
-                    quote! {
-                        syn::#ident::#variant(#(#pats),*) => {
-                            let mut formatter = formatter.debug_tuple(#v);
-                            #(#fields)*
-                            formatter.finish()
+                    } else {
+                        let pats = (0..fields.len())
+                            .map(|i| format_ident!("v{}", i))
+                            .collect::<Vec<_>>();
+                        quote! {
+                            #cfg
+                            #ident::#variant(#(#pats),*) => {
+                                let mut formatter = formatter.debug_tuple(#variant_name);
+                                #(formatter.field(#pats);)*
+                                formatter.finish()
+                            }
                         }
                     }
                 }
             });
-            let nonexhaustive = if node.exhaustive {
-                None
+            let nonexhaustive = if node.ident == "Expr" {
+                Some(quote! {
+                    #[cfg(not(feature = "full"))]
+                    _ => unreachable!(),
+                })
             } else {
-                Some(quote!(_ => unreachable!()))
+                None
             };
+            let prefix = format!("{}::", type_name);
             quote! {
-                match _val {
+                formatter.write_str(#prefix)?;
+                match self {
                     #(#arms)*
                     #nonexhaustive
                 }
             }
         }
         Data::Struct(fields) => {
-            let fields = fields.iter().filter_map(|(f, ty)| {
+            let type_name = if is_syntax_tree_variant {
+                quote!(name)
+            } else {
+                quote!(#type_name)
+            };
+            let fields = fields.keys().map(|f| {
                 let ident = Ident::new(f, Span::call_site());
-                if let Type::Option(ty) = ty {
-                    let inner = quote!(_val);
-                    let format = format_field(&inner, ty).map(|format| {
-                        quote! {
-                            let #inner = &self.0;
-                            formatter.write_str("(")?;
-                            Debug::fmt(#format, formatter)?;
-                            formatter.write_str(")")?;
-                        }
-                    });
-                    let ty = rust_type(ty);
-                    Some(quote! {
-                        if let Some(val) = &_val.#ident {
-                            #[derive(RefCast)]
-                            #[repr(transparent)]
-                            struct Print(#ty);
-                            impl Debug for Print {
-                                fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-                                    formatter.write_str("Some")?;
-                                    #format
-                                    Ok(())
-                                }
-                            }
-                            formatter.field(#f, Print::ref_cast(val));
-                        }
-                    })
-                } else {
-                    let val = quote!(&_val.#ident);
-                    let format = format_field(&val, ty)?;
-                    let mut call = quote! {
-                        formatter.field(#f, #format);
-                    };
-                    if let Type::Vec(_) | Type::Punctuated(_) = ty {
-                        call = quote! {
-                            if !_val.#ident.is_empty() {
-                                #call
-                            }
-                        };
-                    }
-                    Some(call)
+                quote! {
+                    formatter.field(#f, &self.#ident);
                 }
             });
             quote! {
-                let mut formatter = formatter.debug_struct(#name);
+                let mut formatter = formatter.debug_struct(#type_name);
                 #(#fields)*
                 formatter.finish()
             }
         }
-        Data::Private => {
-            if node.ident == "LitInt" || node.ident == "LitFloat" {
-                quote! {
-                    write!(formatter, "{}", _val)
-                }
-            } else {
-                quote! {
-                    write!(formatter, "{:?}", _val.value())
+        Data::Private => unreachable!(),
+    };
+
+    if is_syntax_tree_variant {
+        quote! {
+            impl #ident {
+                fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+                    #body
                 }
             }
+            self.debug(formatter, #type_name)
         }
+    } else {
+        body
     }
 }

-fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
-    if node.ident == "Reserved" {
+fn expand_impl(defs: &Definitions, node: &Node, syntax_tree_variants: &Set<&str>) -> TokenStream {
+    let manual_debug = node.data == Data::Private || node.ident == "LitBool";
+    if manual_debug {
         return TokenStream::new();
     }

     let ident = Ident::new(&node.ident, Span::call_site());
-    let body = expand_impl_body(defs, node, &node.ident);
+    let cfg_features = cfg::features(&node.features);
+    let body = expand_impl_body(defs, node, syntax_tree_variants);
+    let formatter = match &node.data {
+        Data::Enum(variants) if variants.is_empty() => quote!(_formatter),
+        _ => quote!(formatter),
+    };

     quote! {
-        impl Debug for Lite<syn::#ident> {
-            fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-                let _val = &self.value;
+        #cfg_features
+        #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+        impl Debug for #ident {
+            fn fmt(&self, #formatter: &mut fmt::Formatter) -> fmt::Result {
                 #body
             }
         }
@@ -289,15 +166,27 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
 }

 pub fn generate(defs: &Definitions) -> Result<()> {
+    let mut syntax_tree_variants = Set::new();
+    for node in &defs.types {
+        if let Data::Enum(variants) = &node.data {
+            let enum_name = &node.ident;
+            for (variant_name, fields) in variants {
+                if let Some(inner) = syntax_tree_enum(enum_name, variant_name, fields) {
+                    syntax_tree_variants.insert(inner);
+                }
+            }
+        }
+    }
+
     let mut impls = TokenStream::new();
     for node in &defs.types {
-        impls.extend(expand_impl(&defs, node));
+        impls.extend(expand_impl(defs, node, &syntax_tree_variants));
     }

     file::write(
         DEBUG_SRC,
         quote! {
-            use super::{Lite, RefCast};
+            use crate::*;
             use std::fmt::{self, Debug};

             #impls
diff --git a/codegen/src/eq.rs b/codegen/src/eq.rs
new file mode 100644
index 0000000000..4b7b79e33c
--- /dev/null
+++ b/codegen/src/eq.rs
@@ -0,0 +1,168 @@
+use crate::{cfg, file, lookup};
+use anyhow::Result;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::{format_ident, quote};
+use syn_codegen::{Data, Definitions, Node, Type};
+
+const EQ_SRC: &str = "src/gen/eq.rs";
+
+fn always_eq(field_type: &Type) -> bool {
+    match field_type {
+        Type::Ext(ty) => ty == "Span",
+        Type::Token(_) | Type::Group(_) => true,
+        Type::Box(inner) => always_eq(inner),
+        Type::Tuple(inner) => inner.iter().all(always_eq),
+        _ => false,
+    }
+}
+
+fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
+    let type_name = &node.ident;
+    let ident = Ident::new(type_name, Span::call_site());
+
+    match &node.data {
+        Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
+        Data::Enum(variants) => {
+            let arms = variants.iter().map(|(variant_name, fields)| {
+                let variant = Ident::new(variant_name, Span::call_site());
+                if fields.is_empty() {
+                    quote! {
+                        (#ident::#variant, #ident::#variant) => true,
+                    }
+                } else {
+                    let mut this_pats = Vec::new();
+                    let mut other_pats = Vec::new();
+                    let mut comparisons = Vec::new();
+                    for (i, field) in fields.iter().enumerate() {
+                        if always_eq(field) {
+                            this_pats.push(format_ident!("_"));
+                            other_pats.push(format_ident!("_"));
+                            continue;
+                        }
+                        let this = format_ident!("self{}", i);
+                        let other = format_ident!("other{}", i);
+                        comparisons.push(match field {
+                            Type::Ext(ty) if ty == "TokenStream" => {
+                                quote!(TokenStreamHelper(#this) == TokenStreamHelper(#other))
+                            }
+                            Type::Ext(ty) if ty == "Literal" => {
+                                quote!(#this.to_string() == #other.to_string())
+                            }
+                            _ => quote!(#this == #other),
+                        });
+                        this_pats.push(this);
+                        other_pats.push(other);
+                    }
+                    if comparisons.is_empty() {
+                        comparisons.push(quote!(true));
+                    }
+                    let mut cfg = None;
+                    if node.ident == "Expr" {
+                        if let Type::Syn(ty) = &fields[0] {
+                            if !lookup::node(defs, ty).features.any.contains("derive") {
+                                cfg = Some(quote!(#[cfg(feature = "full")]));
+                            }
+                        }
+                    }
+                    quote! {
+                        #cfg
+                        (#ident::#variant(#(#this_pats),*), #ident::#variant(#(#other_pats),*)) => {
+                            #(#comparisons)&&*
+                        }
+                    }
+                }
+            });
+            let fallthrough = if variants.len() == 1 {
+                None
+            } else {
+                Some(quote!(_ => false,))
+            };
+            quote! {
+                match (self, other) {
+                    #(#arms)*
+                    #fallthrough
+                }
+            }
+        }
+        Data::Struct(fields) => {
+            let mut comparisons = Vec::new();
+            for (f, ty) in fields {
+                if always_eq(ty) {
+                    continue;
+                }
+                let ident = Ident::new(f, Span::call_site());
+                comparisons.push(match ty {
+                    Type::Ext(ty) if ty == "TokenStream" => {
+                        quote!(TokenStreamHelper(&self.#ident) == TokenStreamHelper(&other.#ident))
+                    }
+                    _ => quote!(self.#ident == other.#ident),
+                });
+            }
+            if comparisons.is_empty() {
+                quote!(true)
+            } else {
+                quote!(#(#comparisons)&&*)
+            }
+        }
+        Data::Private => unreachable!(),
+    }
+}
+
+fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
+    if node.ident == "Member" || node.ident == "Index" || node.ident == "Lifetime" {
+        return TokenStream::new();
+    }
+
+    let ident = Ident::new(&node.ident, Span::call_site());
+    let cfg_features = cfg::features(&node.features);
+
+    let eq = quote! {
+        #cfg_features
+        #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+        impl Eq for #ident {}
+    };
+
+    let manual_partial_eq = node.data == Data::Private;
+    if manual_partial_eq {
+        return eq;
+    }
+
+    let body = expand_impl_body(defs, node);
+    let other = match &node.data {
+        Data::Enum(variants) if variants.is_empty() => quote!(_other),
+        Data::Struct(fields) if fields.values().all(always_eq) => quote!(_other),
+        _ => quote!(other),
+    };
+
+    quote! {
+        #eq
+
+        #cfg_features
+        #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+        impl PartialEq for #ident {
+            fn eq(&self, #other: &Self) -> bool {
+                #body
+            }
+        }
+    }
+}
+
+pub fn generate(defs: &Definitions) -> Result<()> {
+    let mut impls = TokenStream::new();
+    for node in &defs.types {
+        impls.extend(expand_impl(defs, node));
+    }
+
+    file::write(
+        EQ_SRC,
+        quote! {
+            #[cfg(any(feature = "derive", feature = "full"))]
+            use crate::tt::TokenStreamHelper;
+            use crate::*;
+
+            #impls
+        },
+    )?;
+
+    Ok(())
+}
diff --git a/codegen/src/error.rs b/codegen/src/error.rs
deleted file mode 100644
index 0b3f191df4..0000000000
--- a/codegen/src/error.rs
+++ /dev/null
@@ -1,59 +0,0 @@
-use std::fmt::{self, Display};
-use std::io;
-
-pub type Result<T> = std::result::Result<T, Error>;
-
-#[derive(Debug)]
-pub enum Error {
-    Io(io::Error),
-    Json(serde_json::Error),
-    Rustfmt(rustfmt::ErrorKind),
-    Syn(syn::Error),
-    Toml(toml::de::Error),
-}
-
-impl Display for Error {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use self::Error::*;
-
-        match self {
-            Io(e) => write!(f, "{}", e),
-            Json(e) => write!(f, "{}", e),
-            Rustfmt(e) => write!(f, "{}", e),
-            Syn(e) => write!(f, "{}", e),
-            Toml(e) => write!(f, "{}", e),
-        }
-    }
-}
-
-impl std::error::Error for Error {}
-
-impl From<io::Error> for Error {
-    fn from(e: io::Error) -> Self {
-        Error::Io(e)
-    }
-}
-
-impl From<rustfmt::ErrorKind> for Error {
-    fn from(e: rustfmt::ErrorKind) -> Self {
-        Error::Rustfmt(e)
-    }
-}
-
-impl From<serde_json::Error> for Error {
-    fn from(e: serde_json::Error) -> Self {
-        Error::Json(e)
-    }
-}
-
-impl From<syn::Error> for Error {
-    fn from(e: syn::Error) -> Self {
-        Error::Syn(e)
-    }
-}
-
-impl From<toml::de::Error> for Error {
-    fn from(e: toml::de::Error) -> Self {
-        Error::Toml(e)
-    }
-}
diff --git a/codegen/src/file.rs b/codegen/src/file.rs
index 807f675277..beeb66017d 100644
--- a/codegen/src/file.rs
+++ b/codegen/src/file.rs
@@ -1,10 +1,11 @@
-use crate::error::Result;
+use crate::workspace_path;
+use anyhow::Result;
 use proc_macro2::TokenStream;
 use std::fs;
 use std::io::Write;
 use std::path::Path;

-pub fn write<P: AsRef<Path>>(path: P, content: TokenStream) -> Result<()> {
+pub fn write(relative_to_workspace_root: impl AsRef<Path>, content: TokenStream) -> Result<()> {
     let mut formatted = Vec::new();
     writeln!(
         formatted,
@@ -13,17 +14,12 @@ pub fn write<P: AsRef<Path>>(path: P, content: TokenStream) -> Result<()> {
     writeln!(formatted, "// It is not intended for manual editing.")?;
     writeln!(formatted)?;

-    let mut config = rustfmt::Config::default();
-    config.set().emit_mode(rustfmt::EmitMode::Stdout);
-    config.set().verbose(rustfmt::Verbosity::Quiet);
-    config.set().format_macro_matchers(true);
-    config.set().normalize_doc_attributes(true);
+    let syntax_tree: syn::File = syn::parse2(content).unwrap();
+    let pretty = prettyplease::unparse(&syntax_tree);
+    write!(formatted, "{}", pretty)?;

-    let mut session = rustfmt::Session::new(config, Some(&mut formatted));
-    session.format(rustfmt::Input::Text(content.to_string()))?;
-    drop(session);
-
-    if path.as_ref().is_file() && fs::read(&path)? == formatted {
+    let path = workspace_path::get(relative_to_workspace_root);
+    if path.is_file() && fs::read(&path)? == formatted {
         return Ok(());
     }
diff --git a/codegen/src/fold.rs b/codegen/src/fold.rs
index c2db58f9d7..ec6ae30920 100644
--- a/codegen/src/fold.rs
+++ b/codegen/src/fold.rs
@@ -1,15 +1,15 @@
-use crate::error::Result;
 use crate::{file, full, gen};
+use anyhow::Result;
 use proc_macro2::{Ident, Span, TokenStream};
-use quote::quote;
+use quote::{format_ident, quote};
 use syn::Index;
 use syn_codegen::{Data, Definitions, Features, Node, Type};

-const FOLD_SRC: &str = "../src/gen/fold.rs";
+const FOLD_SRC: &str = "src/gen/fold.rs";

 fn simple_visit(item: &str, name: &TokenStream) -> TokenStream {
     let ident = gen::under_name(item);
-    let method = Ident::new(&format!("fold_{}", ident), Span::call_site());
+    let method = format_ident!("fold_{}", ident);
     quote! {
         f.#method(#name)
     }
@@ -32,21 +32,21 @@ fn visit(
             let operand = quote!(it);
             let val = visit(t, features, defs, &operand)?;
             Some(quote! {
-                FoldHelper::lift(#name, |it| { #val })
+                FoldHelper::lift(#name, |it| #val)
             })
         }
         Type::Punctuated(p) => {
             let operand = quote!(it);
             let val = visit(&p.element, features, defs, &operand)?;
             Some(quote! {
-                FoldHelper::lift(#name, |it| { #val })
+                FoldHelper::lift(#name, |it| #val)
             })
         }
         Type::Option(t) => {
             let it = quote!(it);
             let val = visit(t, features, defs, &it)?;
             Some(quote! {
-                (#name).map(|it| { #val })
+                (#name).map(|it| #val)
             })
         }
         Type::Tuple(t) => {
@@ -62,29 +62,6 @@ fn visit(
                 (#code)
             })
         }
-        Type::Token(t) => {
-            let repr = &defs.tokens[t];
-            let is_keyword = repr.chars().next().unwrap().is_alphabetic();
-            let spans = if is_keyword {
-                quote!(span)
-            } else {
-                quote!(spans)
-            };
-            let ty = if repr == "await" {
-                quote!(crate::token::Await)
-            } else {
-                syn::parse_str(&format!("Token![{}]", repr)).unwrap()
-            };
-            Some(quote! {
-                #ty(tokens_helper(f, &#name.#spans))
-            })
-        }
-        Type::Group(t) => {
-            let ty = Ident::new(t, Span::call_site());
-            Some(quote! {
-                #ty(tokens_helper(f, &#name.span))
-            })
-        }
         Type::Syn(t) => {
             fn requires_full(features: &Features) -> bool {
                 features.any.contains("full") && features.any.len() == 1
@@ -97,14 +74,14 @@ fn visit(
             Some(res)
         }
         Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
-        Type::Ext(_) | Type::Std(_) => None,
+        Type::Ext(_) | Type::Std(_) | Type::Token(_) | Type::Group(_) => None,
     }
 }

 fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Definitions) {
     let under_name = gen::under_name(&s.ident);
     let ty = Ident::new(&s.ident, Span::call_site());
-    let fold_fn = Ident::new(&format!("fold_{}", under_name), Span::call_site());
+    let fold_fn = format_ident!("fold_{}", under_name);

     let mut fold_impl = TokenStream::new();

@@ -126,8 +103,7 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
             let mut fold_fields = TokenStream::new();

             for (idx, ty) in fields.iter().enumerate() {
-                let name = format!("_binding_{}", idx);
-                let binding = Ident::new(&name, Span::call_site());
+                let binding = format_ident!("_binding_{}", idx);

                 bind_fold_fields.extend(quote! {
                     #binding,
@@ -152,16 +128,9 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
             }
         }

-        let nonexhaustive = if s.exhaustive {
-            None
-        } else {
-            Some(quote!(_ => unreachable!()))
-        };
-
         fold_impl.extend(quote! {
             match node {
                 #fold_variants
-                #nonexhaustive
             }
         });
     }
@@ -169,32 +138,17 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
         let mut fold_fields = TokenStream::new();

         for (field, ty) in fields {
-            let id = Ident::new(&field, Span::call_site());
+            let id = Ident::new(field, Span::call_site());
             let ref_toks = quote!(node.#id);

-            if let Type::Syn(ty) = ty {
-                if ty == "Reserved" {
-                    fold_fields.extend(quote! {
-                        #id: #ref_toks,
-                    });
-                    continue;
-                }
-            }
-
-            let fold = visit(&ty, &s.features, defs, &ref_toks).unwrap_or(ref_toks);
+            let fold = visit(ty, &s.features, defs, &ref_toks).unwrap_or(ref_toks);

             fold_fields.extend(quote! {
                 #id: #fold,
             });
         }

-        if !fields.is_empty() {
-            fold_impl.extend(quote! {
-                #ty {
-                    #fold_fields
-                }
-            })
-        } else {
+        if fields.is_empty() {
             if ty == "Ident" {
                 fold_impl.extend(quote! {
                     let mut node = node;
@@ -205,6 +159,12 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
             fold_impl.extend(quote! {
                 node
             });
+        } else {
+            fold_impl.extend(quote! {
+                #ty {
+                    #fold_fields
+                }
+            });
         }
     }
     Data::Private => {
@@ -256,13 +216,12 @@ pub fn generate(defs: &Definitions) -> Result<()> {
         quote! {
             // Unreachable code is generated sometimes without the full feature.
             #![allow(unreachable_code, unused_variables)]
+            #![allow(clippy::match_wildcard_for_single_variants, clippy::needless_match)]

-            use crate::*;
-            #[cfg(any(feature = "full", feature = "derive"))]
-            use crate::token::{Brace, Bracket, Paren, Group};
-            use proc_macro2::Span;
             #[cfg(any(feature = "full", feature = "derive"))]
             use crate::gen::helper::fold::*;
+            use crate::*;
+            use proc_macro2::Span;

             #full_macro

@@ -271,8 +230,6 @@ pub fn generate(defs: &Definitions) -> Result<()> {
             /// See the [module documentation] for details.
/// /// [module documentation]: self - /// - /// *This trait is available if Syn is built with the `"fold"` feature.* pub trait Fold { #traits } diff --git a/codegen/src/gen.rs b/codegen/src/gen.rs index ef431829bf..54f6e75b90 100644 --- a/codegen/src/gen.rs +++ b/codegen/src/gen.rs @@ -1,6 +1,6 @@ +use crate::cfg; use inflections::Inflect; use proc_macro2::{Ident, Span, TokenStream}; -use quote::quote; use syn_codegen::{Data, Definitions, Features, Node}; pub const TERMINAL_TYPES: &[&str] = &["Span", "Ident"]; @@ -14,9 +14,9 @@ pub fn traverse( node: fn(&mut TokenStream, &mut TokenStream, &Node, &Definitions), ) -> (TokenStream, TokenStream) { let mut types = defs.types.clone(); - for terminal in TERMINAL_TYPES { + for &terminal in TERMINAL_TYPES { types.push(Node { - ident: terminal.to_string(), + ident: terminal.to_owned(), features: Features::default(), data: Data::Private, exhaustive: true, @@ -27,15 +27,7 @@ pub fn traverse( let mut traits = TokenStream::new(); let mut impls = TokenStream::new(); for s in types { - if s.ident == "Reserved" { - continue; - } - let features = &s.features.any; - let features = match features.len() { - 0 => quote!(), - 1 => quote!(#[cfg(feature = #(#features)*)]), - _ => quote!(#[cfg(any(#(feature = #features),*))]), - }; + let features = cfg::features(&s.features); traits.extend(features.clone()); impls.extend(features); node(&mut traits, &mut impls, &s, defs); diff --git a/codegen/src/hash.rs b/codegen/src/hash.rs new file mode 100644 index 0000000000..323db351ee --- /dev/null +++ b/codegen/src/hash.rs @@ -0,0 +1,169 @@ +use crate::{cfg, file, lookup}; +use anyhow::Result; +use proc_macro2::{Ident, Span, TokenStream}; +use quote::{format_ident, quote}; +use syn_codegen::{Data, Definitions, Node, Type}; + +const HASH_SRC: &str = "src/gen/hash.rs"; + +fn skip(field_type: &Type) -> bool { + match field_type { + Type::Ext(ty) => ty == "Span", + Type::Token(_) | Type::Group(_) => true, + Type::Box(inner) => skip(inner), + Type::Tuple(inner) => inner.iter().all(skip), + _ => false, + } +} + +fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream { + let type_name = &node.ident; + let ident = Ident::new(type_name, Span::call_site()); + + match &node.data { + Data::Enum(variants) if variants.is_empty() => quote!(match *self {}), + Data::Enum(variants) => { + let arms = variants + .iter() + .enumerate() + .map(|(i, (variant_name, fields))| { + let i = u8::try_from(i).unwrap(); + let variant = Ident::new(variant_name, Span::call_site()); + if fields.is_empty() { + quote! { + #ident::#variant => { + state.write_u8(#i); + } + } + } else { + let mut pats = Vec::new(); + let mut hashes = Vec::new(); + for (i, field) in fields.iter().enumerate() { + if skip(field) { + pats.push(format_ident!("_")); + continue; + } + let var = format_ident!("v{}", i); + let mut hashed_val = quote!(#var); + match field { + Type::Ext(ty) if ty == "TokenStream" => { + hashed_val = quote!(TokenStreamHelper(#hashed_val)); + } + Type::Ext(ty) if ty == "Literal" => { + hashed_val = quote!(#hashed_val.to_string()); + } + _ => {} + } + hashes.push(quote! { + #hashed_val.hash(state); + }); + pats.push(var); + } + let mut cfg = None; + if node.ident == "Expr" { + if let Type::Syn(ty) = &fields[0] { + if !lookup::node(defs, ty).features.any.contains("derive") { + cfg = Some(quote!(#[cfg(feature = "full")])); + } + } + } + quote! 
{ + #cfg + #ident::#variant(#(#pats),*) => { + state.write_u8(#i); + #(#hashes)* + } + } + } + }); + let nonexhaustive = if node.ident == "Expr" { + Some(quote! { + #[cfg(not(feature = "full"))] + _ => unreachable!(), + }) + } else { + None + }; + quote! { + match self { + #(#arms)* + #nonexhaustive + } + } + } + Data::Struct(fields) => fields + .iter() + .filter_map(|(f, ty)| { + if skip(ty) { + return None; + } + let ident = Ident::new(f, Span::call_site()); + let mut val = quote!(self.#ident); + if let Type::Ext(ty) = ty { + if ty == "TokenStream" { + val = quote!(TokenStreamHelper(&#val)); + } + } + Some(quote! { + #val.hash(state); + }) + }) + .collect(), + Data::Private => unreachable!(), + } +} + +fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream { + let manual_hash = node.data == Data::Private + || node.ident == "Member" + || node.ident == "Index" + || node.ident == "Lifetime"; + if manual_hash { + return TokenStream::new(); + } + + let ident = Ident::new(&node.ident, Span::call_site()); + let cfg_features = cfg::features(&node.features); + + let body = expand_impl_body(defs, node); + + let hasher = match &node.data { + Data::Struct(_) if body.is_empty() => quote!(_state), + Data::Enum(variants) if variants.is_empty() => quote!(_state), + _ => quote!(state), + }; + + quote! { + #cfg_features + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Hash for #ident { + fn hash(&self, #hasher: &mut H) + where + H: Hasher, + { + #body + } + } + } +} + +pub fn generate(defs: &Definitions) -> Result<()> { + let mut impls = TokenStream::new(); + for node in &defs.types { + impls.extend(expand_impl(defs, node)); + } + + file::write( + HASH_SRC, + quote! { + #[cfg(any(feature = "derive", feature = "full"))] + use crate::tt::TokenStreamHelper; + use crate::*; + use std::hash::{Hash, Hasher}; + + #impls + }, + )?; + + Ok(()) +} diff --git a/codegen/src/json.rs b/codegen/src/json.rs index e202bbec39..28ceee8b56 100644 --- a/codegen/src/json.rs +++ b/codegen/src/json.rs @@ -1,6 +1,6 @@ -use crate::error::Result; +use crate::workspace_path; +use anyhow::Result; use std::fs; -use std::path::Path; use syn_codegen::Definitions; pub fn generate(defs: &Definitions) -> Result<()> { @@ -10,8 +10,7 @@ pub fn generate(defs: &Definitions) -> Result<()> { let check: Definitions = serde_json::from_str(&j)?; assert_eq!(*defs, check); - let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR")); - let json_path = codegen_root.join("../syn.json"); + let json_path = workspace_path::get("syn.json"); fs::write(json_path, j)?; Ok(()) diff --git a/codegen/src/lookup.rs b/codegen/src/lookup.rs new file mode 100644 index 0000000000..58ae526a99 --- /dev/null +++ b/codegen/src/lookup.rs @@ -0,0 +1,10 @@ +use syn_codegen::{Definitions, Node}; + +pub fn node<'a>(defs: &'a Definitions, name: &str) -> &'a Node { + for node in &defs.types { + if node.ident == name { + return node; + } + } + panic!("not found: {}", name) +} diff --git a/codegen/src/main.rs b/codegen/src/main.rs index dff7db2b28..8b8670c5e5 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -9,39 +9,45 @@ // Finally this crate generates the Visit, VisitMut, and Fold traits in Syn // programmatically from the syntax tree description. 
-#![recursion_limit = "128"] -#![allow(clippy::needless_pass_by_value)] +#![allow( + clippy::items_after_statements, + clippy::manual_let_else, + clippy::match_like_matches_macro, + clippy::similar_names, + clippy::too_many_lines, + clippy::uninlined_format_args +)] +mod cfg; +mod clone; mod debug; -mod error; +mod eq; mod file; mod fold; mod full; mod gen; +mod hash; mod json; +mod lookup; mod operand; mod parse; +mod snapshot; mod version; mod visit; mod visit_mut; +mod workspace_path; -use crate::error::Result; -use std::process; - -fn main() { +fn main() -> anyhow::Result<()> { color_backtrace::install(); - if let Err(err) = do_main() { - let _ = eprintln!("error: {}", err); - process::exit(1); - } -} - -fn do_main() -> Result<()> { let defs = parse::parse()?; + clone::generate(&defs)?; + debug::generate(&defs)?; + eq::generate(&defs)?; + hash::generate(&defs)?; json::generate(&defs)?; fold::generate(&defs)?; visit::generate(&defs)?; visit_mut::generate(&defs)?; - debug::generate(&defs)?; + snapshot::generate(&defs)?; Ok(()) } diff --git a/codegen/src/parse.rs b/codegen/src/parse.rs index a0f3a8d2b3..e5f6180239 100644 --- a/codegen/src/parse.rs +++ b/codegen/src/parse.rs @@ -1,42 +1,53 @@ -use crate::error::Result; -use crate::version; - +use crate::{version, workspace_path}; +use anyhow::{bail, Result}; use indexmap::IndexMap; use quote::quote; -use syn::parse::Parser; -use syn::{parse_quote, Data, DataStruct, DeriveInput, Ident, Item}; -use syn_codegen as types; - use std::collections::BTreeMap; -use std::fs::File; -use std::io::Read; -use std::path::Path; +use std::fs; +use std::path::{Path, PathBuf}; +use syn::parse::{Error, Parser}; +use syn::{ + parse_quote, Attribute, Data, DataEnum, DataStruct, DeriveInput, Fields, GenericArgument, + Ident, Item, PathArguments, TypeMacro, TypePath, TypeTuple, UseTree, Visibility, +}; +use syn_codegen as types; +use thiserror::Error; -const SYN_CRATE_ROOT: &str = "../src/lib.rs"; -const TOKEN_SRC: &str = "../src/token.rs"; +const SYN_CRATE_ROOT: &str = "src/lib.rs"; +const TOKEN_SRC: &str = "src/token.rs"; const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"]; const EXTRA_TYPES: &[&str] = &["Lifetime"]; -const NONEXHAUSTIVE: &str = "__Nonexhaustive"; -// NOTE: BTreeMap is used here instead of HashMap to have deterministic output. -type ItemLookup = BTreeMap; -type TokenLookup = BTreeMap; +struct Lookup { + items: BTreeMap, + // "+" => "Add" + tokens: BTreeMap, + // "PatLit" => "ExprLit" + aliases: BTreeMap, +} /// Parse the contents of `src` and return a list of AST types. 
pub fn parse() -> Result { - let mut item_lookup = BTreeMap::new(); - load_file(SYN_CRATE_ROOT, &[], &mut item_lookup)?; + let tokens = load_token_file(TOKEN_SRC)?; - let token_lookup = load_token_file(TOKEN_SRC)?; + let mut lookup = Lookup { + items: BTreeMap::new(), + tokens, + aliases: BTreeMap::new(), + }; + + load_file(SYN_CRATE_ROOT, &[], &mut lookup)?; let version = version::get()?; - let types = item_lookup + let types = lookup + .items .values() - .map(|item| introspect_item(item, &item_lookup, &token_lookup)) + .map(|item| introspect_item(item, &lookup)) .collect(); - let tokens = token_lookup + let tokens = lookup + .tokens .into_iter() .map(|(name, ty)| (ty, name)) .collect(); @@ -49,28 +60,28 @@ pub fn parse() -> Result { } /// Data extracted from syn source -#[derive(Clone)] pub struct AstItem { ast: DeriveInput, - features: Vec, + features: Vec, } -fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node { +fn introspect_item(item: &AstItem, lookup: &Lookup) -> types::Node { let features = introspect_features(&item.features); match &item.ast.data { - Data::Enum(ref data) => types::Node { + Data::Enum(data) => types::Node { ident: item.ast.ident.to_string(), features, - data: types::Data::Enum(introspect_enum(data, items, tokens)), - exhaustive: data.variants.iter().all(|v| v.ident != NONEXHAUSTIVE), + data: types::Data::Enum(introspect_enum(data, lookup)), + exhaustive: !(is_non_exhaustive(&item.ast.attrs) + || data.variants.iter().any(|v| is_doc_hidden(&v.attrs))), }, - Data::Struct(ref data) => types::Node { + Data::Struct(data) => types::Node { ident: item.ast.ident.to_string(), features, data: { if data.fields.iter().all(|f| is_pub(&f.vis)) { - types::Data::Struct(introspect_struct(data, items, tokens)) + types::Data::Struct(introspect_struct(data, lookup)) } else { types::Data::Private } @@ -81,69 +92,58 @@ fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> } } -fn introspect_enum( - item: &syn::DataEnum, - items: &ItemLookup, - tokens: &TokenLookup, -) -> types::Variants { +fn introspect_enum(item: &DataEnum, lookup: &Lookup) -> types::Variants { item.variants .iter() .filter_map(|variant| { - if variant.ident == NONEXHAUSTIVE { + if is_doc_hidden(&variant.attrs) { return None; } let fields = match &variant.fields { - syn::Fields::Unnamed(fields) => fields + Fields::Unnamed(fields) => fields .unnamed .iter() - .map(|field| introspect_type(&field.ty, items, tokens)) + .map(|field| introspect_type(&field.ty, lookup)) .collect(), - syn::Fields::Unit => vec![], - _ => panic!("Enum representation not supported"), + Fields::Unit => vec![], + Fields::Named(_) => panic!("Enum representation not supported"), }; Some((variant.ident.to_string(), fields)) }) .collect() } -fn introspect_struct( - item: &syn::DataStruct, - items: &ItemLookup, - tokens: &TokenLookup, -) -> types::Fields { +fn introspect_struct(item: &DataStruct, lookup: &Lookup) -> types::Fields { match &item.fields { - syn::Fields::Named(fields) => fields + Fields::Named(fields) => fields .named .iter() .map(|field| { ( field.ident.as_ref().unwrap().to_string(), - introspect_type(&field.ty, items, tokens), + introspect_type(&field.ty, lookup), ) }) .collect(), - syn::Fields::Unit => IndexMap::new(), - _ => panic!("Struct representation not supported"), + Fields::Unit => IndexMap::new(), + Fields::Unnamed(_) => panic!("Struct representation not supported"), } } -fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type 
{ +fn introspect_type(item: &syn::Type, lookup: &Lookup) -> types::Type { match item { - syn::Type::Path(syn::TypePath { - qself: None, - ref path, - }) => { + syn::Type::Path(TypePath { qself: None, path }) => { let last = path.segments.last().unwrap(); let string = last.ident.to_string(); match string.as_str() { "Option" => { - let nested = introspect_type(first_arg(&last.arguments), items, tokens); + let nested = introspect_type(first_arg(&last.arguments), lookup); types::Type::Option(Box::new(nested)) } "Punctuated" => { - let nested = introspect_type(first_arg(&last.arguments), items, tokens); - let punct = match introspect_type(last_arg(&last.arguments), items, tokens) { + let nested = introspect_type(first_arg(&last.arguments), lookup); + let punct = match introspect_type(last_arg(&last.arguments), lookup) { types::Type::Token(s) => s, _ => panic!(), }; @@ -154,38 +154,38 @@ fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) - }) } "Vec" => { - let nested = introspect_type(first_arg(&last.arguments), items, tokens); + let nested = introspect_type(first_arg(&last.arguments), lookup); types::Type::Vec(Box::new(nested)) } "Box" => { - let nested = introspect_type(first_arg(&last.arguments), items, tokens); + let nested = introspect_type(first_arg(&last.arguments), lookup); types::Type::Box(Box::new(nested)) } "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string), "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string), "String" | "u32" | "usize" | "bool" => types::Type::Std(string), - "Await" => types::Type::Token("Await".to_string()), _ => { - if items.get(&last.ident).is_some() || last.ident == "Reserved" { - types::Type::Syn(string) + let mut resolved = &last.ident; + while let Some(alias) = lookup.aliases.get(resolved) { + resolved = alias; + } + if lookup.items.get(resolved).is_some() { + types::Type::Syn(resolved.to_string()) } else { - unimplemented!("{}", string); + unimplemented!("{}", resolved); } } } } - syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => { - let tys = elems - .iter() - .map(|ty| introspect_type(&ty, items, tokens)) - .collect(); + syn::Type::Tuple(TypeTuple { elems, .. 
}) => { + let tys = elems.iter().map(|ty| introspect_type(ty, lookup)).collect(); types::Type::Tuple(tys) } - syn::Type::Macro(syn::TypeMacro { ref mac }) + syn::Type::Macro(TypeMacro { mac }) if mac.path.segments.last().unwrap().ident == "Token" => { let content = mac.tokens.to_string(); - let ty = tokens.get(&content).unwrap().to_string(); + let ty = lookup.tokens.get(&content).unwrap().to_string(); types::Type::Token(ty) } @@ -193,7 +193,7 @@ fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) - } } -fn introspect_features(attrs: &[syn::Attribute]) -> types::Features { +fn introspect_features(attrs: &[Attribute]) -> types::Features { let mut ret = types::Features::default(); for attr in attrs { @@ -216,56 +216,77 @@ fn introspect_features(attrs: &[syn::Attribute]) -> types::Features { ret } -fn is_pub(vis: &syn::Visibility) -> bool { +fn is_pub(vis: &Visibility) -> bool { match vis { - syn::Visibility::Public(_) => true, + Visibility::Public(_) => true, _ => false, } } -fn first_arg(params: &syn::PathArguments) -> &syn::Type { - let data = match *params { - syn::PathArguments::AngleBracketed(ref data) => data, +fn is_non_exhaustive(attrs: &[Attribute]) -> bool { + for attr in attrs { + if attr.path.is_ident("non_exhaustive") { + return true; + } + } + false +} + +fn is_doc_hidden(attrs: &[Attribute]) -> bool { + for attr in attrs { + if attr.path.is_ident("doc") + && parsing::parse_doc_hidden_attr + .parse2(attr.tokens.clone()) + .is_ok() + { + return true; + } + } + false +} + +fn first_arg(params: &PathArguments) -> &syn::Type { + let data = match params { + PathArguments::AngleBracketed(data) => data, _ => panic!("Expected at least 1 type argument here"), }; - match *data + match data .args .first() .expect("Expected at least 1 type argument here") { - syn::GenericArgument::Type(ref ty) => ty, + GenericArgument::Type(ty) => ty, _ => panic!("Expected at least 1 type argument here"), } } -fn last_arg(params: &syn::PathArguments) -> &syn::Type { - let data = match *params { - syn::PathArguments::AngleBracketed(ref data) => data, +fn last_arg(params: &PathArguments) -> &syn::Type { + let data = match params { + PathArguments::AngleBracketed(data) => data, _ => panic!("Expected at least 1 type argument here"), }; - match *data + match data .args .last() .expect("Expected at least 1 type argument here") { - syn::GenericArgument::Type(ref ty) => ty, + GenericArgument::Type(ty) => ty, _ => panic!("Expected at least 1 type argument here"), } } mod parsing { - use super::{AstItem, TokenLookup}; - - use proc_macro2::{TokenStream, TokenTree}; + use super::AstItem; + use proc_macro2::TokenStream; use quote::quote; - use syn; - use syn::parse::{ParseStream, Result}; - use syn::*; - use syn_codegen as types; - use std::collections::{BTreeMap, BTreeSet}; + use syn::parse::{ParseStream, Parser, Result}; + use syn::{ + braced, bracketed, parenthesized, parse_quote, token, Attribute, Ident, LitStr, Path, Token, + }; + use syn_codegen as types; fn peek_tag(input: ParseStream, tag: &str) -> bool { let ahead = input.fork(); @@ -278,7 +299,7 @@ mod parsing { // Parses #full - returns #[cfg(feature = "full")] if it is present, and // nothing otherwise. 
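// The `aliases` table in `Lookup` above records `pub use` renames harvested
// from src/lib.rs ("PatLit" => "ExprLit"), and `introspect_type` chases that
// chain until it reaches a real item. A self-contained sketch of the same
// resolution loop; `resolve` is hypothetical and uses String keys rather than
// the generator's `Ident` keys for brevity.
use std::collections::BTreeMap;

fn resolve<'a>(mut name: &'a str, aliases: &'a BTreeMap<String, String>) -> &'a str {
    // Follow rename edges until no further alias applies.
    while let Some(target) = aliases.get(name) {
        name = target;
    }
    name
}

fn main() {
    let mut aliases = BTreeMap::new();
    aliases.insert("PatLit".to_owned(), "ExprLit".to_owned());
    assert_eq!(resolve("PatLit", &aliases), "ExprLit");
    assert_eq!(resolve("ExprPath", &aliases), "ExprPath");
}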
- fn full(input: ParseStream) -> Vec { + fn full(input: ParseStream) -> Vec { if peek_tag(input, "full") { input.parse::().unwrap(); input.parse::().unwrap(); @@ -288,18 +309,10 @@ mod parsing { } } - fn skip_manual_extra_traits(input: ParseStream) { - if peek_tag(input, "manual_extra_traits") || peek_tag(input, "manual_extra_traits_debug") { - input.parse::().unwrap(); - input.parse::().unwrap(); - } - } - // Parses a simple AstStruct without the `pub struct` prefix. fn ast_struct_inner(input: ParseStream) -> Result { let ident: Ident = input.parse()?; let features = full(input); - skip_manual_extra_traits(input); let rest: TokenStream = input.parse()?; Ok(AstItem { ast: syn::parse2(quote! { @@ -328,7 +341,7 @@ mod parsing { } pub fn ast_enum(input: ParseStream) -> Result> { - input.call(Attribute::parse_outer)?; + let attrs = input.call(Attribute::parse_outer)?; input.parse::()?; input.parse::()?; let ident: Ident = input.parse()?; @@ -339,6 +352,7 @@ mod parsing { } else { Some(AstItem { ast: syn::parse2(quote! { + #(#attrs)* pub enum #ident #rest })?, features: vec![], @@ -348,11 +362,12 @@ mod parsing { // A single variant of an ast_enum_of_structs! struct EosVariant { + attrs: Vec, name: Ident, member: Option, } fn eos_variant(input: ParseStream) -> Result { - input.call(Attribute::parse_outer)?; + let attrs = input.call(Attribute::parse_outer)?; let variant: Ident = input.parse()?; let member = if input.peek(token::Paren) { let content; @@ -364,17 +379,17 @@ mod parsing { }; input.parse::()?; Ok(EosVariant { + attrs, name: variant, member, }) } pub fn ast_enum_of_structs(input: ParseStream) -> Result { - input.call(Attribute::parse_outer)?; + let attrs = input.call(Attribute::parse_outer)?; input.parse::()?; input.parse::()?; let ident: Ident = input.parse()?; - skip_manual_extra_traits(input); let content; braced!(content in input); @@ -383,19 +398,18 @@ mod parsing { variants.push(content.call(eos_variant)?); } - if let Some(ident) = input.parse::>()? { - assert_eq!(ident, "do_not_generate_to_tokens"); - } - let enum_item = { let variants = variants.iter().map(|v| { - let name = v.name.clone(); - match v.member { - Some(ref member) => quote!(#name(#member)), - None => quote!(#name), + let attrs = &v.attrs; + let name = &v.name; + if let Some(member) = &v.member { + quote!(#(#attrs)* #name(#member)) + } else { + quote!(#(#attrs)* #name) } }); parse_quote! 
{ + #(#attrs)* pub enum #ident { #(#variants),* } @@ -408,55 +422,35 @@ mod parsing { } mod kw { + syn::custom_keyword!(hidden); syn::custom_keyword!(macro_rules); syn::custom_keyword!(Token); } - pub fn parse_token_macro(input: ParseStream) -> Result { - input.parse::()?; - input.parse::]>()?; - - let definition; - braced!(definition in input); - definition.call(Attribute::parse_outer)?; - definition.parse::()?; - definition.parse::()?; - definition.parse::()?; - - let rules; - braced!(rules in definition); - input.parse::()?; - + pub fn parse_token_macro(input: ParseStream) -> Result> { let mut tokens = BTreeMap::new(); - while !rules.is_empty() { - if rules.peek(Token![$]) { - rules.parse::()?; - rules.parse::()?; - rules.parse::()?; - tokens.insert("await".to_owned(), "Await".to_owned()); - } else { - let pattern; - parenthesized!(pattern in rules); - let token = pattern.parse::()?.to_string(); - rules.parse::]>()?; - let expansion; - braced!(expansion in rules); - rules.parse::()?; - expansion.parse::()?; - let path: Path = expansion.parse()?; - let ty = path.segments.last().unwrap().ident.to_string(); - tokens.insert(token, ty.to_string()); - } + while !input.is_empty() { + let pattern; + bracketed!(pattern in input); + let token = pattern.parse::()?.to_string(); + input.parse::]>()?; + let expansion; + braced!(expansion in input); + input.parse::()?; + expansion.parse::()?; + let path: Path = expansion.parse()?; + let ty = path.segments.last().unwrap().ident.to_string(); + tokens.insert(token, ty.to_string()); } Ok(tokens) } fn parse_feature(input: ParseStream) -> Result { - let i: syn::Ident = input.parse()?; + let i: Ident = input.parse()?; assert_eq!(i, "feature"); input.parse::()?; - let s = input.parse::()?; + let s = input.parse::()?; Ok(s.value()) } @@ -467,10 +461,10 @@ mod parsing { let level_1; parenthesized!(level_1 in input); - let i: syn::Ident = level_1.fork().parse()?; + let i: Ident = level_1.fork().parse()?; if i == "any" { - level_1.parse::()?; + level_1.parse::()?; let level_2; parenthesized!(level_2 in level_1); @@ -493,33 +487,85 @@ mod parsing { Ok(types::Features { any: features }) } + + pub fn path_attr(attrs: &[Attribute]) -> Result> { + for attr in attrs { + if attr.path.is_ident("path") { + fn parser(input: ParseStream) -> Result { + input.parse::()?; + input.parse() + } + let filename = parser.parse2(attr.tokens.clone())?; + return Ok(Some(filename)); + } + } + Ok(None) + } + + pub fn parse_doc_hidden_attr(input: ParseStream) -> Result<()> { + let content; + parenthesized!(content in input); + content.parse::()?; + Ok(()) + } +} + +fn clone_features(features: &[Attribute]) -> Vec { + features.iter().map(|attr| parse_quote!(#attr)).collect() } -fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec { - let mut ret = base.to_owned(); +fn get_features(attrs: &[Attribute], base: &[Attribute]) -> Vec { + let mut ret = clone_features(base); for attr in attrs { if attr.path.is_ident("cfg") { - ret.push(attr.clone()); + ret.push(parse_quote!(#attr)); } } ret } -fn load_file>( - name: P, - features: &[syn::Attribute], - lookup: &mut ItemLookup, +#[derive(Error, Debug)] +#[error("{path}:{line}:{column}: {error}")] +struct LoadFileError { + path: PathBuf, + line: usize, + column: usize, + error: Error, +} + +fn load_file( + relative_to_workspace_root: impl AsRef, + features: &[Attribute], + lookup: &mut Lookup, ) -> Result<()> { - let name = name.as_ref(); - let parent = name.parent().expect("no parent path"); + let error = match 
do_load_file(&relative_to_workspace_root, features, lookup).err() { + None => return Ok(()), + Some(error) => error, + }; + + let error = error.downcast::()?; + let span = error.span().start(); - let mut f = File::open(name)?; - let mut src = String::new(); - f.read_to_string(&mut src)?; + bail!(LoadFileError { + path: relative_to_workspace_root.as_ref().to_owned(), + line: span.line, + column: span.column + 1, + error, + }) +} + +fn do_load_file( + relative_to_workspace_root: impl AsRef, + features: &[Attribute], + lookup: &mut Lookup, +) -> Result<()> { + let relative_to_workspace_root = relative_to_workspace_root.as_ref(); + let parent = relative_to_workspace_root.parent().expect("no parent path"); // Parse the file + let src = fs::read_to_string(workspace_path::get(relative_to_workspace_root))?; let file = syn::parse_file(&src)?; // Collect all of the interesting AstItems declared in this file or submodules. @@ -551,8 +597,13 @@ fn load_file>( }; // Look up the submodule file, and recursively parse it. - // XXX: Only handles same-directory .rs file submodules. - let path = parent.join(&format!("{}.rs", item.ident)); + // Only handles same-directory .rs file submodules for now. + let filename = if let Some(filename) = parsing::path_attr(&item.attrs)? { + filename.value() + } else { + format!("{}.rs", item.ident) + }; + let path = parent.join(filename); load_file(path, &features, lookup)?; } Item::Macro(item) => { @@ -573,17 +624,15 @@ fn load_file>( }; // Record our features on the parsed AstItems. - for mut item in found { - if item.ast.ident != "Reserved" { - item.features.extend(features.clone()); - lookup.insert(item.ast.ident.clone(), item); - } + if let Some(mut item) = found { + item.features.extend(clone_features(&features)); + lookup.items.insert(item.ast.ident.clone(), item); } } Item::Struct(item) => { let ident = item.ident; if EXTRA_TYPES.contains(&&ident.to_string()[..]) { - lookup.insert( + lookup.items.insert( ident.clone(), AstItem { ast: DeriveInput { @@ -597,34 +646,52 @@ fn load_file>( semi_token: item.semi_token, }), }, - features: features.to_owned(), + features: clone_features(features), }, ); } } + Item::Use(item) + if relative_to_workspace_root == Path::new(SYN_CRATE_ROOT) + && matches!(item.vis, Visibility::Public(_)) => + { + load_aliases(item.tree, lookup); + } _ => {} } } Ok(()) } -fn load_token_file>(name: P) -> Result { - let name = name.as_ref(); - let mut f = File::open(name)?; - let mut src = String::new(); - f.read_to_string(&mut src)?; +fn load_aliases(use_tree: UseTree, lookup: &mut Lookup) { + match use_tree { + UseTree::Path(use_tree) => load_aliases(*use_tree.tree, lookup), + UseTree::Rename(use_tree) => { + lookup.aliases.insert(use_tree.rename, use_tree.ident); + } + UseTree::Group(use_tree) => { + for use_tree in use_tree.items { + load_aliases(use_tree, lookup); + } + } + UseTree::Name(_) | UseTree::Glob(_) => {} + } +} + +fn load_token_file( + relative_to_workspace_root: impl AsRef, +) -> Result> { + let path = workspace_path::get(relative_to_workspace_root); + let src = fs::read_to_string(path)?; let file = syn::parse_file(&src)?; for item in file.items { - match item { - Item::Macro(item) => { - match item.ident { - Some(ref i) if i == "export_token_macro" => {} - _ => continue, - } - let tokens = item.mac.parse_body_with(parsing::parse_token_macro)?; - return Ok(tokens); + if let Item::Macro(item) = item { + match item.ident { + Some(i) if i == "Token" => {} + _ => continue, } - _ => {} + let tokens = 
item.mac.parse_body_with(parsing::parse_token_macro)?; + return Ok(tokens); } } diff --git a/codegen/src/snapshot.rs b/codegen/src/snapshot.rs new file mode 100644 index 0000000000..11510c76d0 --- /dev/null +++ b/codegen/src/snapshot.rs @@ -0,0 +1,352 @@ +use crate::operand::{Borrowed, Operand, Owned}; +use crate::{file, lookup}; +use anyhow::Result; +use proc_macro2::{Ident, Span, TokenStream}; +use quote::{format_ident, quote}; +use syn::Index; +use syn_codegen::{Data, Definitions, Node, Type}; + +const TESTS_DEBUG_SRC: &str = "tests/debug/gen.rs"; + +fn rust_type(ty: &Type) -> TokenStream { + match ty { + Type::Syn(ty) => { + let ident = Ident::new(ty, Span::call_site()); + quote!(syn::#ident) + } + Type::Std(ty) => { + let ident = Ident::new(ty, Span::call_site()); + quote!(#ident) + } + Type::Ext(ty) => { + let ident = Ident::new(ty, Span::call_site()); + quote!(proc_macro2::#ident) + } + Type::Token(ty) | Type::Group(ty) => { + let ident = Ident::new(ty, Span::call_site()); + quote!(syn::token::#ident) + } + Type::Punctuated(ty) => { + let element = rust_type(&ty.element); + let punct = Ident::new(&ty.punct, Span::call_site()); + quote!(syn::punctuated::Punctuated<#element, #punct>) + } + Type::Option(ty) => { + let inner = rust_type(ty); + quote!(Option<#inner>) + } + Type::Box(ty) => { + let inner = rust_type(ty); + quote!(Box<#inner>) + } + Type::Vec(ty) => { + let inner = rust_type(ty); + quote!(Vec<#inner>) + } + Type::Tuple(ty) => { + let inner = ty.iter().map(rust_type); + quote!((#(#inner,)*)) + } + } +} + +fn is_printable(ty: &Type) -> bool { + match ty { + Type::Ext(name) => name != "Span", + Type::Box(ty) => is_printable(ty), + Type::Tuple(ty) => ty.iter().any(is_printable), + Type::Token(_) | Type::Group(_) => false, + Type::Syn(_) | Type::Std(_) | Type::Punctuated(_) | Type::Option(_) | Type::Vec(_) => true, + } +} + +fn format_field(val: &Operand, ty: &Type) -> Option { + if !is_printable(ty) { + return None; + } + let format = match ty { + Type::Option(ty) => { + if let Some(format) = format_field(&Borrowed(quote!(_val)), ty) { + let ty = rust_type(ty); + let val = val.ref_tokens(); + quote!({ + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Option<#ty>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match &self.0 { + Some(_val) => { + formatter.write_str("Some(")?; + Debug::fmt(#format, formatter)?; + formatter.write_str(")")?; + Ok(()) + } + None => formatter.write_str("None"), + } + } + } + Print::ref_cast(#val) + }) + } else { + let val = val.tokens(); + quote! { + &super::Option { present: #val.is_some() } + } + } + } + Type::Tuple(ty) => { + let printable: Vec = ty + .iter() + .enumerate() + .filter_map(|(i, ty)| { + let index = Index::from(i); + let val = val.tokens(); + let inner = Owned(quote!(#val.#index)); + format_field(&inner, ty) + }) + .collect(); + if printable.len() == 1 { + printable.into_iter().next().unwrap() + } else { + quote! { + &(#(#printable),*) + } + } + } + _ => { + let val = val.ref_tokens(); + quote! 
{ Lite(#val) } + } + }; + Some(format) +} + +fn syntax_tree_enum<'a>(outer: &str, inner: &str, fields: &'a [Type]) -> Option<&'a str> { + if fields.len() != 1 { + return None; + } + const WHITELIST: &[(&str, &str)] = &[ + ("Meta", "Path"), + ("PathArguments", "AngleBracketed"), + ("PathArguments", "Parenthesized"), + ("Stmt", "Local"), + ("TypeParamBound", "Lifetime"), + ("Visibility", "Public"), + ("Visibility", "Restricted"), + ]; + match &fields[0] { + Type::Syn(ty) if WHITELIST.contains(&(outer, inner)) || outer.to_owned() + inner == *ty => { + Some(ty) + } + _ => None, + } +} + +fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand) -> TokenStream { + let ident = Ident::new(&node.ident, Span::call_site()); + + match &node.data { + Data::Enum(variants) if variants.is_empty() => quote!(unreachable!()), + Data::Enum(variants) => { + let arms = variants.iter().map(|(v, fields)| { + let path = format!("{}::{}", name, v); + let variant = Ident::new(v, Span::call_site()); + if fields.is_empty() { + quote! { + syn::#ident::#variant => formatter.write_str(#path), + } + } else if let Some(inner) = syntax_tree_enum(name, v, fields) { + let format = expand_impl_body( + defs, + lookup::node(defs, inner), + &path, + &Borrowed(quote!(_val)), + ); + quote! { + syn::#ident::#variant(_val) => { + #format + } + } + } else if fields.len() == 1 { + let val = quote!(_val); + let format = if variant == "Verbatim" { + Some(quote! { + formatter.write_str("(`")?; + Display::fmt(#val, formatter)?; + formatter.write_str("`)")?; + }) + } else { + let ty = &fields[0]; + format_field(&Borrowed(val), ty).map(|format| { + quote! { + formatter.write_str("(")?; + Debug::fmt(#format, formatter)?; + formatter.write_str(")")?; + } + }) + }; + quote! { + syn::#ident::#variant(_val) => { + formatter.write_str(#path)?; + #format + Ok(()) + } + } + } else { + let pats = (0..fields.len()).map(|i| format_ident!("_v{}", i)); + let fields = fields.iter().enumerate().filter_map(|(i, ty)| { + let index = format_ident!("_v{}", i); + let val = quote!(#index); + let format = format_field(&Borrowed(val), ty)?; + Some(quote! { + formatter.field(#format); + }) + }); + quote! { + syn::#ident::#variant(#(#pats),*) => { + let mut formatter = formatter.debug_tuple(#path); + #(#fields)* + formatter.finish() + } + } + } + }); + let nonexhaustive = if node.exhaustive { + None + } else { + Some(quote!(_ => unreachable!())) + }; + let val = val.ref_tokens(); + quote! { + match #val { + #(#arms)* + #nonexhaustive + } + } + } + Data::Struct(fields) => { + let fields = fields.iter().filter_map(|(f, ty)| { + let ident = Ident::new(f, Span::call_site()); + if let Type::Option(ty) = ty { + Some(if let Some(format) = format_field(&Owned(quote!(self.0)), ty) { + let val = val.tokens(); + let ty = rust_type(ty); + quote! { + if let Some(val) = &#val.#ident { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(#ty); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some(")?; + Debug::fmt(#format, formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field(#f, Print::ref_cast(val)); + } + } + } else { + let val = val.tokens(); + quote! { + if #val.#ident.is_some() { + formatter.field(#f, &Present); + } + } + }) + } else { + let val = val.tokens(); + let inner = Owned(quote!(#val.#ident)); + let format = format_field(&inner, ty)?; + let mut call = quote! 
{ + formatter.field(#f, #format); + }; + if let Type::Vec(_) | Type::Punctuated(_) = ty { + call = quote! { + if !#val.#ident.is_empty() { + #call + } + }; + } else if let Type::Syn(inner) = ty { + for node in &defs.types { + if node.ident == *inner { + if let Data::Enum(variants) = &node.data { + if variants.get("None").map_or(false, Vec::is_empty) { + let ty = rust_type(ty); + call = quote! { + match #val.#ident { + #ty::None => {} + _ => { #call } + } + }; + } + } + break; + } + } + } + Some(call) + } + }); + quote! { + let mut formatter = formatter.debug_struct(#name); + #(#fields)* + formatter.finish() + } + } + Data::Private => { + if node.ident == "LitInt" || node.ident == "LitFloat" { + let val = val.ref_tokens(); + quote! { + write!(formatter, "{}", #val) + } + } else { + let val = val.tokens(); + quote! { + write!(formatter, "{:?}", #val.value()) + } + } + } + } +} + +fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream { + let ident = Ident::new(&node.ident, Span::call_site()); + let body = expand_impl_body(defs, node, &node.ident, &Owned(quote!(self.value))); + let formatter = match &node.data { + Data::Enum(variants) if variants.is_empty() => quote!(_formatter), + _ => quote!(formatter), + }; + + quote! { + impl Debug for Lite { + fn fmt(&self, #formatter: &mut fmt::Formatter) -> fmt::Result { + #body + } + } + } +} + +pub fn generate(defs: &Definitions) -> Result<()> { + let mut impls = TokenStream::new(); + for node in &defs.types { + impls.extend(expand_impl(defs, node)); + } + + file::write( + TESTS_DEBUG_SRC, + quote! { + #![allow(clippy::match_wildcard_for_single_variants)] + + use super::{Lite, Present}; + use ref_cast::RefCast; + use std::fmt::{self, Debug, Display}; + + #impls + }, + )?; + + Ok(()) +} diff --git a/codegen/src/version.rs b/codegen/src/version.rs index 7ccca608d7..538614375b 100644 --- a/codegen/src/version.rs +++ b/codegen/src/version.rs @@ -1,13 +1,11 @@ -use crate::error::Result; +use crate::workspace_path; +use anyhow::Result; use semver::Version; use serde::Deserialize; - use std::fs; -use std::path::Path; pub fn get() -> Result { - let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR")); - let syn_cargo_toml = codegen_root.join("../Cargo.toml"); + let syn_cargo_toml = workspace_path::get("Cargo.toml"); let manifest = fs::read_to_string(syn_cargo_toml)?; let parsed: Manifest = toml::from_str(&manifest)?; Ok(parsed.package.version) diff --git a/codegen/src/visit.rs b/codegen/src/visit.rs index a937a2f12d..fcbe63d033 100644 --- a/codegen/src/visit.rs +++ b/codegen/src/visit.rs @@ -1,16 +1,16 @@ -use crate::error::Result; use crate::operand::{Borrowed, Operand, Owned}; use crate::{file, full, gen}; +use anyhow::Result; use proc_macro2::{Ident, Span, TokenStream}; -use quote::quote; +use quote::{format_ident, quote}; use syn::Index; use syn_codegen::{Data, Definitions, Features, Node, Type}; -const VISIT_SRC: &str = "../src/gen/visit.rs"; +const VISIT_SRC: &str = "src/gen/visit.rs"; fn simple_visit(item: &str, name: &Operand) -> TokenStream { let ident = gen::under_name(item); - let method = Ident::new(&format!("visit_{}", ident), Span::call_site()); + let method = format_ident!("visit_{}", ident); let name = name.ref_tokens(); quote! { v.#method(#name) @@ -41,7 +41,7 @@ fn visit( let name = name.ref_tokens(); Some(quote! { for it in #name { - #val + #val; } }) } @@ -51,21 +51,18 @@ fn visit( let name = name.ref_tokens(); Some(quote! 
{ for el in Punctuated::pairs(#name) { - let (it, p) = el.into_tuple(); + let it = el.value(); #val; - if let Some(p) = p { - tokens_helper(v, &p.spans); - } } }) } Type::Option(t) => { let it = Borrowed(quote!(it)); let val = visit(t, features, defs, &it)?; - let name = name.owned_tokens(); + let name = name.ref_tokens(); Some(quote! { - if let Some(it) = &#name { - #val + if let Some(it) = #name { + #val; } }) } @@ -81,25 +78,6 @@ fn visit( } Some(code) } - Type::Token(t) => { - let name = name.tokens(); - let repr = &defs.tokens[t]; - let is_keyword = repr.chars().next().unwrap().is_alphabetic(); - let spans = if is_keyword { - quote!(span) - } else { - quote!(spans) - }; - Some(quote! { - tokens_helper(v, &#name.#spans) - }) - } - Type::Group(_) => { - let name = name.tokens(); - Some(quote! { - tokens_helper(v, &#name.span) - }) - } Type::Syn(t) => { fn requires_full(features: &Features) -> bool { features.any.contains("full") && features.any.len() == 1 @@ -112,18 +90,23 @@ fn visit( Some(res) } Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)), - Type::Ext(_) | Type::Std(_) => None, + Type::Ext(_) | Type::Std(_) | Type::Token(_) | Type::Group(_) => None, } } fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Definitions) { let under_name = gen::under_name(&s.ident); let ty = Ident::new(&s.ident, Span::call_site()); - let visit_fn = Ident::new(&format!("visit_{}", under_name), Span::call_site()); + let visit_fn = format_ident!("visit_{}", under_name); let mut visit_impl = TokenStream::new(); match &s.data { + Data::Enum(variants) if variants.is_empty() => { + visit_impl.extend(quote! { + match *node {} + }); + } Data::Enum(variants) => { let mut visit_variants = TokenStream::new(); @@ -139,8 +122,7 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi let mut visit_fields = TokenStream::new(); for (idx, ty) in fields.iter().enumerate() { - let name = format!("_binding_{}", idx); - let binding = Ident::new(&name, Span::call_site()); + let binding = format_ident!("_binding_{}", idx); bind_visit_fields.extend(quote! { #binding, @@ -164,30 +146,17 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi } } - let nonexhaustive = if s.exhaustive { - None - } else { - Some(quote!(_ => unreachable!())) - }; - visit_impl.extend(quote! { match node { #visit_variants - #nonexhaustive } }); } Data::Struct(fields) => { for (field, ty) in fields { - if let Type::Syn(ty) = ty { - if ty == "Reserved" { - continue; - } - } - - let id = Ident::new(&field, Span::call_site()); + let id = Ident::new(field, Span::call_site()); let ref_toks = Owned(quote!(node.#id)); - let visit_field = visit(&ty, &s.features, defs, &ref_toks) + let visit_field = visit(ty, &s.features, defs, &ref_toks) .unwrap_or_else(|| noop_visit(&ref_toks)); visit_impl.extend(quote! { #visit_field; @@ -211,7 +180,7 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi traits.extend(quote! { fn #visit_fn(&mut self, i: &#ast_lifetime #ty) { - #visit_fn(self, i) + #visit_fn(self, i); } }); @@ -233,16 +202,13 @@ pub fn generate(defs: &Definitions) -> Result<()> { quote! 
{ #![allow(unused_variables)] - use crate::*; #[cfg(any(feature = "full", feature = "derive"))] use crate::punctuated::Punctuated; + use crate::*; use proc_macro2::Span; - #[cfg(any(feature = "full", feature = "derive"))] - use crate::gen::helper::visit::*; #full_macro - #[cfg(any(feature = "full", feature = "derive"))] macro_rules! skip { ($($tt:tt)*) => {}; } @@ -252,8 +218,6 @@ pub fn generate(defs: &Definitions) -> Result<()> { /// See the [module documentation] for details. /// /// [module documentation]: self - /// - /// *This trait is available if Syn is built with the `"visit"` feature.* pub trait Visit<'ast> { #traits } diff --git a/codegen/src/visit_mut.rs b/codegen/src/visit_mut.rs index c681bf278f..80069e943e 100644 --- a/codegen/src/visit_mut.rs +++ b/codegen/src/visit_mut.rs @@ -1,16 +1,16 @@ -use crate::error::Result; use crate::operand::{Borrowed, Operand, Owned}; use crate::{file, full, gen}; +use anyhow::Result; use proc_macro2::{Ident, Span, TokenStream}; -use quote::quote; +use quote::{format_ident, quote}; use syn::Index; use syn_codegen::{Data, Definitions, Features, Node, Type}; -const VISIT_MUT_SRC: &str = "../src/gen/visit_mut.rs"; +const VISIT_MUT_SRC: &str = "src/gen/visit_mut.rs"; fn simple_visit(item: &str, name: &Operand) -> TokenStream { let ident = gen::under_name(item); - let method = Ident::new(&format!("visit_{}_mut", ident), Span::call_site()); + let method = format_ident!("visit_{}_mut", ident); let name = name.ref_mut_tokens(); quote! { v.#method(#name) @@ -41,7 +41,7 @@ fn visit( let name = name.ref_mut_tokens(); Some(quote! { for it in #name { - #val + #val; } }) } @@ -50,22 +50,19 @@ fn visit( let val = visit(&p.element, features, defs, &operand)?; let name = name.ref_mut_tokens(); Some(quote! { - for el in Punctuated::pairs_mut(#name) { - let (it, p) = el.into_tuple(); + for mut el in Punctuated::pairs_mut(#name) { + let it = el.value_mut(); #val; - if let Some(p) = p { - tokens_helper(v, &mut p.spans); - } } }) } Type::Option(t) => { let it = Borrowed(quote!(it)); let val = visit(t, features, defs, &it)?; - let name = name.owned_tokens(); + let name = name.ref_mut_tokens(); Some(quote! { - if let Some(it) = &mut #name { - #val + if let Some(it) = #name { + #val; } }) } @@ -81,25 +78,6 @@ fn visit( } Some(code) } - Type::Token(t) => { - let name = name.tokens(); - let repr = &defs.tokens[t]; - let is_keyword = repr.chars().next().unwrap().is_alphabetic(); - let spans = if is_keyword { - quote!(span) - } else { - quote!(spans) - }; - Some(quote! { - tokens_helper(v, &mut #name.#spans) - }) - } - Type::Group(_) => { - let name = name.tokens(); - Some(quote! { - tokens_helper(v, &mut #name.span) - }) - } Type::Syn(t) => { fn requires_full(features: &Features) -> bool { features.any.contains("full") && features.any.len() == 1 @@ -112,18 +90,23 @@ fn visit( Some(res) } Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)), - Type::Ext(_) | Type::Std(_) => None, + Type::Ext(_) | Type::Std(_) | Type::Token(_) | Type::Group(_) => None, } } fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Definitions) { let under_name = gen::under_name(&s.ident); let ty = Ident::new(&s.ident, Span::call_site()); - let visit_mut_fn = Ident::new(&format!("visit_{}_mut", under_name), Span::call_site()); + let visit_mut_fn = format_ident!("visit_{}_mut", under_name); let mut visit_mut_impl = TokenStream::new(); match &s.data { + Data::Enum(variants) if variants.is_empty() => { + visit_mut_impl.extend(quote! 
{ + match *node {} + }); + } Data::Enum(variants) => { let mut visit_mut_variants = TokenStream::new(); @@ -139,8 +122,7 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi let mut visit_mut_fields = TokenStream::new(); for (idx, ty) in fields.iter().enumerate() { - let name = format!("_binding_{}", idx); - let binding = Ident::new(&name, Span::call_site()); + let binding = format_ident!("_binding_{}", idx); bind_visit_mut_fields.extend(quote! { #binding, @@ -164,30 +146,17 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi } } - let nonexhaustive = if s.exhaustive { - None - } else { - Some(quote!(_ => unreachable!())) - }; - visit_mut_impl.extend(quote! { match node { #visit_mut_variants - #nonexhaustive } }); } Data::Struct(fields) => { for (field, ty) in fields { - if let Type::Syn(ty) = ty { - if ty == "Reserved" { - continue; - } - } - - let id = Ident::new(&field, Span::call_site()); + let id = Ident::new(field, Span::call_site()); let ref_toks = Owned(quote!(node.#id)); - let visit_mut_field = visit(&ty, &s.features, defs, &ref_toks) + let visit_mut_field = visit(ty, &s.features, defs, &ref_toks) .unwrap_or_else(|| noop_visit(&ref_toks)); visit_mut_impl.extend(quote! { #visit_mut_field; @@ -207,7 +176,7 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi traits.extend(quote! { fn #visit_mut_fn(&mut self, i: &mut #ty) { - #visit_mut_fn(self, i) + #visit_mut_fn(self, i); } }); @@ -229,16 +198,13 @@ pub fn generate(defs: &Definitions) -> Result<()> { quote! { #![allow(unused_variables)] - use crate::*; #[cfg(any(feature = "full", feature = "derive"))] use crate::punctuated::Punctuated; + use crate::*; use proc_macro2::Span; - #[cfg(any(feature = "full", feature = "derive"))] - use crate::gen::helper::visit_mut::*; #full_macro - #[cfg(any(feature = "full", feature = "derive"))] macro_rules! skip { ($($tt:tt)*) => {}; } @@ -249,8 +215,6 @@ pub fn generate(defs: &Definitions) -> Result<()> { /// See the [module documentation] for details. /// /// [module documentation]: self - /// - /// *This trait is available if Syn is built with the `"visit-mut"` feature.* pub trait VisitMut { #traits } diff --git a/codegen/src/workspace_path.rs b/codegen/src/workspace_path.rs new file mode 100644 index 0000000000..eb29bce2e8 --- /dev/null +++ b/codegen/src/workspace_path.rs @@ -0,0 +1,8 @@ +use std::path::{Path, PathBuf}; + +pub fn get(relative_to_workspace_root: impl AsRef) -> PathBuf { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + assert!(path.pop()); + path.push(relative_to_workspace_root); + path +} diff --git a/dev/Cargo.toml b/dev/Cargo.toml index 79486c122a..eb3ff57cdb 100644 --- a/dev/Cargo.toml +++ b/dev/Cargo.toml @@ -2,7 +2,7 @@ name = "syn-dev" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [lib] @@ -14,7 +14,7 @@ path = "main.rs" name = "syn-dev" [dependencies] -quote = "1.0" +quote = "1" [dependencies.syn] path = ".." 
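// A hedged usage sketch for the VisitMut trait generated by visit_mut.rs
// above. `Renamer` is a hypothetical visitor; the sketch assumes syn is built
// with the "full" and "visit-mut" features.
use proc_macro2::Ident;
use syn::visit_mut::VisitMut;

struct Renamer;

impl VisitMut for Renamer {
    fn visit_ident_mut(&mut self, ident: &mut Ident) {
        // Rewrite every occurrence of the identifier `old_name`.
        if ident == "old_name" {
            *ident = Ident::new("new_name", ident.span());
        }
    }
}

fn main() {
    let mut file: syn::File = syn::parse_quote! {
        fn old_name() {}
    };
    Renamer.visit_file_mut(&mut file);
}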
diff --git a/dev/parse.rs b/dev/parse.rs index 2a92550511..c671b25596 100644 --- a/dev/parse.rs +++ b/dev/parse.rs @@ -1,5 +1,3 @@ -extern crate proc_macro; - use proc_macro::TokenStream; use quote::quote; use syn::File; diff --git a/examples/dump-syntax/Cargo.toml b/examples/dump-syntax/Cargo.toml index 0bc9f62177..2f33a4f3ab 100644 --- a/examples/dump-syntax/Cargo.toml +++ b/examples/dump-syntax/Cargo.toml @@ -2,16 +2,14 @@ name = "dump-syntax" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [dependencies] -colored = "1.7" -proc-macro2 = { version = "1.0", features = ["span-locations"] } +colored = "2" +proc-macro2 = { version = "1", features = ["span-locations"] } [dependencies.syn] path = "../.." default-features = false features = ["parsing", "full", "extra-traits"] - -[workspace] diff --git a/examples/dump-syntax/src/main.rs b/examples/dump-syntax/src/main.rs index 240b7a2975..d23c31a141 100644 --- a/examples/dump-syntax/src/main.rs +++ b/examples/dump-syntax/src/main.rs @@ -13,10 +13,11 @@ //! attrs: [ //! Attribute { //! pound_token: Pound, -//! style: Inner( +//! style: AttrStyle::Inner( //! ... //! } +use colored::Colorize; use std::borrow::Cow; use std::env; use std::ffi::OsStr; @@ -26,8 +27,6 @@ use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process; -use colored::Colorize; - enum Error { IncorrectUsage, ReadFile(io::Error), diff --git a/examples/heapsize/Cargo.toml b/examples/heapsize/Cargo.toml deleted file mode 100644 index 9b19214075..0000000000 --- a/examples/heapsize/Cargo.toml +++ /dev/null @@ -1,2 +0,0 @@ -[workspace] -members = ["example", "heapsize", "heapsize_derive"] diff --git a/examples/heapsize/example/Cargo.toml b/examples/heapsize/example/Cargo.toml index 85c7699c86..ab2c4266fd 100644 --- a/examples/heapsize/example/Cargo.toml +++ b/examples/heapsize/example/Cargo.toml @@ -2,7 +2,7 @@ name = "heapsize_example" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [dependencies] diff --git a/examples/heapsize/heapsize/Cargo.toml b/examples/heapsize/heapsize/Cargo.toml index 27bb95414c..2b2c31e984 100644 --- a/examples/heapsize/heapsize/Cargo.toml +++ b/examples/heapsize/heapsize/Cargo.toml @@ -2,7 +2,7 @@ name = "heapsize" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [dependencies] diff --git a/examples/heapsize/heapsize_derive/Cargo.toml b/examples/heapsize/heapsize_derive/Cargo.toml index f4357b9830..06066ccab1 100644 --- a/examples/heapsize/heapsize_derive/Cargo.toml +++ b/examples/heapsize/heapsize_derive/Cargo.toml @@ -2,13 +2,13 @@ name = "heapsize_derive" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [lib] proc-macro = true [dependencies] -proc-macro2 = "1.0" -quote = "1.0" +proc-macro2 = "1" +quote = "1" syn = { path = "../../.." 
} diff --git a/examples/heapsize/heapsize_derive/src/lib.rs b/examples/heapsize/heapsize_derive/src/lib.rs index 9176b29af6..b1246093a0 100644 --- a/examples/heapsize/heapsize_derive/src/lib.rs +++ b/examples/heapsize/heapsize_derive/src/lib.rs @@ -1,9 +1,9 @@ -extern crate proc_macro; - use proc_macro2::TokenStream; use quote::{quote, quote_spanned}; use syn::spanned::Spanned; -use syn::{parse_macro_input, parse_quote, Data, DeriveInput, Fields, GenericParam, Generics, Index}; +use syn::{ + parse_macro_input, parse_quote, Data, DeriveInput, Fields, GenericParam, Generics, Index, +}; #[proc_macro_derive(HeapSize)] pub fn derive_heap_size(input: proc_macro::TokenStream) -> proc_macro::TokenStream { diff --git a/examples/lazy-static/Cargo.toml b/examples/lazy-static/Cargo.toml deleted file mode 100644 index 586e547f78..0000000000 --- a/examples/lazy-static/Cargo.toml +++ /dev/null @@ -1,2 +0,0 @@ -[workspace] -members = ["example", "lazy-static"] diff --git a/examples/lazy-static/README.md b/examples/lazy-static/README.md index bc6458544d..bc2a66f0c9 100644 --- a/examples/lazy-static/README.md +++ b/examples/lazy-static/README.md @@ -6,7 +6,7 @@ individual tokens of the input. - [`example/src/main.rs`](example/src/main.rs) The library implements a `lazy_static!` macro similar to the one from the real -[`lazy_static`](https://docs.rs/lazy_static/1.0.0/lazy_static/) crate on +[`lazy_static`](https://docs.rs/lazy_static/1.0/lazy_static/) crate on crates.io. ```rust diff --git a/examples/lazy-static/example/Cargo.toml b/examples/lazy-static/example/Cargo.toml index 716b08c003..b6e95a2bbc 100644 --- a/examples/lazy-static/example/Cargo.toml +++ b/examples/lazy-static/example/Cargo.toml @@ -1,10 +1,10 @@ [package] -name = "example" +name = "lazy-static-example" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [dependencies] lazy_static = { path = "../lazy-static" } -regex = "0.2" +regex = "1" diff --git a/examples/lazy-static/lazy-static/Cargo.toml b/examples/lazy-static/lazy-static/Cargo.toml index bf65787c0a..be966caa41 100644 --- a/examples/lazy-static/lazy-static/Cargo.toml +++ b/examples/lazy-static/lazy-static/Cargo.toml @@ -2,13 +2,13 @@ name = "lazy_static" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [lib] proc-macro = true [dependencies] -proc-macro2 = { version = "1.0", features = ["nightly"] } -quote = "1.0" +proc-macro2 = { version = "1", features = ["nightly"] } +quote = "1" syn = { path = "../../../", features = ["full"] } diff --git a/examples/lazy-static/lazy-static/src/lib.rs b/examples/lazy-static/lazy-static/src/lib.rs index 254ca72802..20d1f39774 100644 --- a/examples/lazy-static/lazy-static/src/lib.rs +++ b/examples/lazy-static/lazy-static/src/lib.rs @@ -1,9 +1,6 @@ -#![recursion_limit = "128"] #![feature(proc_macro_diagnostic)] -extern crate proc_macro; -use self::proc_macro::TokenStream; - +use proc_macro::TokenStream; use quote::{quote, quote_spanned}; use syn::parse::{Parse, ParseStream, Result}; use syn::spanned::Spanned; diff --git a/examples/trace-var/Cargo.toml b/examples/trace-var/Cargo.toml deleted file mode 100644 index b54454d507..0000000000 --- a/examples/trace-var/Cargo.toml +++ /dev/null @@ -1,2 +0,0 @@ -[workspace] -members = ["example", "trace-var"] diff --git a/examples/trace-var/README.md b/examples/trace-var/README.md index 09f5f1acac..b93fae2b2c 100644 --- a/examples/trace-var/README.md +++ b/examples/trace-var/README.md @@ -42,7 +42,7 @@ n = 1 The 
procedural macro uses a syntax tree [`Fold`] to rewrite every `let` statement and assignment expression in the following way: -[`Fold`]: https://docs.rs/syn/0.15/syn/fold/trait.Fold.html +[`Fold`]: https://docs.rs/syn/1.0/syn/fold/trait.Fold.html ```rust // Before diff --git a/examples/trace-var/example/Cargo.toml b/examples/trace-var/example/Cargo.toml index d2ad6502c6..b95e4c351c 100644 --- a/examples/trace-var/example/Cargo.toml +++ b/examples/trace-var/example/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "example" +name = "trace-var-example" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [dependencies] diff --git a/examples/trace-var/trace-var/Cargo.toml b/examples/trace-var/trace-var/Cargo.toml index 72f56e9223..21ae03836d 100644 --- a/examples/trace-var/trace-var/Cargo.toml +++ b/examples/trace-var/trace-var/Cargo.toml @@ -2,13 +2,13 @@ name = "trace-var" version = "0.0.0" authors = ["David Tolnay "] -edition = "2018" +edition = "2021" publish = false [lib] proc-macro = true [dependencies] -proc-macro2 = { version = "1.0", features = ["nightly"] } -quote = "1.0" +proc-macro2 = { version = "1", features = ["nightly"] } +quote = "1" syn = { path = "../../../", features = ["full", "fold"] } diff --git a/examples/trace-var/trace-var/src/lib.rs b/examples/trace-var/trace-var/src/lib.rs index 0ecfb4783c..0dcaa94bb2 100644 --- a/examples/trace-var/trace-var/src/lib.rs +++ b/examples/trace-var/trace-var/src/lib.rs @@ -1,12 +1,10 @@ -extern crate proc_macro; -use self::proc_macro::TokenStream; - +use proc_macro::TokenStream; use quote::{quote, ToTokens}; use std::collections::HashSet as Set; use syn::fold::{self, Fold}; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; -use syn::{parse_macro_input, parse_quote, Expr, Ident, ItemFn, Local, Pat, Stmt, Token}; +use syn::{parse_macro_input, parse_quote, BinOp, Expr, Ident, ItemFn, Local, Pat, Stmt, Token}; /// Parses a list of variable names separated by commas. /// @@ -86,7 +84,7 @@ impl Args { /// let VAR = { let VAR = INIT; println!("VAR = {:?}", VAR); VAR }; fn let_and_print(&mut self, local: Local) -> Stmt { let Local { pat, init, .. } = local; - let init = self.fold_expr(*init.unwrap().1); + let init = self.fold_expr(*init.unwrap().expr); let ident = match pat { Pat::Ident(ref p) => &p.ident, _ => unreachable!(), @@ -124,11 +122,11 @@ impl Fold for Args { Expr::Assign(fold::fold_expr_assign(self, e)) } } - Expr::AssignOp(e) => { + Expr::Binary(e) if is_assign_op(e.op) => { if self.should_print_expr(&e.left) { self.assign_and_print(*e.left, &e.op, *e.right) } else { - Expr::AssignOp(fold::fold_expr_assign_op(self, e)) + Expr::Binary(fold::fold_expr_binary(self, e)) } } _ => fold::fold_expr(self, e), @@ -149,6 +147,22 @@ impl Fold for Args { } } +fn is_assign_op(op: BinOp) -> bool { + match op { + BinOp::AddAssign(_) + | BinOp::SubAssign(_) + | BinOp::MulAssign(_) + | BinOp::DivAssign(_) + | BinOp::RemAssign(_) + | BinOp::BitXorAssign(_) + | BinOp::BitAndAssign(_) + | BinOp::BitOrAssign(_) + | BinOp::ShlAssign(_) + | BinOp::ShrAssign(_) => true, + _ => false, + } +} + /// Attribute to print the value of the given variables each time they are /// reassigned. 
/// diff --git a/fuzz/.gitignore b/fuzz/.gitignore new file mode 100644 index 0000000000..188f196098 --- /dev/null +++ b/fuzz/.gitignore @@ -0,0 +1,3 @@ +artifacts/ +corpus/ +target/ diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml new file mode 100644 index 0000000000..3a2dee488c --- /dev/null +++ b/fuzz/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "syn-fuzz" +version = "0.0.0" +authors = ["David Tolnay "] +edition = "2021" +publish = false + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +proc-macro2 = "1.0.52" +syn = { path = "..", default-features = false, features = ["full", "parsing"] } + +[[bin]] +name = "create_token_buffer" +path = "fuzz_targets/create_token_buffer.rs" +test = false +doc = false + +[[bin]] +name = "parse_file" +path = "fuzz_targets/parse_file.rs" +test = false +doc = false + +[workspace] diff --git a/fuzz/fuzz_targets/create_token_buffer.rs b/fuzz/fuzz_targets/create_token_buffer.rs new file mode 100644 index 0000000000..48540e1b39 --- /dev/null +++ b/fuzz/fuzz_targets/create_token_buffer.rs @@ -0,0 +1,18 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use proc_macro2::Span; +use std::str; +use syn::parse::{ParseStream, Parser}; + +fn immediate_fail(_input: ParseStream) -> syn::Result<()> { + Err(syn::Error::new(Span::call_site(), "")) +} + +fuzz_target!(|data: &[u8]| { + if data.len() < 300 { + if let Ok(string) = str::from_utf8(data) { + let _ = immediate_fail.parse_str(string); + } + } +}); diff --git a/fuzz/fuzz_targets/parse_file.rs b/fuzz/fuzz_targets/parse_file.rs new file mode 100644 index 0000000000..b30d694eed --- /dev/null +++ b/fuzz/fuzz_targets/parse_file.rs @@ -0,0 +1,12 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use std::str; + +fuzz_target!(|data: &[u8]| { + if data.len() < 300 { + if let Ok(string) = str::from_utf8(data) { + let _ = syn::parse_file(string); + } + } +}); diff --git a/json/Cargo.toml b/json/Cargo.toml index 77104dc822..7a5cc8ef3f 100644 --- a/json/Cargo.toml +++ b/json/Cargo.toml @@ -1,18 +1,22 @@ [package] name = "syn-codegen" -version = "0.1.0" +version = "0.3.0" # also update html_root_url authors = ["David Tolnay "] -edition = "2018" -license = "MIT OR Apache-2.0" +categories = ["development-tools::procedural-macro-helpers"] description = "Syntax tree describing Syn's syntax tree" -repository = "https://github.com/dtolnay/syn" documentation = "https://docs.rs/syn-codegen" -categories = ["development-tools::procedural-macro-helpers"] +edition = "2021" +keywords = ["syn"] +license = "MIT OR Apache-2.0" +repository = "https://github.com/dtolnay/syn" [dependencies] -indexmap = { version = "1.0", features = ["serde-1"] } -semver = { version = "0.9", features = ["serde"] } +indexmap = { version = "1", features = ["serde-1"] } +semver = { version = "1", features = ["serde"] } serde = { version = "1.0.88", features = ["derive"] } [dev-dependencies] -serde_json = "1.0" +serde_json = "1" + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/json/src/lib.rs b/json/src/lib.rs index 1234b81ffd..be5a02e0aa 100644 --- a/json/src/lib.rs +++ b/json/src/lib.rs @@ -13,9 +13,9 @@ //! of the [`visit`], [`visit_mut`], and [`fold`] modules can be generated //! programmatically from a description of the syntax tree. //! -//! [`visit`]: https://docs.rs/syn/0.15/syn/visit/index.html -//! [`visit_mut`]: https://docs.rs/syn/0.15/syn/visit_mut/index.html -//! [`fold`]: https://docs.rs/syn/0.15/syn/fold/index.html +//! [`visit`]: https://docs.rs/syn/1.0/syn/visit/index.html +//! 
[`visit_mut`]: https://docs.rs/syn/1.0/syn/visit_mut/index.html +//! [`fold`]: https://docs.rs/syn/1.0/syn/fold/index.html //! //! To make this type of code as easy as possible to implement in any language, //! every Syn release comes with a machine-readable description of that version @@ -44,10 +44,11 @@ //! } //! ``` +#![doc(html_root_url = "https://docs.rs/syn-codegen/0.2.0")] + use indexmap::IndexMap; use semver::Version; use serde::{Deserialize, Deserializer, Serialize}; - use std::collections::{BTreeMap, BTreeSet}; /// Top-level content of the syntax tree description. diff --git a/src/attr.rs b/src/attr.rs index 34009deabc..34d5515a56 100644 --- a/src/attr.rs +++ b/src/attr.rs @@ -1,24 +1,15 @@ use super::*; -use crate::punctuated::Punctuated; - -use std::iter; - use proc_macro2::TokenStream; +use std::iter; +use std::slice; #[cfg(feature = "parsing")] -use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result}; +use crate::meta::{self, ParseNestedMeta}; #[cfg(feature = "parsing")] -use crate::punctuated::Pair; -#[cfg(feature = "extra-traits")] -use crate::tt::TokenStreamHelper; -#[cfg(feature = "extra-traits")] -use std::hash::{Hash, Hasher}; +use crate::parse::{Parse, ParseStream, Parser, Result}; ast_struct! { - /// An attribute like `#[repr(transparent)]`. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* + /// An attribute, like `#[repr(transparent)]`. /// ///
/// @@ -28,27 +19,52 @@ ast_struct! { /// /// - Outer attributes like `#[repr(transparent)]`. These appear outside or /// in front of the item they describe. + /// /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside /// of the item they describe, usually a module. - /// - Outer doc comments like `/// # Example`. - /// - Inner doc comments like `//! Please file an issue`. - /// - Outer block comments `/** # Example */`. - /// - Inner block comments `/*! Please file an issue */`. + /// + /// - Outer one-line doc comments like `/// Example`. + /// + /// - Inner one-line doc comments like `//! Please file an issue`. + /// + /// - Outer documentation blocks `/** Example */`. + /// + /// - Inner documentation blocks `/*! Please file an issue */`. /// /// The `style` field of type `AttrStyle` distinguishes whether an attribute - /// is outer or inner. Doc comments and block comments are promoted to - /// attributes, as this is how they are processed by the compiler and by - /// `macro_rules!` macros. + /// is outer or inner. /// - /// The `path` field gives the possibly colon-delimited path against which - /// the attribute is resolved. It is equal to `"doc"` for desugared doc - /// comments. The `tokens` field contains the rest of the attribute body as - /// tokens. + /// Every attribute has a `path` that indicates the intended interpretation + /// of the rest of the attribute's contents. The path and the optional + /// additional contents are represented together in the `meta` field of the + /// attribute in three possible varieties: + /// + /// - Meta::Path — attributes whose information content conveys just a + /// path, for example the `#[test]` attribute. + /// + /// - Meta::List — attributes that carry arbitrary tokens after the + /// path, surrounded by a delimiter (parenthesis, bracket, or brace). For + /// example `#[derive(Copy)]` or `#[precondition(x < 5)]`. + /// + /// - Meta::NameValue — attributes with an `=` sign after the path, + /// followed by a Rust expression. For example `#[path = + /// "sys/windows.rs"]`. + /// + /// All doc comments are represented in the NameValue style with a path of + /// "doc", as this is how they are processed by the compiler and by + /// `macro_rules!` macros. /// /// ```text - /// #[derive(Copy)] #[crate::precondition x < 5] - /// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~ - /// path tokens path tokens + /// #[derive(Copy, Clone)] + /// ~~~~~~Path + /// ^^^^^^^^^^^^^^^^^^^Meta::List + /// + /// #[path = "sys/windows.rs"] + /// ~~~~Path + /// ^^^^^^^^^^^^^^^^^^^^^^^Meta::NameValue + /// + /// #[test] + /// ^^^^Meta::Path /// ``` /// ///
@@ -98,132 +114,287 @@ ast_struct! { /// /// The grammar of attributes in Rust is very flexible, which makes the /// syntax tree not that useful on its own. In particular, arguments of the - /// attribute are held in an arbitrary `tokens: TokenStream`. Macros are - /// expected to check the `path` of the attribute, decide whether they - /// recognize it, and then parse the remaining tokens according to whatever - /// grammar they wish to require for that kind of attribute. - /// - /// If the attribute you are parsing is expected to conform to the - /// conventional structured form of attribute, use [`parse_meta()`] to - /// obtain that structured representation. If the attribute follows some - /// other grammar of its own, use [`parse_args()`] to parse that into the - /// expected data structure. - /// - /// [`parse_meta()`]: Attribute::parse_meta + /// `Meta::List` variety of attribute are held in an arbitrary `tokens: + /// TokenStream`. Macros are expected to check the `path` of the attribute, + /// decide whether they recognize it, and then parse the remaining tokens + /// according to whatever grammar they wish to require for that kind of + /// attribute. Use [`parse_args()`] to parse those tokens into the expected + /// data structure. + /// /// [`parse_args()`]: Attribute::parse_args - pub struct Attribute #manual_extra_traits { + /// + ///
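The path-then-tokens workflow described above looks like this in practice. A sketch assuming syn 2.x default features; `my_attr` is an invented attribute name:

```rust
use syn::punctuated::Punctuated;
use syn::{parse_quote, Attribute, LitStr, Token};

fn main() -> syn::Result<()> {
    let attr: Attribute = parse_quote! {
        #[my_attr("one", "two")]
    };

    // Dispatch on the path first; only then commit to a grammar for
    // the tokens between the delimiters.
    if attr.path().is_ident("my_attr") {
        let args = attr.parse_args_with(Punctuated::<LitStr, Token![,]>::parse_terminated)?;
        assert_eq!(args.len(), 2);
    }
    Ok(())
}
```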
+ /// + /// # Doc comments + /// + /// The compiler transforms doc comments, such as `/// comment` and `/*! + /// comment */`, into attributes before macros are expanded. Each comment is + /// expanded into an attribute of the form `#[doc = r"comment"]`. + /// + /// As an example, the following `mod` items are expanded identically: + /// + /// ``` + /// # use syn::{ItemMod, parse_quote}; + /// let doc: ItemMod = parse_quote! { + /// /// Single line doc comments + /// /// We write so many! + /// /** + /// * Multi-line comments... + /// * May span many lines + /// */ + /// mod example { + /// //! Of course, they can be inner too + /// /*! And fit in a single line */ + /// } + /// }; + /// let attr: ItemMod = parse_quote! { + /// #[doc = r" Single line doc comments"] + /// #[doc = r" We write so many!"] + /// #[doc = r" + /// * Multi-line comments... + /// * May span many lines + /// "] + /// mod example { + /// #![doc = r" Of course, they can be inner too"] + /// #![doc = r" And fit in a single line "] + /// } + /// }; + /// assert_eq!(doc, attr); + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Attribute { pub pound_token: Token![#], pub style: AttrStyle, pub bracket_token: token::Bracket, - pub path: Path, - pub tokens: TokenStream, - } -} - -#[cfg(feature = "extra-traits")] -impl Eq for Attribute {} - -#[cfg(feature = "extra-traits")] -impl PartialEq for Attribute { - fn eq(&self, other: &Self) -> bool { - self.style == other.style - && self.pound_token == other.pound_token - && self.bracket_token == other.bracket_token - && self.path == other.path - && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens) - } -} - -#[cfg(feature = "extra-traits")] -impl Hash for Attribute { - fn hash<H>(&self, state: &mut H) - where - H: Hasher, - { - self.style.hash(state); - self.pound_token.hash(state); - self.bracket_token.hash(state); - self.path.hash(state); - TokenStreamHelper(&self.tokens).hash(state); + pub meta: Meta, } } impl Attribute { - /// Parses the content of the attribute, consisting of the path and tokens, - /// as a [`Meta`] if possible. + /// Returns the path that identifies the interpretation of this attribute. /// - /// *This function is available if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - pub fn parse_meta(&self) -> Result<Meta> { - fn clone_ident_segment(segment: &PathSegment) -> PathSegment { - PathSegment { - ident: segment.ident.clone(), - arguments: PathArguments::None, - } - } - - let path = Path { - leading_colon: self - .path - .leading_colon - .as_ref() - .map(|colon| Token![::](colon.spans)), - segments: self - .path - .segments - .pairs() - .map(|pair| match pair { - Pair::Punctuated(seg, punct) => { - Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans)) - } - Pair::End(seg) => Pair::End(clone_ident_segment(seg)), - }) - .collect(), - }; - - let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input); - parse::Parser::parse2(parser, self.tokens.clone()) + /// For example this would return the `test` in `#[test]`, the `derive` in + /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. + pub fn path(&self) -> &Path { + self.meta.path() } /// Parse the arguments to the attribute as a syntax tree.
/// - /// This is similar to `syn::parse2::<T>(attr.tokens)` except that: + /// This is similar to pulling out the `TokenStream` from `Meta::List` and + /// doing `syn::parse2::<T>(meta_list.tokens)`, except that using + /// `parse_args` the error message has a more useful span when `tokens` is + /// empty. /// - /// - the surrounding delimiters are *not* included in the input to the - /// parser; and - /// - the error message has a more useful span when `tokens` is empty. + /// The surrounding delimiters are *not* included in the input to the + /// parser. /// /// ```text /// #[my_attr(value < 5)] /// ^^^^^^^^^ what gets parsed /// ``` /// - /// *This function is available if Syn is built with the `"parsing"` - /// feature.* + /// # Example + /// + /// ``` + /// use syn::{parse_quote, Attribute, Expr}; + /// + /// let attr: Attribute = parse_quote! { + /// #[precondition(value < 5)] + /// }; + /// + /// if attr.path().is_ident("precondition") { + /// let precondition: Expr = attr.parse_args()?; + /// // ... + /// } + /// # anyhow::Ok(()) + /// ``` #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub fn parse_args<T: Parse>(&self) -> Result<T> { self.parse_args_with(T::parse) } /// Parse the arguments to the attribute using the given parser. /// - /// *This function is available if Syn is built with the `"parsing"` - /// feature.* + /// # Example + /// + /// ``` + /// use syn::{parse_quote, Attribute}; + /// + /// let attr: Attribute = parse_quote! { + /// #[inception { #[brrrrrrraaaaawwwwrwrrrmrmrmmrmrmmmmm] }] + /// }; + /// + /// let bwom = attr.parse_args_with(Attribute::parse_outer)?; + /// + /// // Attribute does not have a Parse impl, so we couldn't directly do: + /// // let bwom: Attribute = attr.parse_args()?; + /// # anyhow::Ok(()) + /// ``` #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> { - let parser = |input: ParseStream| { - let args = enter_args(self, input)?; - parse::parse_stream(parser, &args) - }; - parser.parse2(self.tokens.clone()) + match &self.meta { + Meta::Path(path) => Err(crate::error::new2( + path.segments.first().unwrap().ident.span(), + path.segments.last().unwrap().ident.span(), + format!( + "expected attribute arguments in parentheses: {}[{}(...)]", + parsing::DisplayAttrStyle(&self.style), + parsing::DisplayPath(path), + ), + )), + Meta::NameValue(meta) => Err(Error::new( + meta.eq_token.span, + format_args!( + "expected parentheses: {}[{}(...)]", + parsing::DisplayAttrStyle(&self.style), + parsing::DisplayPath(&meta.path), + ), + )), + Meta::List(meta) => meta.parse_args_with(parser), + } + } + + /// Parse the arguments to the attribute, expecting it to follow the + /// conventional structure used by most of Rust's built-in attributes. + /// + /// The [*Meta Item Attribute Syntax*][syntax] section in the Rust reference + /// explains the convention in more detail. Not all attributes follow this + /// convention, so [`parse_args()`][Self::parse_args] is available if you + /// need to parse arbitrarily goofy attribute syntax. + /// + /// [syntax]: https://doc.rust-lang.org/reference/attributes.html#meta-item-attribute-syntax + /// + /// # Example + /// + /// We'll parse a struct, and then parse some of Rust's `#[repr]` attribute + /// syntax. + /// + /// ``` + /// use syn::{parenthesized, parse_quote, token, ItemStruct, LitInt}; + /// + /// let input: ItemStruct = parse_quote!
{ + /// #[repr(C, align(4))] + /// pub struct MyStruct(u16, u32); + /// }; + /// + /// let mut repr_c = false; + /// let mut repr_transparent = false; + /// let mut repr_align = None::<usize>; + /// let mut repr_packed = None::<usize>; + /// for attr in &input.attrs { + /// if attr.path().is_ident("repr") { + /// attr.parse_nested_meta(|meta| { + /// // #[repr(C)] + /// if meta.path.is_ident("C") { + /// repr_c = true; + /// return Ok(()); + /// } + /// + /// // #[repr(transparent)] + /// if meta.path.is_ident("transparent") { + /// repr_transparent = true; + /// return Ok(()); + /// } + /// + /// // #[repr(align(N))] + /// if meta.path.is_ident("align") { + /// let content; + /// parenthesized!(content in meta.input); + /// let lit: LitInt = content.parse()?; + /// let n: usize = lit.base10_parse()?; + /// repr_align = Some(n); + /// return Ok(()); + /// } + /// + /// // #[repr(packed)] or #[repr(packed(N))], omitted N means 1 + /// if meta.path.is_ident("packed") { + /// if meta.input.peek(token::Paren) { + /// let content; + /// parenthesized!(content in meta.input); + /// let lit: LitInt = content.parse()?; + /// let n: usize = lit.base10_parse()?; + /// repr_packed = Some(n); + /// } else { + /// repr_packed = Some(1); + /// } + /// return Ok(()); + /// } + /// + /// Err(meta.error("unrecognized repr")) + /// })?; + /// } + /// } + /// # anyhow::Ok(()) + /// ``` + /// + /// # Alternatives + /// + /// In some cases, for attributes which have nested layers of structured + /// content, the following less flexible approach might be more convenient: + /// + /// ``` + /// # use syn::{parse_quote, ItemStruct}; + /// # + /// # let input: ItemStruct = parse_quote! { + /// # #[repr(C, align(4))] + /// # pub struct MyStruct(u16, u32); + /// # }; + /// # + /// use syn::punctuated::Punctuated; + /// use syn::{parenthesized, token, Error, LitInt, Meta, Token}; + /// + /// let mut repr_c = false; + /// let mut repr_transparent = false; + /// let mut repr_align = None::<usize>; + /// let mut repr_packed = None::<usize>; + /// for attr in &input.attrs { + /// if attr.path().is_ident("repr") { + /// let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?; + /// for meta in nested { + /// match meta { + /// // #[repr(C)] + /// Meta::Path(path) if path.is_ident("C") => { + /// repr_c = true; + /// } + /// + /// // #[repr(align(N))] + /// Meta::List(meta) if meta.path.is_ident("align") => { + /// let lit: LitInt = meta.parse_args()?; + /// let n: usize = lit.base10_parse()?; + /// repr_align = Some(n); + /// } + /// + /// /* ... */ + /// + /// _ => { + /// return Err(Error::new_spanned(meta, "unrecognized repr")); + /// } + /// } + /// } + /// } + /// } + /// # Ok(()) + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_nested_meta( + &self, + logic: impl FnMut(ParseNestedMeta) -> Result<()>, + ) -> Result<()> { + self.parse_args_with(meta::parser(logic)) } /// Parses zero or more outer attributes from the stream. /// - /// *This function is available if Syn is built with the `"parsing"` - /// feature.* + /// # Example + /// + /// See + /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> { let mut attrs = Vec::new(); while input.peek(Token![#]) { @@ -234,73 +405,23 @@ impl Attribute { /// Parses zero or more inner attributes from the stream.
/// - /// *This function is available if Syn is built with the `"parsing"` - /// feature.* + /// # Example + /// + /// See + /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> { let mut attrs = Vec::new(); - while input.peek(Token![#]) && input.peek2(Token![!]) { - attrs.push(input.call(parsing::single_parse_inner)?); - } + parsing::parse_inner(input, &mut attrs)?; Ok(attrs) } } -#[cfg(feature = "parsing")] -fn error_expected_args(attr: &Attribute) -> Error { - let style = match attr.style { - AttrStyle::Outer => "#", - AttrStyle::Inner(_) => "#!", - }; - - let mut path = String::new(); - for segment in &attr.path.segments { - if !path.is_empty() || attr.path.leading_colon.is_some() { - path += "::"; - } - path += &segment.ident.to_string(); - } - - let msg = format!("expected attribute arguments: {}[{}(...)]", style, path); - - #[cfg(feature = "printing")] - return Error::new_spanned(attr, msg); - - #[cfg(not(feature = "printing"))] - return Error::new(attr.bracket_token.span, msg); -} - -#[cfg(feature = "parsing")] -fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> { - if input.is_empty() { - return Err(error_expected_args(attr)); - }; - - let content; - if input.peek(token::Paren) { - parenthesized!(content in input); - } else if input.peek(token::Bracket) { - bracketed!(content in input); - } else if input.peek(token::Brace) { - braced!(content in input); - } else { - return Err(input.error("unexpected token in attribute arguments")); - } - - if input.is_empty() { - Ok(content) - } else { - Err(input.error("unexpected token in attribute arguments")) - } -} - ast_enum! { /// Distinguishes between attributes that decorate an item and attributes /// that are contained within an item. /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// /// # Outer attributes /// /// - `#[repr(transparent)]` @@ -312,7 +433,7 @@ ast_enum! { /// - `#![feature(proc_macro)]` /// - `//! # Example` /// - `/*! Please file an issue */` - #[cfg_attr(feature = "clone-impls", derive(Copy))] + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum AttrStyle { Outer, Inner(Token![!]), @@ -322,9 +443,6 @@ ast_enum! { ast_enum_of_structs! { /// Content of a compile-time structured attribute. /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// /// ## Path /// /// A meta path is like the `test` in `#[test]`. /// @@ -342,10 +460,8 @@ /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums - // - // TODO: change syntax-tree-enum link to an intra rustdoc link, currently - // blocked on https://github.com/rust-lang/rust/issues/62833 + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum Meta { Path(Path), @@ -359,30 +475,26 @@ ast_enum_of_structs! { ast_struct! { /// A structured list within an attribute, like `derive(Copy, Clone)`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct MetaList { pub path: Path, - pub paren_token: token::Paren, - pub nested: Punctuated<NestedMeta, Token![,]>, + pub delimiter: MacroDelimiter, + pub tokens: TokenStream, } } ast_struct!
{ /// A name-value pair within an attribute, like `feature = "nightly"`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct MetaNameValue { pub path: Path, pub eq_token: Token![=], - pub lit: Lit, + pub value: Expr, } } impl Meta { - /// Returns the identifier that begins this structured meta item. + /// Returns the path that begins this structured meta item. /// /// For example this would return the `test` in `#[test]`, the `derive` in /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. @@ -393,194 +505,174 @@ impl Meta { Meta::NameValue(meta) => &meta.path, } } + + /// Error if this is a `Meta::List` or `Meta::NameValue`. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn require_path_only(&self) -> Result<&Path> { + let error_span = match self { + Meta::Path(path) => return Ok(path), + Meta::List(meta) => meta.delimiter.span().open(), + Meta::NameValue(meta) => meta.eq_token.span, + }; + Err(Error::new(error_span, "unexpected token in attribute")) + } + + /// Error if this is a `Meta::Path` or `Meta::NameValue`. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn require_list(&self) -> Result<&MetaList> { + match self { + Meta::List(meta) => Ok(meta), + Meta::Path(path) => Err(crate::error::new2( + path.segments.first().unwrap().ident.span(), + path.segments.last().unwrap().ident.span(), + format!( + "expected attribute arguments in parentheses: `{}(...)`", + parsing::DisplayPath(path), + ), + )), + Meta::NameValue(meta) => Err(Error::new(meta.eq_token.span, "expected `(`")), + } + } + + /// Error if this is a `Meta::Path` or `Meta::List`. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn require_name_value(&self) -> Result<&MetaNameValue> { + match self { + Meta::NameValue(meta) => Ok(meta), + Meta::Path(path) => Err(crate::error::new2( + path.segments.first().unwrap().ident.span(), + path.segments.last().unwrap().ident.span(), + format!( + "expected a value for this attribute: `{} = ...`", + parsing::DisplayPath(path), + ), + )), + Meta::List(meta) => Err(Error::new(meta.delimiter.span().open(), "expected `=`")), + } + } } -ast_enum_of_structs! { - /// Element of a compile-time attribute list. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - pub enum NestedMeta { - /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which - /// would be a nested `Meta::Path`. - Meta(Meta), +impl MetaList { + /// See [`Attribute::parse_args`]. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args<T: Parse>(&self) -> Result<T> { + self.parse_args_with(T::parse) + } - /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`. - Lit(Lit), + /// See [`Attribute::parse_args_with`]. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> { + let scope = self.delimiter.span().close(); + crate::parse::parse_scoped(parser, scope, self.tokens.clone()) + } + + /// See [`Attribute::parse_nested_meta`].
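The `require_*` accessors introduced in this hunk spare callers a hand-written match when exactly one shape is acceptable. A short sketch assuming syn 2.x; the `#[doc = ...]` attribute is just an example input:

```rust
use syn::{parse_quote, Attribute, Expr, Lit};

fn main() -> syn::Result<()> {
    let attr: Attribute = parse_quote! {
        #[doc = "example"]
    };

    // require_name_value rejects `#[doc]` and `#[doc(...)]` with a
    // targeted error span instead of a hand-written match.
    let nv = attr.meta.require_name_value()?;
    if let Expr::Lit(expr) = &nv.value {
        if let Lit::Str(text) = &expr.lit {
            assert_eq!(text.value(), "example");
        }
    }
    Ok(())
}
```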
+ #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_nested_meta( + &self, + logic: impl FnMut(ParseNestedMeta) -> Result<()>, + ) -> Result<()> { + self.parse_args_with(meta::parser(logic)) } } -/// Conventional argument type associated with an invocation of an attribute -/// macro. -/// -/// For example if we are developing an attribute macro that is intended to be -/// invoked on function items as follows: -/// -/// ``` -/// # const IGNORE: &str = stringify! { -/// #[my_attribute(path = "/v1/refresh")] -/// # }; -/// pub fn refresh() { -/// /* ... */ -/// } -/// ``` -/// -/// The implementation of this macro would want to parse its attribute arguments -/// as type `AttributeArgs`. -/// -/// ``` -/// extern crate proc_macro; -/// -/// use proc_macro::TokenStream; -/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; -/// -/// # const IGNORE: &str = stringify! { -/// #[proc_macro_attribute] -/// # }; -/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream { -/// let args = parse_macro_input!(args as AttributeArgs); -/// let input = parse_macro_input!(input as ItemFn); -/// -/// /* ... */ -/// # "".parse().unwrap() -/// } -/// ``` -pub type AttributeArgs = Vec<NestedMeta>; -pub trait FilterAttrs<'a> { +pub(crate) trait FilterAttrs<'a> { type Ret: Iterator<Item = &'a Attribute>; fn outer(self) -> Self::Ret; fn inner(self) -> Self::Ret; } -impl<'a, T> FilterAttrs<'a> for T -where - T: IntoIterator<Item = &'a Attribute>, -{ - type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>; +impl<'a> FilterAttrs<'a> for &'a [Attribute] { + type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>; fn outer(self) -> Self::Ret { fn is_outer(attr: &&Attribute) -> bool { match attr.style { AttrStyle::Outer => true, - _ => false, + AttrStyle::Inner(_) => false, } } - self.into_iter().filter(is_outer) + self.iter().filter(is_outer) } fn inner(self) -> Self::Ret { fn is_inner(attr: &&Attribute) -> bool { match attr.style { AttrStyle::Inner(_) => true, - _ => false, + AttrStyle::Outer => false, } } - self.into_iter().filter(is_inner) + self.iter().filter(is_inner) } } #[cfg(feature = "parsing")] -pub mod parsing { +pub(crate) mod parsing { use super::*; - - use crate::ext::IdentExt; + use crate::parse::discouraged::Speculative; use crate::parse::{Parse, ParseStream, Result}; - #[cfg(feature = "full")] - use crate::private; + use std::fmt::{self, Display}; + + pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> { + while input.peek(Token![#]) && input.peek2(Token![!]) { + attrs.push(input.call(parsing::single_parse_inner)?); + } + Ok(()) + } - pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> { + pub(crate) fn single_parse_inner(input: ParseStream) -> Result<Attribute> { let content; Ok(Attribute { pound_token: input.parse()?, style: AttrStyle::Inner(input.parse()?), bracket_token: bracketed!(content in input), - path: content.call(Path::parse_mod_style)?, - tokens: content.parse()?, + meta: content.parse()?, }) } - pub fn single_parse_outer(input: ParseStream) -> Result<Attribute> { + pub(crate) fn single_parse_outer(input: ParseStream) -> Result<Attribute> { let content; Ok(Attribute { pound_token: input.parse()?, style: AttrStyle::Outer, bracket_token: bracketed!(content in input), - path: content.call(Path::parse_mod_style)?, - tokens: content.parse()?, - }) - } - - #[cfg(feature = "full")] - impl private { - pub fn attrs(outer: Vec<Attribute>, inner: Vec<Attribute>) -> Vec<Attribute> { - let mut attrs = outer; - attrs.extend(inner); - attrs - } - } - - // Like Path::parse_mod_style but accepts keywords in the path.
- fn parse_meta_path(input: ParseStream) -> Result<Path> { - Ok(Path { - leading_colon: input.parse()?, - segments: { - let mut segments = Punctuated::new(); - while input.peek(Ident::peek_any) { - let ident = Ident::parse_any(input)?; - segments.push_value(PathSegment::from(ident)); - if !input.peek(Token![::]) { - break; - } - let punct = input.parse()?; - segments.push_punct(punct); - } - if segments.is_empty() { - return Err(input.error("expected path")); - } else if segments.trailing_punct() { - return Err(input.error("expected path segment")); - } - segments - }, + meta: content.parse()?, }) } + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] impl Parse for Meta { fn parse(input: ParseStream) -> Result<Self> { - let path = input.call(parse_meta_path)?; + let path = input.call(Path::parse_mod_style)?; parse_meta_after_path(path, input) } } + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] impl Parse for MetaList { fn parse(input: ParseStream) -> Result<Self> { - let path = input.call(parse_meta_path)?; + let path = input.call(Path::parse_mod_style)?; parse_meta_list_after_path(path, input) } } + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] impl Parse for MetaNameValue { fn parse(input: ParseStream) -> Result<Self> { - let path = input.call(parse_meta_path)?; + let path = input.call(Path::parse_mod_style)?; parse_meta_name_value_after_path(path, input) } } - impl Parse for NestedMeta { - fn parse(input: ParseStream) -> Result<Self> { - if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) { - input.parse().map(NestedMeta::Lit) - } else if input.peek(Ident::peek_any) { - input.parse().map(NestedMeta::Meta) - } else { - Err(input.error("expected identifier or literal")) - } - } - } - - pub fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> { - if input.peek(token::Paren) { + pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> { + if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) { parse_meta_list_after_path(path, input).map(Meta::List) } else if input.peek(Token![=]) { parse_meta_name_value_after_path(path, input).map(Meta::NameValue) @@ -590,21 +682,60 @@ pub mod parsing { } fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> { - let content; + let (delimiter, tokens) = mac::parse_delimiter(input)?; Ok(MetaList { path, - paren_token: parenthesized!(content in input), - nested: content.parse_terminated(NestedMeta::parse)?, + delimiter, + tokens, }) } fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> { + let eq_token: Token![=] = input.parse()?; + let ahead = input.fork(); + let lit: Option<Lit> = ahead.parse()?; + let value = if let (Some(lit), true) = (lit, ahead.is_empty()) { + input.advance_to(&ahead); + Expr::Lit(ExprLit { + attrs: Vec::new(), + lit, + }) + } else if input.peek(Token![#]) && input.peek2(token::Bracket) { + return Err(input.error("unexpected attribute inside of attribute")); + } else { + input.parse()?
+ }; Ok(MetaNameValue { path, - eq_token: input.parse()?, - lit: input.parse()?, + eq_token, + value, }) } + + pub(super) struct DisplayAttrStyle<'a>(pub &'a AttrStyle); + + impl<'a> Display for DisplayAttrStyle<'a> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str(match self.0 { + AttrStyle::Outer => "#", + AttrStyle::Inner(_) => "#!", + }) + } + } + + pub(super) struct DisplayPath<'a>(pub &'a Path); + + impl<'a> Display for DisplayPath<'a> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + for (i, segment) in self.0.segments.iter().enumerate() { + if i > 0 || self.0.leading_colon.is_some() { + formatter.write_str("::")?; + } + write!(formatter, "{}", segment.ident)?; + } + Ok(()) + } + } } #[cfg(feature = "printing")] @@ -613,6 +744,7 @@ mod printing { use proc_macro2::TokenStream; use quote::ToTokens; + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] impl ToTokens for Attribute { fn to_tokens(&self, tokens: &mut TokenStream) { self.pound_token.to_tokens(tokens); @@ -620,26 +752,25 @@ mod printing { b.to_tokens(tokens); } self.bracket_token.surround(tokens, |tokens| { - self.path.to_tokens(tokens); - self.tokens.to_tokens(tokens); + self.meta.to_tokens(tokens); }); } } + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] impl ToTokens for MetaList { fn to_tokens(&self, tokens: &mut TokenStream) { self.path.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.nested.to_tokens(tokens); - }) + self.delimiter.surround(tokens, self.tokens.clone()); } } + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] impl ToTokens for MetaNameValue { fn to_tokens(&self, tokens: &mut TokenStream) { self.path.to_tokens(tokens); self.eq_token.to_tokens(tokens); - self.lit.to_tokens(tokens); + self.value.to_tokens(tokens); } } } diff --git a/src/await.rs b/src/await.rs deleted file mode 100644 index a8e24fd4e5..0000000000 --- a/src/await.rs +++ /dev/null @@ -1,2 +0,0 @@ -// See include!("await.rs") in token.rs. -export_token_macro![(await)]; diff --git a/src/bigint.rs b/src/bigint.rs index 5397d6beee..66aaa93725 100644 --- a/src/bigint.rs +++ b/src/bigint.rs @@ -1,16 +1,16 @@ use std::ops::{AddAssign, MulAssign}; // For implementing base10_digits() accessor on LitInt. -pub struct BigInt { +pub(crate) struct BigInt { digits: Vec<u8>, } impl BigInt { - pub fn new() -> Self { + pub(crate) fn new() -> Self { BigInt { digits: Vec::new() } } - pub fn to_string(&self) -> String { + pub(crate) fn to_string(&self) -> String { let mut repr = String::with_capacity(self.digits.len()); let mut has_nonzero = false; diff --git a/src/buffer.rs b/src/buffer.rs index 551a5ac816..e16f2adea8 100644 --- a/src/buffer.rs +++ b/src/buffer.rs @@ -1,7 +1,5 @@ //! A stably addressed token buffer supporting efficient traversal based on a //! cheaply copyable cursor. -//! -//! *This module is available if Syn is built with the `"parsing"` feature.* // This module is heavily commented as it contains most of the unsafe code in // Syn, and caution should be used when editing it. The public-facing interface @@ -12,112 +10,81 @@ feature = "proc-macro" ))] use crate::proc_macro as pm; +use crate::Lifetime; +use proc_macro2::extra::DelimSpan; use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; - +use std::cmp::Ordering; use std::marker::PhantomData; -use std::ptr; - -use crate::Lifetime; /// Internal type which is used instead of `TokenTree` to represent a token tree /// within a `TokenBuffer`.
enum Entry { // Mimicking types from proc-macro. - Group(Group, TokenBuffer), + // Group entries contain the offset to the matching End entry. + Group(Group, usize), Ident(Ident), Punct(Punct), Literal(Literal), - // End entries contain a raw pointer to the entry from the containing - // token tree, or null if this is the outermost level. - End(*const Entry), + // End entries contain the offset (negative) to the start of the buffer. + End(isize), } /// A buffer that can be efficiently traversed multiple times, unlike /// `TokenStream` which requires a deep copy in order to traverse more than /// once. -/// -/// *This type is available if Syn is built with the `"parsing"` feature.* pub struct TokenBuffer { - // NOTE: Do not derive clone on this - there are raw pointers inside which - // will be messed up. Moving the `TokenBuffer` itself is safe as the actual - // backing slices won't be moved. - data: Box<[Entry]>, + // NOTE: Do not implement clone on this - while the current design could be + // cloned, other designs which could be desirable may not be cloneable. + entries: Box<[Entry]>, } impl TokenBuffer { - // NOTE: DO NOT MUTATE THE `Vec` RETURNED FROM THIS FUNCTION ONCE IT - // RETURNS, THE ADDRESS OF ITS BACKING MEMORY MUST REMAIN STABLE. - fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer { - // Build up the entries list, recording the locations of any Groups - // in the list to be processed later. - let mut entries = Vec::new(); - let mut seqs = Vec::new(); + fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) { for tt in stream { match tt { - TokenTree::Ident(sym) => { - entries.push(Entry::Ident(sym)); - } - TokenTree::Punct(op) => { - entries.push(Entry::Punct(op)); - } - TokenTree::Literal(l) => { - entries.push(Entry::Literal(l)); - } - TokenTree::Group(g) => { - // Record the index of the interesting entry, and store an - // `End(null)` there temporarially. - seqs.push((entries.len(), g)); - entries.push(Entry::End(ptr::null())); + TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)), + TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)), + TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), + TokenTree::Group(group) => { + let group_start_index = entries.len(); + entries.push(Entry::End(0)); // we replace this below + Self::recursive_new(entries, group.stream()); + let group_end_index = entries.len(); + entries.push(Entry::End(-(group_end_index as isize))); + let group_end_offset = group_end_index - group_start_index; + entries[group_start_index] = Entry::Group(group, group_end_offset); } } } - // Add an `End` entry to the end with a reference to the enclosing token - // stream which was passed in. - entries.push(Entry::End(up)); - - // NOTE: This is done to ensure that we don't accidentally modify the - // length of the backing buffer. The backing buffer must remain at a - // constant address after this point, as we are going to store a raw - // pointer into it. - let mut entries = entries.into_boxed_slice(); - for (idx, group) in seqs { - // We know that this index refers to one of the temporary - // `End(null)` entries, and we know that the last entry is - // `End(up)`, so the next index is also valid. - let seq_up = &entries[idx + 1] as *const Entry; - - // The end entry stored at the end of this Entry::Group should - // point to the Entry which follows the Group in the list.
- let inner = Self::inner_new(group.stream(), seq_up); - entries[idx] = Entry::Group(group, inner); - } - - TokenBuffer { data: entries } } /// Creates a `TokenBuffer` containing all the tokens from the input - /// `TokenStream`. - /// - /// *This method is available if Syn is built with both the `"parsing"` and - /// `"proc-macro"` features.* + /// `proc_macro::TokenStream`. #[cfg(all( not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), feature = "proc-macro" ))] - pub fn new(stream: pm::TokenStream) -> TokenBuffer { + #[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] + pub fn new(stream: pm::TokenStream) -> Self { Self::new2(stream.into()) } /// Creates a `TokenBuffer` containing all the tokens from the input - /// `TokenStream`. - pub fn new2(stream: TokenStream) -> TokenBuffer { - Self::inner_new(stream, ptr::null()) + /// `proc_macro2::TokenStream`. + pub fn new2(stream: TokenStream) -> Self { + let mut entries = Vec::new(); + Self::recursive_new(&mut entries, stream); + entries.push(Entry::End(-(entries.len() as isize))); + Self { + entries: entries.into_boxed_slice(), + } } /// Creates a cursor referencing the first token in the buffer and able to /// traverse until the end of the buffer. pub fn begin(&self) -> Cursor { - unsafe { Cursor::create(&self.data[0], &self.data[self.data.len() - 1]) } + let ptr = self.entries.as_ptr(); + unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) } } } @@ -129,16 +96,10 @@ impl TokenBuffer { /// /// An empty `Cursor` can be created directly, or one may create a `TokenBuffer` /// object and get a cursor to its first token with `begin()`. -/// -/// Two cursors are equal if they have the same location in the same input -/// stream, and have the same scope. -/// -/// *This type is available if Syn is built with the `"parsing"` feature.* -#[derive(Copy, Clone, Eq, PartialEq)] pub struct Cursor<'a> { // The current entry which the `Cursor` is pointing at. ptr: *const Entry, - // This is the only `Entry::End(..)` object which this cursor is allowed to + // This is the only `Entry::End` object which this cursor is allowed to // point at. All other `End` objects are skipped over in `Cursor::create`. scope: *const Entry, // Cursor is covariant in 'a. This field ensures that our pointers are still @@ -158,7 +119,7 @@ impl<'a> Cursor<'a> { // object in global storage. struct UnsafeSyncEntry(Entry); unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry)); + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0)); Cursor { ptr: &EMPTY_ENTRY.0, @@ -171,15 +132,15 @@ impl<'a> Cursor<'a> { /// `None`-delimited scopes when the cursor reaches the end of them, /// allowing for them to be treated transparently. unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { - // NOTE: If we're looking at a `End(..)`, we want to advance the cursor + // NOTE: If we're looking at a `End`, we want to advance the cursor // past it, unless `ptr == scope`, which means that we're at the edge of // our cursor's scope. We should only have `ptr != scope` at the exit // from None-delimited groups entered with `ignore_none`. - while let Entry::End(exit) = *ptr { + while let Entry::End(_) = *ptr { if ptr == scope { break; } - ptr = exit; + ptr = ptr.add(1); } Cursor { @@ -197,31 +158,30 @@ impl<'a> Cursor<'a> { /// Bump the cursor to point at the next token after the current one. 
This /// is undefined behavior if the cursor is currently looking at an /// `Entry::End`. - unsafe fn bump(self) -> Cursor<'a> { + /// + /// If the cursor is looking at an `Entry::Group`, the bumped cursor will + /// point at the first token in the group (with the same scope end). + unsafe fn bump_ignore_group(self) -> Cursor<'a> { Cursor::create(self.ptr.offset(1), self.scope) } - /// If the cursor is looking at a `None`-delimited group, move it to look at - /// the first token inside instead. If the group is empty, this will move + /// While the cursor is looking at a `None`-delimited group, move it to look + /// at the first token inside instead. If the group is empty, this will move /// the cursor past the `None`-delimited group. /// /// WARNING: This mutates its argument. fn ignore_none(&mut self) { - if let Entry::Group(group, buf) = self.entry() { + while let Entry::Group(group, _) = self.entry() { if group.delimiter() == Delimiter::None { - // NOTE: We call `Cursor::create` here to make sure that - // situations where we should immediately exit the span after - // entering it are handled correctly. - unsafe { - *self = Cursor::create(&buf.data[0], self.scope); - } + unsafe { *self = self.bump_ignore_group() }; + } else { + break; } } } /// Checks whether the cursor is currently pointing at the end of its valid /// scope. - #[inline] pub fn eof(self) -> bool { // We're at eof if we're at the end of our scope. self.ptr == self.scope @@ -229,7 +189,7 @@ impl<'a> Cursor<'a> { /// If the cursor is pointing at a `Group` with the given delimiter, returns /// a cursor into that group and one pointing to the next `TokenTree`. - pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> { + pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> { // If we're not trying to enter a none-delimited group, we want to // ignore them. We have to make sure to _not_ ignore them when we want // to enter them, of course. For obvious reasons. 
@@ -237,31 +197,60 @@ impl<'a> Cursor<'a> { self.ignore_none(); } - if let Entry::Group(group, buf) = self.entry() { + if let Entry::Group(group, end_offset) = self.entry() { if group.delimiter() == delim { - return Some((buf.begin(), group.span(), unsafe { self.bump() })); + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, span, after_group)); } } None } + pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let delimiter = group.delimiter(); + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, delimiter, span, after_group)); + } + + None + } + + pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((group.clone(), after_group)); + } + + None + } + /// If the cursor is pointing at a `Ident`, returns it along with a cursor /// pointing at the next `TokenTree`. pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump() })), + Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })), _ => None, } } - /// If the cursor is pointing at an `Punct`, returns it along with a cursor + /// If the cursor is pointing at a `Punct`, returns it along with a cursor /// pointing at the next `TokenTree`. pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })), + Entry::Punct(punct) if punct.as_char() != '\'' => { + Some((punct.clone(), unsafe { self.bump_ignore_group() })) + } _ => None, } } @@ -271,7 +260,7 @@ impl<'a> Cursor<'a> { pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })), + Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })), _ => None, } } @@ -281,18 +270,14 @@ impl<'a> Cursor<'a> { pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.ident() { - Some((ident, rest)) => { - let lifetime = Lifetime { - apostrophe: op.span(), - ident, - }; - Some((lifetime, rest)) - } - None => None, - } + Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + let next = unsafe { self.bump_ignore_group() }; + let (ident, rest) = next.ident()?; + let lifetime = Lifetime { + apostrophe: punct.span(), + ident, + }; + Some((lifetime, rest)) } _ => None, } @@ -318,17 +303,16 @@ impl<'a> Cursor<'a> { /// This method does not treat `None`-delimited groups as transparent, and /// will return a `Group(None, ..)` if the cursor is looking at one. 
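From the outside, the rewritten buffer keeps the same build-once, traverse-many contract. A sketch assuming syn's `parsing` feature; `count_token_trees` is a hypothetical helper, not part of this patch:

```rust
use proc_macro2::TokenStream;
use syn::buffer::TokenBuffer;

// Build the buffer once, then walk it as many times as needed; each
// walk is just pointer arithmetic over the boxed entry slice.
fn count_token_trees(stream: TokenStream) -> usize {
    let buffer = TokenBuffer::new2(stream);
    let mut cursor = buffer.begin();
    let mut n = 0;
    while let Some((_tree, next)) = cursor.token_tree() {
        n += 1;
        cursor = next;
    }
    n
}

fn main() {
    let stream: TokenStream = "a + (b * c)".parse().unwrap();
    assert_eq!(count_token_trees(stream), 3); // `a`, `+`, and the group
}
```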
pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { - let tree = match self.entry() { - Entry::Group(group, _) => group.clone().into(), - Entry::Literal(lit) => lit.clone().into(), - Entry::Ident(ident) => ident.clone().into(), - Entry::Punct(op) => op.clone().into(), - Entry::End(..) => { - return None; - } + let (tree, len) = match self.entry() { + Entry::Group(group, end_offset) => (group.clone().into(), *end_offset), + Entry::Literal(literal) => (literal.clone().into(), 1), + Entry::Ident(ident) => (ident.clone().into(), 1), + Entry::Punct(punct) => (punct.clone().into(), 1), + Entry::End(_) => return None, }; - Some((tree, unsafe { self.bump() })) + let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; + Some((tree, rest)) } /// Returns the `Span` of the current token, or `Span::call_site()` if this @@ -336,10 +320,86 @@ pub fn span(self) -> Span { match self.entry() { Entry::Group(group, _) => group.span(), - Entry::Literal(l) => l.span(), - Entry::Ident(t) => t.span(), - Entry::Punct(o) => o.span(), - Entry::End(..) => Span::call_site(), + Entry::Literal(literal) => literal.span(), + Entry::Ident(ident) => ident.span(), + Entry::Punct(punct) => punct.span(), + Entry::End(_) => Span::call_site(), } } + + /// Returns the `Span` of the token immediately prior to the position of + /// this cursor, or of the current token if there is no previous one. + #[cfg(any(feature = "full", feature = "derive"))] + pub(crate) fn prev_span(mut self) -> Span { + if start_of_buffer(self) < self.ptr { + self.ptr = unsafe { self.ptr.offset(-1) }; + if let Entry::End(_) = self.entry() { + // Locate the matching Group begin token. + let mut depth = 1; + loop { + self.ptr = unsafe { self.ptr.offset(-1) }; + match self.entry() { + Entry::Group(group, _) => { + depth -= 1; + if depth == 0 { + return group.span(); + } + } + Entry::End(_) => depth += 1, + Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {} + } + } + } + } + self.span() + } + + /// Skip over the next token without cloning it. Returns `None` if this + /// cursor points to eof. + /// + /// This method treats `'lifetimes` as a single token. + pub(crate) fn skip(self) -> Option<Cursor<'a>> { + let len = match self.entry() { + Entry::End(_) => return None, + + // Treat lifetimes as a single tt for the purposes of 'skip'.
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + match unsafe { &*self.ptr.add(1) } { + Entry::Ident(_) => 2, + _ => 1, + } + } + + Entry::Group(_, end_offset) => *end_offset, + _ => 1, + }; + + Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) }) + } +} + +impl<'a> Copy for Cursor<'a> {} + +impl<'a> Clone for Cursor<'a> { + fn clone(&self) -> Self { + *self + } +} + +impl<'a> Eq for Cursor<'a> {} + +impl<'a> PartialEq for Cursor<'a> { + fn eq(&self, other: &Self) -> bool { + self.ptr == other.ptr + } +} + +impl<'a> PartialOrd for Cursor<'a> { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + if same_buffer(*self, *other) { + Some(self.ptr.cmp(&other.ptr)) + } else { + None } } } @@ -348,6 +408,24 @@ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { a.scope == b.scope } +pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool { + start_of_buffer(a) == start_of_buffer(b) +} + +fn start_of_buffer(cursor: Cursor) -> *const Entry { + unsafe { + match &*cursor.scope { + Entry::End(offset) => cursor.scope.offset(*offset), + _ => unreachable!(), + } + } +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering { + a.ptr.cmp(&b.ptr) +} + pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { match cursor.entry() { Entry::Group(group, _) => group.span_open(), diff --git a/src/custom_keyword.rs b/src/custom_keyword.rs index 200e8478ef..379d159e74 100644 --- a/src/custom_keyword.rs +++ b/src/custom_keyword.rs @@ -26,8 +26,8 @@ /// /// - Field access to its span — `let sp = whatever_token.span` /// -/// [Peeking]: parse::ParseBuffer::peek -/// [Parsing]: parse::ParseBuffer::parse +/// [Peeking]: crate::parse::ParseBuffer::peek +/// [Parsing]: crate::parse::ParseBuffer::parse /// [Printing]: quote::ToTokens /// [`Span`]: proc_macro2::Span /// @@ -86,36 +86,38 @@ /// } /// } /// ``` -#[macro_export(local_inner_macros)] +#[macro_export] macro_rules! custom_keyword { ($ident:ident) => { #[allow(non_camel_case_types)] pub struct $ident { - pub span: $crate::export::Span, + pub span: $crate::__private::Span, } #[doc(hidden)] - #[allow(non_snake_case)] - pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>( + #[allow(dead_code, non_snake_case)] + pub fn $ident<__S: $crate::__private::IntoSpans<$crate::__private::Span>>( span: __S, ) -> $ident { $ident { - span: $crate::export::IntoSpans::into_spans(span)[0], + span: $crate::__private::IntoSpans::into_spans(span), } } - impl $crate::export::Default for $ident { - fn default() -> Self { - $ident { - span: $crate::export::Span::call_site(), + const _: () = { + impl $crate::__private::Default for $ident { + fn default() -> Self { + $ident { + span: $crate::__private::Span::call_site(), + } } } - } - impl_parse_for_custom_keyword!($ident); - impl_to_tokens_for_custom_keyword!($ident); - impl_clone_for_custom_keyword!($ident); - impl_extra_traits_for_custom_keyword!($ident); + $crate::impl_parse_for_custom_keyword!($ident); + $crate::impl_to_tokens_for_custom_keyword!($ident); + $crate::impl_clone_for_custom_keyword!($ident); + $crate::impl_extra_traits_for_custom_keyword!($ident); + }; }; } @@ -127,31 +129,31 @@ macro_rules! impl_parse_for_custom_keyword { ($ident:ident) => { // For peek.
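From the caller's side, the `custom_keyword!` machinery being reworked here is used like this. A sketch assuming syn 2.x; the `cache` keyword and `CacheOpt` parser are invented for illustration:

```rust
use syn::parse::{Parse, ParseStream};

// The macro generates the `cache` struct, its Parse impl (sketched in
// this hunk), and peek support.
syn::custom_keyword!(cache);

struct CacheOpt {
    kw: cache,
    value: syn::LitBool,
}

impl Parse for CacheOpt {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        Ok(CacheOpt {
            kw: input.parse()?,
            value: input.parse()?,
        })
    }
}

fn main() {
    let opt: CacheOpt = syn::parse_str("cache true").unwrap();
    assert!(opt.value.value());
}
```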
impl $crate::token::CustomToken for $ident { - fn peek(cursor: $crate::buffer::Cursor) -> $crate::export::bool { - if let Some((ident, _rest)) = cursor.ident() { - ident == stringify!($ident) + fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { + if let $crate::__private::Some((ident, _rest)) = cursor.ident() { + ident == $crate::__private::stringify!($ident) } else { false } } - fn display() -> &'static $crate::export::str { - concat!("`", stringify!($ident), "`") + fn display() -> &'static $crate::__private::str { + $crate::__private::concat!("`", $crate::__private::stringify!($ident), "`") } } impl $crate::parse::Parse for $ident { fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { input.step(|cursor| { - if let $crate::export::Some((ident, rest)) = cursor.ident() { - if ident == stringify!($ident) { - return $crate::export::Ok(($ident { span: ident.span() }, rest)); + if let $crate::__private::Some((ident, rest)) = cursor.ident() { + if ident == $crate::__private::stringify!($ident) { + return $crate::__private::Ok(($ident { span: ident.span() }, rest)); } } - $crate::export::Err(cursor.error(concat!( + $crate::__private::Err(cursor.error($crate::__private::concat!( "expected `", - stringify!($ident), - "`" + $crate::__private::stringify!($ident), + "`", ))) }) } @@ -173,10 +175,10 @@ macro_rules! impl_parse_for_custom_keyword { #[macro_export] macro_rules! impl_to_tokens_for_custom_keyword { ($ident:ident) => { - impl $crate::export::ToTokens for $ident { - fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) { - let ident = $crate::Ident::new(stringify!($ident), self.span); - $crate::export::TokenStreamExt::append(tokens, ident); + impl $crate::__private::ToTokens for $ident { + fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { + let ident = $crate::Ident::new($crate::__private::stringify!($ident), self.span); + $crate::__private::TokenStreamExt::append(tokens, ident); } } }; @@ -196,9 +198,10 @@ macro_rules! impl_to_tokens_for_custom_keyword { #[macro_export] macro_rules! impl_clone_for_custom_keyword { ($ident:ident) => { - impl $crate::export::Copy for $ident {} + impl $crate::__private::Copy for $ident {} - impl $crate::export::Clone for $ident { + #[allow(clippy::expl_impl_clone_on_copy)] + impl $crate::__private::Clone for $ident { fn clone(&self) -> Self { *self } @@ -220,25 +223,29 @@ macro_rules! impl_clone_for_custom_keyword { #[macro_export] macro_rules! 
impl_extra_traits_for_custom_keyword { ($ident:ident) => { - impl $crate::export::Debug for $ident { - fn fmt(&self, f: &mut $crate::export::Formatter) -> $crate::export::fmt::Result { - $crate::export::Formatter::write_str( + impl $crate::__private::Debug for $ident { + fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { + $crate::__private::Formatter::write_str( f, - concat!("Keyword [", stringify!($ident), "]"), + $crate::__private::concat!( + "Keyword [", + $crate::__private::stringify!($ident), + "]", + ), ) } } - impl $crate::export::Eq for $ident {} + impl $crate::__private::Eq for $ident {} - impl $crate::export::PartialEq for $ident { - fn eq(&self, _other: &Self) -> $crate::export::bool { + impl $crate::__private::PartialEq for $ident { + fn eq(&self, _other: &Self) -> $crate::__private::bool { true } } - impl $crate::export::Hash for $ident { - fn hash<__H: $crate::export::Hasher>(&self, _state: &mut __H) {} + impl $crate::__private::Hash for $ident { + fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} } }; } diff --git a/src/custom_punctuation.rs b/src/custom_punctuation.rs index 29fa448bd8..e8cbcd2feb 100644 --- a/src/custom_punctuation.rs +++ b/src/custom_punctuation.rs @@ -22,8 +22,8 @@ /// /// - Field access to its spans — `let spans = lrarrow.spans` /// -/// [Peeking]: parse::ParseBuffer::peek -/// [Parsing]: parse::ParseBuffer::parse +/// [Peeking]: crate::parse::ParseBuffer::peek +/// [Parsing]: crate::parse::ParseBuffer::parse /// [Printing]: quote::ToTokens /// [`Span`]: proc_macro2::Span /// @@ -74,57 +74,59 @@ /// let _: PathSegments = syn::parse_str(input).unwrap(); /// } /// ``` -#[macro_export(local_inner_macros)] +#[macro_export] macro_rules! custom_punctuation { ($ident:ident, $($tt:tt)+) => { pub struct $ident { - pub spans: custom_punctuation_repr!($($tt)+), + pub spans: $crate::custom_punctuation_repr!($($tt)+), } #[doc(hidden)] - #[allow(non_snake_case)] - pub fn $ident<__S: $crate::export::IntoSpans>( + #[allow(dead_code, non_snake_case)] + pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>( spans: __S, ) -> $ident { - let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*; + let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*; $ident { - spans: $crate::export::IntoSpans::into_spans(spans) + spans: $crate::__private::IntoSpans::into_spans(spans) } } - impl $crate::export::Default for $ident { - fn default() -> Self { - $ident($crate::export::Span::call_site()) + const _: () = { + impl $crate::__private::Default for $ident { + fn default() -> Self { + $ident($crate::__private::Span::call_site()) + } } - } - impl_parse_for_custom_punctuation!($ident, $($tt)+); - impl_to_tokens_for_custom_punctuation!($ident, $($tt)+); - impl_clone_for_custom_punctuation!($ident, $($tt)+); - impl_extra_traits_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+); + }; }; } // Not public API. #[cfg(feature = "parsing")] #[doc(hidden)] -#[macro_export(local_inner_macros)] +#[macro_export] macro_rules! 
impl_parse_for_custom_punctuation { ($ident:ident, $($tt:tt)+) => { impl $crate::token::CustomToken for $ident { fn peek(cursor: $crate::buffer::Cursor) -> bool { - $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+)) + $crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+)) } - fn display() -> &'static $crate::export::str { - custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`") + fn display() -> &'static $crate::__private::str { + $crate::__private::concat!("`", $crate::stringify_punct!($($tt)+), "`") } } impl $crate::parse::Parse for $ident { fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { - let spans: custom_punctuation_repr!($($tt)+) = - $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?; + let spans: $crate::custom_punctuation_repr!($($tt)+) = + $crate::__private::parse_punct(input, $crate::stringify_punct!($($tt)+))?; Ok($ident(spans)) } } @@ -142,12 +144,12 @@ macro_rules! impl_parse_for_custom_punctuation { // Not public API. #[cfg(feature = "printing")] #[doc(hidden)] -#[macro_export(local_inner_macros)] +#[macro_export] macro_rules! impl_to_tokens_for_custom_punctuation { ($ident:ident, $($tt:tt)+) => { - impl $crate::export::ToTokens for $ident { - fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) { - $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens) + impl $crate::__private::ToTokens for $ident { + fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { + $crate::__private::print_punct($crate::stringify_punct!($($tt)+), &self.spans, tokens) } } }; @@ -167,9 +169,10 @@ macro_rules! impl_to_tokens_for_custom_punctuation { #[macro_export] macro_rules! impl_clone_for_custom_punctuation { ($ident:ident, $($tt:tt)+) => { - impl $crate::export::Copy for $ident {} + impl $crate::__private::Copy for $ident {} - impl $crate::export::Clone for $ident { + #[allow(clippy::expl_impl_clone_on_copy)] + impl $crate::__private::Clone for $ident { fn clone(&self) -> Self { *self } @@ -191,22 +194,22 @@ macro_rules! impl_clone_for_custom_punctuation { #[macro_export] macro_rules! impl_extra_traits_for_custom_punctuation { ($ident:ident, $($tt:tt)+) => { - impl $crate::export::Debug for $ident { - fn fmt(&self, f: &mut $crate::export::Formatter) -> $crate::export::fmt::Result { - $crate::export::Formatter::write_str(f, stringify!($ident)) + impl $crate::__private::Debug for $ident { + fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { + $crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident)) } } - impl $crate::export::Eq for $ident {} + impl $crate::__private::Eq for $ident {} - impl $crate::export::PartialEq for $ident { - fn eq(&self, _other: &Self) -> $crate::export::bool { + impl $crate::__private::PartialEq for $ident { + fn eq(&self, _other: &Self) -> $crate::__private::bool { true } } - impl $crate::export::Hash for $ident { - fn hash<__H: $crate::export::Hasher>(&self, _state: &mut __H) {} + impl $crate::__private::Hash for $ident { + fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} } }; } @@ -221,16 +224,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation { // Not public API. #[doc(hidden)] -#[macro_export(local_inner_macros)] +#[macro_export] macro_rules! 
custom_punctuation_repr { ($($tt)+) => { - [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+] + [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+] }; } // Not public API. #[doc(hidden)] -#[macro_export(local_inner_macros)] +#[macro_export] #[rustfmt::skip] macro_rules! custom_punctuation_len { ($mode:ident, +) => { 1 }; @@ -279,7 +282,7 @@ macro_rules! custom_punctuation_len { ($mode:ident, -=) => { 2 }; ($mode:ident, ~) => { 1 }; (lenient, $tt:tt) => { 0 }; - (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }}; + (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }}; } // Not public API. @@ -294,16 +297,6 @@ macro_rules! custom_punctuation_unexpected { #[macro_export] macro_rules! stringify_punct { ($($tt)+) => { - concat!($(stringify!($tt)),+) - }; -} - -// Not public API. -// Without this, local_inner_macros breaks when looking for concat! -#[doc(hidden)] -#[macro_export] -macro_rules! custom_punctuation_concat { - ($($tt:tt)*) => { - concat!($($tt)*) + $crate::__private::concat!($($crate::__private::stringify!($tt)),+) }; } diff --git a/src/data.rs b/src/data.rs index be43679874..185f88ba01 100644 --- a/src/data.rs +++ b/src/data.rs @@ -3,11 +3,8 @@ use crate::punctuated::Punctuated; ast_struct! { /// An enum variant. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Variant { - /// Attributes tagged on the variant. pub attrs: Vec<Attribute>, /// Name of the variant. @@ -24,17 +21,12 @@ ast_struct! { ast_enum_of_structs! { /// Data stored within an enum variant or struct. /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// /// # Syntax tree enum /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums - // - // TODO: change syntax-tree-enum link to an intra rustdoc link, currently - // blocked on https://github.com/rust-lang/rust/issues/62833 + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum Fields { /// Named fields of a struct or struct variant such as `Point { x: f64, /// y: f64 }`. @@ -51,9 +43,7 @@ ast_enum_of_structs! { ast_struct! { /// Named fields of a struct or struct variant such as `Point { x: f64, /// y: f64 }`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct FieldsNamed { pub brace_token: token::Brace, pub named: Punctuated<Field, Token![,]>, @@ -62,9 +52,7 @@ ast_struct! { ast_struct! { /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct FieldsUnnamed { pub paren_token: token::Paren, pub unnamed: Punctuated<Field, Token![,]>, @@ -93,6 +81,24 @@ ast_struct! { impl Fields { Fields::Unnamed(f) => f.unnamed.iter_mut(), } } + + /// Returns the number of fields. + pub fn len(&self) -> usize { + match self { + Fields::Unit => 0, + Fields::Named(f) => f.named.len(), + Fields::Unnamed(f) => f.unnamed.len(), + } + } + + /// Returns `true` if there are zero fields.
diff --git a/src/data.rs b/src/data.rs
index be43679874..185f88ba01 100644
--- a/src/data.rs
+++ b/src/data.rs
@@ -3,11 +3,8 @@ use crate::punctuated::Punctuated;
 
 ast_struct! {
     /// An enum variant.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
     pub struct Variant {
-        /// Attributes tagged on the variant.
         pub attrs: Vec<Attribute>,
 
         /// Name of the variant.
@@ -24,17 +21,12 @@ ast_struct! {
 ast_enum_of_structs! {
     /// Data stored within an enum variant or struct.
     ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
     /// # Syntax tree enum
     ///
     /// This type is a [syntax tree enum].
     ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    //
-    // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
-    // blocked on https://github.com/rust-lang/rust/issues/62833
+    /// [syntax tree enum]: Expr#syntax-tree-enums
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
     pub enum Fields {
         /// Named fields of a struct or struct variant such as `Point { x: f64,
         /// y: f64 }`.
@@ -51,9 +43,7 @@ ast_enum_of_structs! {
 ast_struct! {
     /// Named fields of a struct or struct variant such as `Point { x: f64,
     /// y: f64 }`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
     pub struct FieldsNamed {
         pub brace_token: token::Brace,
         pub named: Punctuated<Field, Token![,]>,
@@ -62,9 +52,7 @@ ast_struct! {
 
 ast_struct! {
     /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
     pub struct FieldsUnnamed {
         pub paren_token: token::Paren,
         pub unnamed: Punctuated<Field, Token![,]>,
@@ -93,6 +81,24 @@ impl Fields {
             Fields::Unnamed(f) => f.unnamed.iter_mut(),
         }
     }
+
+    /// Returns the number of fields.
+    pub fn len(&self) -> usize {
+        match self {
+            Fields::Unit => 0,
+            Fields::Named(f) => f.named.len(),
+            Fields::Unnamed(f) => f.unnamed.len(),
+        }
+    }
+
+    /// Returns `true` if there are zero fields.
+    pub fn is_empty(&self) -> bool {
+        match self {
+            Fields::Unit => true,
+            Fields::Named(f) => f.named.is_empty(),
+            Fields::Unnamed(f) => f.unnamed.is_empty(),
+        }
+    }
 }
 
 impl IntoIterator for Fields {
@@ -128,16 +134,14 @@ impl<'a> IntoIterator for &'a mut Fields {
 
 ast_struct! {
     /// A field of a struct or enum variant.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
     pub struct Field {
-        /// Attributes tagged on the field.
         pub attrs: Vec<Attribute>,
 
-        /// Visibility of the field.
         pub vis: Visibility,
 
+        pub mutability: FieldMutability,
+
         /// Name of the field, if any.
         ///
         /// Fields of tuple structs have no names.
@@ -145,220 +149,108 @@ ast_struct! {
 
         pub colon_token: Option<Token![:]>,
 
-        /// Type of the field.
         pub ty: Type,
     }
 }
 
-ast_enum_of_structs! {
-    /// The visibility level of an item: inherited or `pub` or
-    /// `pub(restricted)`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// # Syntax tree enum
-    ///
-    /// This type is a [syntax tree enum].
-    ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    //
-    // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
-    // blocked on https://github.com/rust-lang/rust/issues/62833
-    pub enum Visibility {
-        /// A public visibility level: `pub`.
-        Public(VisPublic),
-
-        /// A crate-level visibility: `crate`.
-        Crate(VisCrate),
-
-        /// A visibility level restricted to some path: `pub(self)` or
-        /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
-        Restricted(VisRestricted),
-
-        /// An inherited visibility, which usually means private.
-        Inherited,
-    }
-}
-
-ast_struct! {
-    /// A public visibility level: `pub`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
-    pub struct VisPublic {
-        pub pub_token: Token![pub],
-    }
-}
-
-ast_struct! {
-    /// A crate-level visibility: `crate`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
-    pub struct VisCrate {
-        pub crate_token: Token![crate],
-    }
-}
-
-ast_struct! {
-    /// A visibility level restricted to some path: `pub(self)` or
-    /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
-    pub struct VisRestricted {
-        pub pub_token: Token![pub],
-        pub paren_token: token::Paren,
-        pub in_token: Option<Token![in]>,
-        pub path: Box<Path>,
-    }
-}
-
 #[cfg(feature = "parsing")]
-pub mod parsing {
+pub(crate) mod parsing {
     use super::*;
-    use crate::ext::IdentExt;
     use crate::parse::{Parse, ParseStream, Result};
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for Variant {
         fn parse(input: ParseStream) -> Result<Self> {
+            let attrs = input.call(Attribute::parse_outer)?;
+            let _visibility: Visibility = input.parse()?;
+            let ident: Ident = input.parse()?;
+            let fields = if input.peek(token::Brace) {
+                Fields::Named(input.parse()?)
+            } else if input.peek(token::Paren) {
+                Fields::Unnamed(input.parse()?)
+            } else {
+                Fields::Unit
+            };
+            let discriminant = if input.peek(Token![=]) {
+                let eq_token: Token![=] = input.parse()?;
+                let discriminant: Expr = input.parse()?;
+                Some((eq_token, discriminant))
+            } else {
+                None
+            };
             Ok(Variant {
-                attrs: input.call(Attribute::parse_outer)?,
-                ident: input.parse()?,
-                fields: {
-                    if input.peek(token::Brace) {
-                        Fields::Named(input.parse()?)
-                    } else if input.peek(token::Paren) {
-                        Fields::Unnamed(input.parse()?)
-                    } else {
-                        Fields::Unit
-                    }
-                },
-                discriminant: {
-                    if input.peek(Token![=]) {
-                        let eq_token: Token![=] = input.parse()?;
-                        let discriminant: Expr = input.parse()?;
-                        Some((eq_token, discriminant))
-                    } else {
-                        None
-                    }
-                },
+                attrs,
+                ident,
+                fields,
+                discriminant,
             })
         }
     }
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for FieldsNamed {
         fn parse(input: ParseStream) -> Result<Self> {
             let content;
             Ok(FieldsNamed {
                 brace_token: braced!(content in input),
-                named: content.parse_terminated(Field::parse_named)?,
+                named: content.parse_terminated(Field::parse_named, Token![,])?,
             })
         }
     }
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for FieldsUnnamed {
         fn parse(input: ParseStream) -> Result<Self> {
             let content;
             Ok(FieldsUnnamed {
                 paren_token: parenthesized!(content in input),
-                unnamed: content.parse_terminated(Field::parse_unnamed)?,
+                unnamed: content.parse_terminated(Field::parse_unnamed, Token![,])?,
             })
         }
     }
 
     impl Field {
         /// Parses a named (braced struct) field.
+        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
         pub fn parse_named(input: ParseStream) -> Result<Self> {
             Ok(Field {
                 attrs: input.call(Attribute::parse_outer)?,
                 vis: input.parse()?,
-                ident: Some(input.parse()?),
+                mutability: FieldMutability::None,
+                ident: Some(if input.peek(Token![_]) {
+                    input.call(Ident::parse_any)
+                } else {
+                    input.parse()
+                }?),
                 colon_token: Some(input.parse()?),
                 ty: input.parse()?,
             })
         }
 
         /// Parses an unnamed (tuple struct) field.
+        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
         pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
             Ok(Field {
                 attrs: input.call(Attribute::parse_outer)?,
                 vis: input.parse()?,
+                mutability: FieldMutability::None,
                 ident: None,
                 colon_token: None,
                 ty: input.parse()?,
             })
         }
     }
-
-    impl Parse for Visibility {
-        fn parse(input: ParseStream) -> Result<Self> {
-            if input.peek(Token![pub]) {
-                Self::parse_pub(input)
-            } else if input.peek(Token![crate]) {
-                Self::parse_crate(input)
-            } else {
-                Ok(Visibility::Inherited)
-            }
-        }
-    }
-
-    impl Visibility {
-        fn parse_pub(input: ParseStream) -> Result<Self> {
-            let pub_token = input.parse::<Token![pub]>()?;
-
-            if input.peek(token::Paren) {
-                // TODO: optimize using advance_to
-                let ahead = input.fork();
-                let mut content;
-                parenthesized!(content in ahead);
-
-                if content.peek(Token![crate])
-                    || content.peek(Token![self])
-                    || content.peek(Token![super])
-                {
-                    return Ok(Visibility::Restricted(VisRestricted {
-                        pub_token,
-                        paren_token: parenthesized!(content in input),
-                        in_token: None,
-                        path: Box::new(Path::from(content.call(Ident::parse_any)?)),
-                    }));
-                } else if content.peek(Token![in]) {
-                    return Ok(Visibility::Restricted(VisRestricted {
-                        pub_token,
-                        paren_token: parenthesized!(content in input),
-                        in_token: Some(content.parse()?),
-                        path: Box::new(content.call(Path::parse_mod_style)?),
-                    }));
-                }
-            }
-
-            Ok(Visibility::Public(VisPublic { pub_token }))
-        }
-
-        fn parse_crate(input: ParseStream) -> Result<Self> {
-            if input.peek2(Token![::]) {
-                Ok(Visibility::Inherited)
-            } else {
-                Ok(Visibility::Crate(VisCrate {
-                    crate_token: input.parse()?,
-                }))
-            }
-        }
-    }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-
+    use crate::print::TokensOrDefault;
     use proc_macro2::TokenStream;
     use quote::{ToTokens, TokenStreamExt};
 
-    use crate::print::TokensOrDefault;
-
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for Variant {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(&self.attrs);
@@ -371,6 +263,7 @@ mod printing {
         }
     }
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for FieldsNamed {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             self.brace_token.surround(tokens, |tokens| {
@@ -379,6 +272,7 @@ mod printing {
         }
     }
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for FieldsUnnamed {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             self.paren_token.surround(tokens, |tokens| {
@@ -387,6 +281,7 @@ mod printing {
         }
     }
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for Field {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(&self.attrs);
@@ -398,28 +293,4 @@ mod printing {
             self.ty.to_tokens(tokens);
         }
     }
-
-    impl ToTokens for VisPublic {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.pub_token.to_tokens(tokens)
-        }
-    }
-
-    impl ToTokens for VisCrate {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.crate_token.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for VisRestricted {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.pub_token.to_tokens(tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                // TODO: If we have a path which is not "self" or "super" or
-                // "crate", automatically add the "in" token.
-                self.in_token.to_tokens(tokens);
-                self.path.to_tokens(tokens);
-            });
-        }
-    }
 }
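The new `Fields::len` and `Fields::is_empty` helpers added above remove the need to match on the three `Fields` variants by hand. A small illustration, not part of the patch, assuming syn's default features:

    use syn::{parse_quote, Data, DeriveInput};

    fn main() {
        let input: DeriveInput = parse_quote! {
            struct Point { x: f64, y: f64 }
        };
        if let Data::Struct(data) = &input.data {
            // Counts fields uniformly across named, unnamed, and unit shapes.
            assert_eq!(data.fields.len(), 2);
            assert!(!data.fields.is_empty());
        }
    }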
diff --git a/src/derive.rs b/src/derive.rs
index 8cb9cf7b6d..25fa4c910b 100644
--- a/src/derive.rs
+++ b/src/derive.rs
@@ -3,58 +3,35 @@ use crate::punctuated::Punctuated;
 
 ast_struct! {
     /// Data structure sent to a `proc_macro_derive` macro.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
     pub struct DeriveInput {
-        /// Attributes tagged on the whole struct or enum.
         pub attrs: Vec<Attribute>,
-
-        /// Visibility of the struct or enum.
         pub vis: Visibility,
-
-        /// Name of the struct or enum.
         pub ident: Ident,
-
-        /// Generics required to complete the definition.
         pub generics: Generics,
-
-        /// Data within the struct or enum.
         pub data: Data,
     }
 }
 
-ast_enum_of_structs! {
+ast_enum! {
     /// The storage of a struct, enum or union data structure.
     ///
-    /// *This type is available if Syn is built with the `"derive"` feature.*
-    ///
     /// # Syntax tree enum
     ///
     /// This type is a [syntax tree enum].
     ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    //
-    // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
-    // blocked on https://github.com/rust-lang/rust/issues/62833
+    /// [syntax tree enum]: Expr#syntax-tree-enums
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
     pub enum Data {
-        /// A struct input to a `proc_macro_derive` macro.
         Struct(DataStruct),
-
-        /// An enum input to a `proc_macro_derive` macro.
         Enum(DataEnum),
-
-        /// An untagged union input to a `proc_macro_derive` macro.
         Union(DataUnion),
     }
-
-    do_not_generate_to_tokens
 }
 
 ast_struct! {
     /// A struct input to a `proc_macro_derive` macro.
-    ///
-    /// *This type is available if Syn is built with the `"derive"`
-    /// feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
     pub struct DataStruct {
         pub struct_token: Token![struct],
         pub fields: Fields,
@@ -64,9 +41,7 @@ ast_struct! {
 
 ast_struct! {
     /// An enum input to a `proc_macro_derive` macro.
-    ///
-    /// *This type is available if Syn is built with the `"derive"`
-    /// feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
     pub struct DataEnum {
         pub enum_token: Token![enum],
         pub brace_token: token::Brace,
@@ -76,9 +51,7 @@ ast_struct! {
 
 ast_struct! {
     /// An untagged union input to a `proc_macro_derive` macro.
-    ///
-    /// *This type is available if Syn is built with the `"derive"`
-    /// feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
     pub struct DataUnion {
         pub union_token: Token![union],
         pub fields: FieldsNamed,
@@ -86,11 +59,11 @@ ast_struct! {
 }
 
 #[cfg(feature = "parsing")]
-pub mod parsing {
+pub(crate) mod parsing {
     use super::*;
-
     use crate::parse::{Parse, ParseStream, Result};
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for DeriveInput {
         fn parse(input: ParseStream) -> Result<Self> {
             let attrs = input.call(Attribute::parse_outer)?;
@@ -159,7 +132,7 @@ pub mod parsing {
         }
     }
 
-    pub fn data_struct(
+    pub(crate) fn data_struct(
         input: ParseStream,
     ) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
         let mut lookahead = input.lookahead1();
@@ -195,7 +168,7 @@ pub mod parsing {
         }
     }
 
-    pub fn data_enum(
+    pub(crate) fn data_enum(
         input: ParseStream,
     ) -> Result<(
         Option<WhereClause>,
@@ -206,12 +179,12 @@ pub mod parsing {
         let content;
         let brace = braced!(content in input);
-        let variants = content.parse_terminated(Variant::parse)?;
+        let variants = content.parse_terminated(Variant::parse, Token![,])?;
 
         Ok((where_clause, brace, variants))
     }
 
-    pub fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
+    pub(crate) fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
         let where_clause = input.parse()?;
         let fields = input.parse()?;
         Ok((where_clause, fields))
@@ -221,13 +194,12 @@ pub mod parsing {
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-
-    use proc_macro2::TokenStream;
-    use quote::ToTokens;
-
     use crate::attr::FilterAttrs;
     use crate::print::TokensOrDefault;
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for DeriveInput {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             for attr in self.attrs.outer() {
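The shapes of `DeriveInput` and `Data` are unchanged by this hunk; only doc comments and the generated `ToTokens` plumbing move. A quick sketch of consuming them, not part of the patch, assuming syn's default features:

    use syn::{parse_quote, Data, DeriveInput};

    fn main() {
        let input: DeriveInput = parse_quote! {
            pub enum Shape { Circle, Square }
        };
        let kind = match input.data {
            Data::Struct(_) => "struct",
            Data::Enum(_) => "enum",
            Data::Union(_) => "union",
        };
        assert_eq!(kind, "enum");
        assert_eq!(input.ident, "Shape");
    }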
diff --git a/src/discouraged.rs b/src/discouraged.rs
index 4d9ff93728..fb98d6332c 100644
--- a/src/discouraged.rs
+++ b/src/discouraged.rs
@@ -1,6 +1,7 @@
 //! Extensions to the parsing API with niche applicability.
 
 use super::*;
+use proc_macro2::extra::DelimSpan;
 
 /// Extensions to the `ParseStream` API to support speculative parsing.
 pub trait Speculative {
@@ -16,7 +17,7 @@ pub trait Speculative {
     /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
     /// is that when the fork fails to parse an `A`, it's impossible to tell
     /// whether that was because of a syntax error and the user meant to provide
-    /// an `A`, or that the `A`s are finished and its time to start parsing
+    /// an `A`, or that the `A`s are finished and it's time to start parsing
     /// `B`s. Use with care.
     ///
     /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
@@ -72,7 +73,6 @@ pub trait Speculative {
     ///             || input.peek(Token![self])
     ///             || input.peek(Token![Self])
     ///             || input.peek(Token![crate])
-    ///             || input.peek(Token![extern])
     ///         {
     ///             let ident = input.call(Ident::parse_any)?;
     ///             return Ok(PathSegment::from(ident));
@@ -164,8 +164,56 @@ impl<'a> Speculative for ParseBuffer<'a> {
             panic!("Fork was not derived from the advancing parse stream");
         }
 
+        let (self_unexp, self_sp) = inner_unexpected(self);
+        let (fork_unexp, fork_sp) = inner_unexpected(fork);
+        if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
+            match (fork_sp, self_sp) {
+                // Unexpected set on the fork, but not on `self`, copy it over.
+                (Some(span), None) => {
+                    self_unexp.set(Unexpected::Some(span));
+                }
+                // Unexpected unset. Use chain to propagate errors from fork.
+                (None, None) => {
+                    fork_unexp.set(Unexpected::Chain(self_unexp));
+
+                    // Ensure toplevel 'unexpected' tokens from the fork don't
+                    // bubble up the chain by replacing the root `unexpected`
+                    // pointer, only 'unexpected' tokens from existing group
+                    // parsers should bubble.
+                    fork.unexpected
+                        .set(Some(Rc::new(Cell::new(Unexpected::None))));
+                }
+                // Unexpected has been set on `self`. No changes needed.
+                (_, Some(_)) => {}
+            }
+        }
+
         // See comment on `cell` in the struct definition.
         self.cell
-            .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
+            .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
     }
 }
+
+/// Extensions to the `ParseStream` API to support manipulating invisible
+/// delimiters the same as if they were visible.
+pub trait AnyDelimiter {
+    /// Returns the delimiter, the span of the delimiter token, and the nested
+    /// contents for further parsing.
+    fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)>;
+}
+
+impl<'a> AnyDelimiter for ParseBuffer<'a> {
+    fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> {
+        self.step(|cursor| {
+            if let Some((content, delimiter, span, rest)) = cursor.any_group() {
+                let scope = crate::buffer::close_span_of_group(*cursor);
+                let nested = crate::parse::advance_step_cursor(cursor, content);
+                let unexpected = crate::parse::get_unexpected(self);
+                let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
+                Ok(((delimiter, span, content), rest))
+            } else {
+                Err(cursor.error("expected any delimiter"))
+            }
+        })
+    }
+}
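The `advance_to` fix above changes how `unexpected` state propagates between a fork and the stream it came from; the calling pattern stays the same. A sketch of speculative parsing with `advance_to`, not part of the patch, assuming syn built with the "full" feature (the `TypeOrExpr` type is invented for illustration):

    use syn::parse::discouraged::Speculative;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::{Expr, Type};

    // Prefers a type if the entire input parses as one, else an expression.
    enum TypeOrExpr {
        Type(Type),
        Expr(Expr),
    }

    impl Parse for TypeOrExpr {
        fn parse(input: ParseStream) -> Result<Self> {
            let fork = input.fork();
            if let Ok(ty) = fork.parse::<Type>() {
                if fork.is_empty() {
                    // Commit the fork's progress back to the real stream.
                    input.advance_to(&fork);
                    return Ok(TypeOrExpr::Type(ty));
                }
            }
            input.parse().map(TypeOrExpr::Expr)
        }
    }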
diff --git a/src/drops.rs b/src/drops.rs
new file mode 100644
index 0000000000..89b42d82ef
--- /dev/null
+++ b/src/drops.rs
@@ -0,0 +1,58 @@
+use std::iter;
+use std::mem::ManuallyDrop;
+use std::ops::{Deref, DerefMut};
+use std::option;
+use std::slice;
+
+#[repr(transparent)]
+pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);
+
+impl<T> NoDrop<T> {
+    pub(crate) fn new(value: T) -> Self
+    where
+        T: TrivialDrop,
+    {
+        NoDrop(ManuallyDrop::new(value))
+    }
+}
+
+impl<T: ?Sized> Deref for NoDrop<T> {
+    type Target = T;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T: ?Sized> DerefMut for NoDrop<T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+pub(crate) trait TrivialDrop {}
+
+impl<T> TrivialDrop for iter::Empty<T> {}
+impl<'a, T> TrivialDrop for slice::Iter<'a, T> {}
+impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {}
+
+#[test]
+fn test_needs_drop() {
+    use std::mem::needs_drop;
+
+    struct NeedsDrop;
+
+    impl Drop for NeedsDrop {
+        fn drop(&mut self) {}
+    }
+
+    assert!(needs_drop::<NeedsDrop>());
+
+    // Test each of the types with a handwritten TrivialDrop impl above.
+    assert!(!needs_drop::<iter::Empty<NeedsDrop>>());
+    assert!(!needs_drop::<slice::Iter<NeedsDrop>>());
+    assert!(!needs_drop::<slice::IterMut<NeedsDrop>>());
+    assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>());
+    assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>());
+}
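The point of `NoDrop`/`TrivialDrop` is that wrapping a value in `ManuallyDrop` erases its drop glue, which the new file's test verifies via `needs_drop`. The same effect in isolation, not part of the patch:

    use std::mem::{needs_drop, ManuallyDrop};

    fn main() {
        assert!(needs_drop::<String>());
        // ManuallyDrop<T> never runs T's destructor, so it needs no drop glue.
        assert!(!needs_drop::<ManuallyDrop<String>>());
    }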
diff --git a/src/error.rs b/src/error.rs
index 146d652299..7eebd243c4 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,18 +1,14 @@
-use std;
-use std::fmt::{self, Debug, Display};
-use std::iter::FromIterator;
-use std::slice;
-use std::vec;
-
+#[cfg(feature = "parsing")]
+use crate::buffer::Cursor;
+use crate::thread::ThreadBound;
 use proc_macro2::{
     Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
 };
 #[cfg(feature = "printing")]
 use quote::ToTokens;
-
-#[cfg(feature = "parsing")]
-use crate::buffer::Cursor;
-use crate::thread::ThreadBound;
+use std::fmt::{self, Debug, Display};
+use std::slice;
+use std::vec;
 
 /// The result of a Syn parser.
 pub type Result<T> = std::result::Result<T, Error>;
@@ -26,34 +22,53 @@ pub type Result<T> = std::result::Result<T, Error>;
 /// [`compile_error!`] in the generated code. This produces a better diagnostic
 /// message than simply panicking the macro.
 ///
-/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
+/// [`compile_error!`]: std::compile_error!
 ///
 /// When parsing macro input, the [`parse_macro_input!`] macro handles the
 /// conversion to `compile_error!` automatically.
 ///
-/// ```
-/// extern crate proc_macro;
+/// [`parse_macro_input!`]: crate::parse_macro_input!
 ///
+/// ```
+/// # extern crate proc_macro;
+/// #
 /// use proc_macro::TokenStream;
-/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{parse_macro_input, ItemFn};
 ///
 /// # const IGNORE: &str = stringify! {
 /// #[proc_macro_attribute]
 /// # };
 /// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
-///     let args = parse_macro_input!(args as AttributeArgs);
+///     let args = parse_macro_input!(args as MyAttrArgs);
 ///     let input = parse_macro_input!(input as ItemFn);
 ///
 ///     /* ... */
 ///     # TokenStream::new()
 /// }
+///
+/// struct MyAttrArgs {
+///     # _k: [(); { stringify! {
+///     ...
+///     # }; 0 }]
+/// }
+///
+/// impl Parse for MyAttrArgs {
+///     fn parse(input: ParseStream) -> Result<Self> {
+///         # stringify! {
+///         ...
+///         # };
+///         # unimplemented!()
+///     }
+/// }
 /// ```
 ///
 /// For errors that arise later than the initial parsing stage, the
-/// [`.to_compile_error()`] method can be used to perform an explicit conversion
-/// to `compile_error!`.
+/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to
+/// perform an explicit conversion to `compile_error!`.
 ///
 /// [`.to_compile_error()`]: Error::to_compile_error
+/// [`.into_compile_error()`]: Error::into_compile_error
 ///
 /// ```
 /// # extern crate proc_macro;
@@ -69,7 +84,7 @@ pub type Result<T> = std::result::Result<T, Error>;
 ///
 ///     // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
 ///     expand::my_derive(input)
-///         .unwrap_or_else(|err| err.to_compile_error())
+///         .unwrap_or_else(syn::Error::into_compile_error)
 ///         .into()
 /// }
 /// #
@@ -82,7 +97,6 @@ pub type Result<T> = std::result::Result<T, Error>;
 /// # }
 /// # }
 /// ```
-#[derive(Clone)]
 pub struct Error {
     messages: Vec<ErrorMessage>,
 }
@@ -90,14 +104,21 @@ pub struct Error {
 struct ErrorMessage {
     // Span is implemented as an index into a thread-local interner to keep the
     // size small. It is not safe to access from a different thread. We want
-    // errors to be Send and Sync to play nicely with the Failure crate, so pin
-    // the span we're given to its original thread and assume it is
-    // Span::call_site if accessed from any other thread.
-    start_span: ThreadBound<Span>,
-    end_span: ThreadBound<Span>,
+    // errors to be Send and Sync to play nicely with ecosystem crates for error
+    // handling, so pin the span we're given to its original thread and assume
+    // it is Span::call_site if accessed from any other thread.
+    span: ThreadBound<SpanRange>,
     message: String,
 }
 
+// Cannot use std::ops::Range<Span> because that does not implement Copy,
+// whereas ThreadBound<T> requires a Copy impl as a way to ensure no Drop impls
+// are involved.
+struct SpanRange {
+    start: Span,
+    end: Span,
+}
+
 #[cfg(test)]
 struct _Test
 where
@@ -135,12 +156,18 @@ impl Error {
     /// }
     /// ```
     pub fn new<T: Display>(span: Span, message: T) -> Self {
-        Error {
-            messages: vec![ErrorMessage {
-                start_span: ThreadBound::new(span),
-                end_span: ThreadBound::new(span),
-                message: message.to_string(),
-            }],
+        return new(span, message.to_string());
+
+        fn new(span: Span, message: String) -> Error {
+            Error {
+                messages: vec![ErrorMessage {
+                    span: ThreadBound::new(SpanRange {
+                        start: span,
+                        end: span,
+                    }),
+                    message,
+                }],
+            }
         }
     }
 
@@ -159,15 +186,18 @@ impl Error {
     /// `ParseStream::error`)!
     #[cfg(feature = "printing")]
     pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
-        let mut iter = tokens.into_token_stream().into_iter();
-        let start = iter.next().map_or_else(Span::call_site, |t| t.span());
-        let end = iter.last().map_or(start, |t| t.span());
-        Error {
-            messages: vec![ErrorMessage {
-                start_span: ThreadBound::new(start),
-                end_span: ThreadBound::new(end),
-                message: message.to_string(),
-            }],
+        return new_spanned(tokens.into_token_stream(), message.to_string());
+
+        fn new_spanned(tokens: TokenStream, message: String) -> Error {
+            let mut iter = tokens.into_iter();
+            let start = iter.next().map_or_else(Span::call_site, |t| t.span());
+            let end = iter.last().map_or(start, |t| t.span());
+            Error {
+                messages: vec![ErrorMessage {
+                    span: ThreadBound::new(SpanRange { start, end }),
+                    message,
+                }],
+            }
         }
     }
 
@@ -177,11 +207,7 @@ impl Error {
     /// if called from a different thread than the one on which the `Error` was
     /// originally created.
     pub fn span(&self) -> Span {
-        let start = match self.messages[0].start_span.get() {
-            Some(span) => *span,
-            None => return Span::call_site(),
-        };
-        let end = match self.messages[0].end_span.get() {
+        let SpanRange { start, end } = match self.messages[0].span.get() {
             Some(span) => *span,
             None => return Span::call_site(),
         };
@@ -193,7 +219,8 @@ impl Error {
     /// The [`parse_macro_input!`] macro provides a convenient way to invoke
     /// this method correctly in a procedural macro.
     ///
-    /// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
+    /// [`compile_error!`]: std::compile_error!
+    /// [`parse_macro_input!`]: crate::parse_macro_input!
     pub fn to_compile_error(&self) -> TokenStream {
         self.messages
             .iter()
@@ -201,21 +228,55 @@ impl Error {
             .collect()
     }
 
+    /// Render the error as an invocation of [`compile_error!`].
+    ///
+    /// [`compile_error!`]: std::compile_error!
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # extern crate proc_macro;
+    /// #
+    /// use proc_macro::TokenStream;
+    /// use syn::{parse_macro_input, DeriveInput, Error};
+    ///
+    /// # const _: &str = stringify! {
+    /// #[proc_macro_derive(MyTrait)]
+    /// # };
+    /// pub fn derive_my_trait(input: TokenStream) -> TokenStream {
+    ///     let input = parse_macro_input!(input as DeriveInput);
+    ///     my_trait::expand(input)
+    ///         .unwrap_or_else(Error::into_compile_error)
+    ///         .into()
+    /// }
+    ///
+    /// mod my_trait {
+    ///     use proc_macro2::TokenStream;
+    ///     use syn::{DeriveInput, Result};
+    ///
+    ///     pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> {
+    ///         /* ... */
+    ///         # unimplemented!()
+    ///     }
+    /// }
+    /// ```
+    pub fn into_compile_error(self) -> TokenStream {
+        self.to_compile_error()
+    }
+
     /// Add another error message to self such that when `to_compile_error()` is
     /// called, both errors will be emitted together.
     pub fn combine(&mut self, another: Error) {
-        self.messages.extend(another.messages)
+        self.messages.extend(another.messages);
     }
 }
 
 impl ErrorMessage {
     fn to_compile_error(&self) -> TokenStream {
-        let start = self
-            .start_span
-            .get()
-            .cloned()
-            .unwrap_or_else(Span::call_site);
-        let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+        let (start, end) = match self.span.get() {
+            Some(range) => (range.start, range.end),
+            None => (Span::call_site(), Span::call_site()),
+        };
 
         // compile_error!($message)
         TokenStream::from_iter(vec![
@@ -241,7 +302,7 @@ impl ErrorMessage {
 }
 
 #[cfg(feature = "parsing")]
-pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+pub(crate) fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
     if cursor.eof() {
         Error::new(scope, format!("unexpected end of input, {}", message))
     } else {
@@ -250,6 +311,20 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
     }
 }
 
+#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
+pub(crate) fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
+    return new2(start, end, message.to_string());
+
+    fn new2(start: Span, end: Span, message: String) -> Error {
+        Error {
+            messages: vec![ErrorMessage {
+                span: ThreadBound::new(SpanRange { start, end }),
+                message,
+            }],
+        }
+    }
+}
+
 impl Debug for Error {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         if self.messages.len() == 1 {
@@ -278,31 +353,36 @@ impl Display for Error {
     }
 }
 
+impl Clone for Error {
+    fn clone(&self) -> Self {
+        Error {
+            messages: self.messages.clone(),
+        }
+    }
+}
+
 impl Clone for ErrorMessage {
     fn clone(&self) -> Self {
-        let start = self
-            .start_span
-            .get()
-            .cloned()
-            .unwrap_or_else(Span::call_site);
-        let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
         ErrorMessage {
-            start_span: ThreadBound::new(start),
-            end_span: ThreadBound::new(end),
+            span: self.span.clone(),
             message: self.message.clone(),
         }
     }
 }
 
-impl std::error::Error for Error {
-    fn description(&self) -> &str {
-        "parse error"
+impl Clone for SpanRange {
+    fn clone(&self) -> Self {
+        *self
     }
 }
 
+impl Copy for SpanRange {}
+
+impl std::error::Error for Error {}
+
 impl From<LexError> for Error {
     fn from(err: LexError) -> Self {
-        Error::new(Span::call_site(), format!("{:?}", err))
+        Error::new(err.span(), "lex error")
     }
 }
 
@@ -355,3 +435,11 @@ impl<'a> Iterator for Iter<'a> {
         })
     }
 }
+
+impl Extend<Error> for Error {
+    fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
+        for err in iter {
+            self.combine(err);
+        }
+    }
+}
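Alongside `into_compile_error`, the new `Extend` impl above lets multiple errors be accumulated and emitted together. A short sketch, not part of the patch, assuming `syn::Error`'s existing owned `IntoIterator` impl:

    use proc_macro2::Span;
    use syn::Error;

    fn main() {
        let mut err = Error::new(Span::call_site(), "first problem");
        err.extend([Error::new(Span::call_site(), "second problem")]);
        // Both messages would be rendered by a single to_compile_error() call.
        assert_eq!(err.into_iter().count(), 2);
    }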
diff --git a/src/export.rs b/src/export.rs
index 37dc467a7f..c1c16f9ed1 100644
--- a/src/export.rs
+++ b/src/export.rs
@@ -1,20 +1,33 @@
 pub use std::clone::Clone;
 pub use std::cmp::{Eq, PartialEq};
-pub use std::convert::From;
+pub use std::concat;
 pub use std::default::Default;
 pub use std::fmt::{self, Debug, Formatter};
 pub use std::hash::{Hash, Hasher};
 pub use std::marker::Copy;
 pub use std::option::Option::{None, Some};
 pub use std::result::Result::{Err, Ok};
+pub use std::stringify;
 
 #[cfg(feature = "printing")]
-pub extern crate quote;
+pub use quote;
 
 pub use proc_macro2::{Span, TokenStream as TokenStream2};
 
+#[cfg(feature = "parsing")]
+pub use crate::group::{parse_braces, parse_brackets, parse_parens};
+
 pub use crate::span::IntoSpans;
 
+#[cfg(all(feature = "parsing", feature = "printing"))]
+pub use crate::parse_quote::parse as parse_quote;
+
+#[cfg(feature = "parsing")]
+pub use crate::token::parsing::{peek_punct, punct as parse_punct};
+
+#[cfg(feature = "printing")]
+pub use crate::token::printing::punct as print_punct;
+
 #[cfg(all(
     not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
     feature = "proc-macro"
@@ -33,3 +46,5 @@ mod help {
     pub type Bool = bool;
     pub type Str = str;
 }
+
+pub struct private(pub(crate) ());
diff --git a/src/expr.rs b/src/expr.rs
index 2874a463aa..d1b3920f50 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -1,18 +1,19 @@
 use super::*;
 use crate::punctuated::Punctuated;
-#[cfg(feature = "extra-traits")]
-use crate::tt::TokenStreamHelper;
 use proc_macro2::{Span, TokenStream};
-#[cfg(feature = "extra-traits")]
+#[cfg(feature = "printing")]
+use quote::IdentFragment;
+#[cfg(feature = "printing")]
+use std::fmt::{self, Display};
 use std::hash::{Hash, Hasher};
-#[cfg(all(feature = "parsing", feature = "full"))]
+#[cfg(feature = "parsing")]
 use std::mem;
 
 ast_enum_of_structs! {
     /// A Rust expression.
     ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
+    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+    /// feature, but most of the variants are not available unless "full" is enabled.*
     ///
     /// # Syntax tree enums
     ///
@@ -83,31 +84,27 @@ ast_enum_of_structs! {
     /// A sign that you may not be choosing the right variable names is if you
     /// see names getting repeated in your code, like accessing
     /// `receiver.receiver` or `pat.pat` or `cond.cond`.
-    pub enum Expr #manual_extra_traits {
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
+    #[non_exhaustive]
+    pub enum Expr {
         /// A slice literal expression: `[a, b, c, d]`.
         Array(ExprArray),
 
         /// An assignment expression: `a = compute()`.
         Assign(ExprAssign),
 
-        /// A compound assignment expression: `counter += 1`.
-        AssignOp(ExprAssignOp),
-
         /// An async block: `async { ... }`.
         Async(ExprAsync),
 
         /// An await expression: `fut.await`.
         Await(ExprAwait),
 
-        /// A binary operation: `a + b`, `a * b`.
+        /// A binary operation: `a + b`, `a += b`.
         Binary(ExprBinary),
 
         /// A blocked scope: `{ ... }`.
         Block(ExprBlock),
 
-        /// A box expression: `box f`.
-        Box(ExprBox),
-
         /// A `break`, with an optional label to break and an optional
         /// expression.
         Break(ExprBreak),
@@ -121,6 +118,9 @@ ast_enum_of_structs! {
         /// A closure expression: `|a, b| a + b`.
         Closure(ExprClosure),
 
+        /// A const block: `const { ... }`.
+        Const(ExprConst),
+
         /// A `continue`, with an optional label.
         Continue(ExprContinue),
 
@@ -148,6 +148,9 @@ ast_enum_of_structs! {
         /// A square bracketed indexing expression: `vector[2]`.
         Index(ExprIndex),
 
+        /// The inferred value of a const generic argument, denoted `_`.
+        Infer(ExprInfer),
+
         /// A `let` guard: `let Some(x) = opt`.
         Let(ExprLet),
 
@@ -202,9 +205,6 @@ ast_enum_of_structs! {
         /// A tuple expression: `(a, b, c, d)`.
         Tuple(ExprTuple),
 
-        /// A type ascription expression: `foo: f64`.
-        Type(ExprType),
-
         /// A unary operation: `!x`, `*x`.
         Unary(ExprUnary),
 
@@ -220,15 +220,28 @@ ast_enum_of_structs! {
         /// A yield expression: `yield expr`.
         Yield(ExprYield),
 
-        #[doc(hidden)]
-        __Nonexhaustive,
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match expr {
+        //         Expr::Array(expr) => {...}
+        //         Expr::Assign(expr) => {...}
+        //         ...
+        //         Expr::Yield(expr) => {...}
+        //
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+        //         _ => { /* some sane fallback */ }
+        //     }
+        //
+        // This way we fail your tests but don't break your library when adding
+        // a variant. You will be notified by a test failure when a variant is
+        // added, so that you can add code to handle it, but your library will
+        // continue to compile and work for downstream users in the interim.
     }
 }
 
 ast_struct! {
     /// A slice literal expression: `[a, b, c, d]`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
     pub struct ExprArray #full {
         pub attrs: Vec<Attribute>,
         pub bracket_token: token::Bracket,
@@ -238,8 +251,7 @@ ast_struct! {
 
 ast_struct! {
     /// An assignment expression: `a = compute()`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
     pub struct ExprAssign #full {
         pub attrs: Vec<Attribute>,
         pub left: Box<Expr>,
@@ -248,22 +260,9 @@ ast_struct! {
     }
 }
 
-ast_struct! {
-    /// A compound assignment expression: `counter += 1`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct ExprAssignOp #full {
-        pub attrs: Vec<Attribute>,
-        pub left: Box<Expr>,
-        pub op: BinOp,
-        pub right: Box<Expr>,
-    }
-}
-
 ast_struct! {
     /// An async block: `async { ... }`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
     pub struct ExprAsync #full {
         pub attrs: Vec<Attribute>,
         pub async_token: Token![async],
@@ -274,21 +273,18 @@ ast_struct! {
 
 ast_struct! {
     /// An await expression: `fut.await`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
     pub struct ExprAwait #full {
         pub attrs: Vec<Attribute>,
         pub base: Box<Expr>,
         pub dot_token: Token![.],
-        pub await_token: token::Await,
+        pub await_token: Token![await],
     }
 }
 
 ast_struct! {
-    /// A binary operation: `a + b`, `a * b`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or
-    /// `"full"` feature.*
+    /// A binary operation: `a + b`, `a += b`.
+    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
     pub struct ExprBinary {
         pub attrs: Vec<Attribute>,
         pub left: Box<Expr>,
@@ -299,8 +295,7 @@ ast_struct! {
 
 ast_struct! {
     /// A blocked scope: `{ ... }`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
     pub struct ExprBlock #full {
         pub attrs: Vec<Attribute>,
         pub label: Option<Label>,