Snap for 10453563 from e78d800069db0aa9a43aa425ebd797dc5d90abea to mainline-ipsec-release

Change-Id: Ieac02081eda383f6916fd1c912e115f769922f25
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index cb0b7f1..649e893 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,6 +1,6 @@
 {
   "git": {
-    "sha1": "7ff292d5ac9802a3cc7908c5641a060bd5cf0d78"
+    "sha1": "4168f6bbdf91a01b24e893c451dd726ecddd88d2"
   },
   "path_in_vcs": ""
 }
\ No newline at end of file
diff --git a/Android.bp b/Android.bp
index 1ec1d6c..cee562d 100644
--- a/Android.bp
+++ b/Android.bp
@@ -41,7 +41,7 @@
     name: "libsyn",
     crate_name: "syn",
     cargo_env_compat: true,
-    cargo_pkg_version: "1.0.86",
+    cargo_pkg_version: "1.0.107",
     srcs: ["src/lib.rs"],
     edition: "2018",
     features: [
@@ -61,7 +61,9 @@
     rustlibs: [
         "libproc_macro2",
         "libquote",
-        "libunicode_xid",
+        "libunicode_ident",
     ],
     compile_multilib: "first",
+    product_available: true,
+    vendor_available: true,
 }
diff --git a/Cargo.toml b/Cargo.toml
index 11f0619..c2a3601 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,33 +13,69 @@
 edition = "2018"
 rust-version = "1.31"
 name = "syn"
-version = "1.0.86"
+version = "1.0.107"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
-include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+include = [
+    "/benches/**",
+    "/build.rs",
+    "/Cargo.toml",
+    "/LICENSE-APACHE",
+    "/LICENSE-MIT",
+    "/README.md",
+    "/src/**",
+    "/tests/**",
+]
 description = "Parser for Rust source code"
 documentation = "https://docs.rs/syn"
 readme = "README.md"
-categories = ["development-tools::procedural-macro-helpers"]
+keywords = [
+    "macros",
+    "syn",
+]
+categories = [
+    "development-tools::procedural-macro-helpers",
+    "parser-implementations",
+]
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/dtolnay/syn"
+
 [package.metadata.docs.rs]
 all-features = true
-rustdoc-args = ["--cfg", "doc_cfg"]
 targets = ["x86_64-unknown-linux-gnu"]
+rustdoc-args = [
+    "--cfg",
+    "doc_cfg",
+]
 
 [package.metadata.playground]
-features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+features = [
+    "full",
+    "visit",
+    "visit-mut",
+    "fold",
+    "extra-traits",
+]
+
+[lib]
+doc-scrape-examples = false
 
 [[bench]]
 name = "rust"
 harness = false
-required-features = ["full", "parsing"]
+required-features = [
+    "full",
+    "parsing",
+]
 
 [[bench]]
 name = "file"
-required-features = ["full", "parsing"]
+required-features = [
+    "full",
+    "parsing",
+]
+
 [dependencies.proc-macro2]
-version = "1.0.32"
+version = "1.0.46"
 default-features = false
 
 [dependencies.quote]
@@ -47,8 +83,9 @@
 optional = true
 default-features = false
 
-[dependencies.unicode-xid]
-version = "0.2"
+[dependencies.unicode-ident]
+version = "1.0"
+
 [dev-dependencies.anyhow]
 version = "1.0"
 
@@ -88,14 +125,23 @@
 
 [features]
 clone-impls = []
-default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
+default = [
+    "derive",
+    "parsing",
+    "printing",
+    "clone-impls",
+    "proc-macro",
+]
 derive = []
 extra-traits = []
 fold = []
 full = []
 parsing = []
 printing = ["quote"]
-proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
+proc-macro = [
+    "proc-macro2/proc-macro",
+    "quote/proc-macro",
+]
 test = ["syn-test-suite/all-features"]
 visit = []
 visit-mut = []
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index b6fc02a..dd32f92 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,13 +1,11 @@
 [package]
 name = "syn"
-version = "1.0.86" # don't forget to update html_root_url and syn.json
+version = "1.0.107" # don't forget to update html_root_url and syn.json
 authors = ["David Tolnay <dtolnay@gmail.com>"]
-license = "MIT OR Apache-2.0"
+categories = ["development-tools::procedural-macro-helpers", "parser-implementations"]
 description = "Parser for Rust source code"
-repository = "https://github.com/dtolnay/syn"
 documentation = "https://docs.rs/syn"
-categories = ["development-tools::procedural-macro-helpers"]
-readme = "README.md"
+edition = "2018"
 include = [
     "/benches/**",
     "/build.rs",
@@ -18,7 +16,9 @@
     "/src/**",
     "/tests/**",
 ]
-edition = "2018"
+keywords = ["macros", "syn"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/syn"
 rust-version = "1.31"
 
 [features]
@@ -36,9 +36,9 @@
 test = ["syn-test-suite/all-features"]
 
 [dependencies]
-proc-macro2 = { version = "1.0.32", default-features = false }
+proc-macro2 = { version = "1.0.46", default-features = false }
 quote = { version = "1.0", optional = true, default-features = false }
-unicode-xid = "0.2"
+unicode-ident = "1.0"
 
 [dev-dependencies]
 anyhow = "1.0"
@@ -54,6 +54,9 @@
 termcolor = "1.0"
 walkdir = "2.1"
 
+[lib]
+doc-scrape-examples = false
+
 [[bench]]
 name = "rust"
 harness = false
@@ -72,4 +75,17 @@
 features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
 
 [workspace]
-members = ["dev", "json", "tests/crates", "tests/features"]
+members = [
+    "dev",
+    "examples/dump-syntax",
+    "examples/heapsize/example",
+    "examples/heapsize/heapsize",
+    "examples/heapsize/heapsize_derive",
+    "examples/lazy-static/example",
+    "examples/lazy-static/lazy-static",
+    "examples/trace-var/example",
+    "examples/trace-var/trace-var",
+    "json",
+    "tests/crates",
+    "tests/features",
+]
diff --git a/METADATA b/METADATA
index e4a8012..2fa7d2c 100644
--- a/METADATA
+++ b/METADATA
@@ -1,3 +1,7 @@
+# This project was upgraded with external_updater.
+# Usage: tools/external_updater/updater.sh update rust/crates/syn
+# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+
 name: "syn"
 description: "Parser for Rust source code"
 third_party {
@@ -7,13 +11,13 @@
   }
   url {
     type: ARCHIVE
-    value: "https://static.crates.io/crates/syn/syn-1.0.86.crate"
+    value: "https://static.crates.io/crates/syn/syn-1.0.107.crate"
   }
-  version: "1.0.86"
+  version: "1.0.107"
   license_type: NOTICE
   last_upgrade_date {
     year: 2022
-    month: 3
-    day: 1
+    month: 12
+    day: 22
   }
 }
diff --git a/README.md b/README.md
index 38005f5..eeef83d 100644
--- a/README.md
+++ b/README.md
@@ -3,8 +3,8 @@
 
 [<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
 [<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
-[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/syn/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
 
 Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
 of Rust source code.
diff --git a/TEST_MAPPING b/TEST_MAPPING
index bff320d..7c46892 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -11,6 +11,9 @@
       "path": "external/rust/crates/argh"
     },
     {
+      "path": "external/rust/crates/async-stream"
+    },
+    {
       "path": "external/rust/crates/base64"
     },
     {
@@ -20,18 +23,39 @@
       "path": "external/rust/crates/bytes"
     },
     {
+      "path": "external/rust/crates/coset"
+    },
+    {
       "path": "external/rust/crates/either"
     },
     {
+      "path": "external/rust/crates/futures-channel"
+    },
+    {
+      "path": "external/rust/crates/futures-executor"
+    },
+    {
+      "path": "external/rust/crates/futures-test"
+    },
+    {
       "path": "external/rust/crates/futures-util"
     },
     {
+      "path": "external/rust/crates/hashbrown"
+    },
+    {
+      "path": "external/rust/crates/hashlink"
+    },
+    {
       "path": "external/rust/crates/jni"
     },
     {
       "path": "external/rust/crates/libm"
     },
     {
+      "path": "external/rust/crates/libsqlite3-sys"
+    },
+    {
       "path": "external/rust/crates/oid-registry"
     },
     {
@@ -69,160 +93,75 @@
     },
     {
       "path": "external/rust/crates/url"
-    }
-  ],
-  "presubmit": [
-    {
-      "name": "ZipFuseTest"
     },
     {
-      "name": "apkdmverity.test"
+      "path": "external/rust/crates/virtio-drivers"
     },
     {
-      "name": "authfs_device_test_src_lib"
+      "path": "external/rust/crates/zerocopy"
     },
     {
-      "name": "diced_open_dice_cbor_test"
+      "path": "external/rust/crates/zeroize"
     },
     {
-      "name": "diced_sample_inputs_test"
+      "path": "external/uwb/src"
     },
     {
-      "name": "diced_test"
+      "path": "packages/modules/DnsResolver"
     },
     {
-      "name": "diced_utils_test"
+      "path": "packages/modules/Virtualization/apkdmverity"
     },
     {
-      "name": "diced_vendor_test"
+      "path": "packages/modules/Virtualization/authfs"
     },
     {
-      "name": "doh_unit_test"
+      "path": "packages/modules/Virtualization/avmd"
     },
     {
-      "name": "keystore2_crypto_test_rust"
+      "path": "packages/modules/Virtualization/encryptedstore"
     },
     {
-      "name": "keystore2_km_compat_test"
+      "path": "packages/modules/Virtualization/libs/apexutil"
     },
     {
-      "name": "keystore2_selinux_concurrency_test"
+      "path": "packages/modules/Virtualization/libs/apkverify"
     },
     {
-      "name": "keystore2_selinux_test"
+      "path": "packages/modules/Virtualization/libs/devicemapper"
     },
     {
-      "name": "keystore2_test"
+      "path": "packages/modules/Virtualization/microdroid_manager"
     },
     {
-      "name": "keystore2_test_utils_test"
+      "path": "packages/modules/Virtualization/virtualizationmanager"
     },
     {
-      "name": "keystore2_vintf_test"
+      "path": "packages/modules/Virtualization/vm"
     },
     {
-      "name": "legacykeystore_test"
+      "path": "packages/modules/Virtualization/zipfuse"
     },
     {
-      "name": "libapkverify.integration_test"
+      "path": "system/keymint/derive"
     },
     {
-      "name": "libapkverify.test"
+      "path": "system/keymint/hal"
     },
     {
-      "name": "libcert_request_validator_tests"
+      "path": "system/security/diced"
     },
     {
-      "name": "libidsig.test"
+      "path": "system/security/keystore2"
     },
     {
-      "name": "librustutils_test"
+      "path": "system/security/keystore2/legacykeystore"
     },
     {
-      "name": "microdroid_manager_test"
+      "path": "system/security/keystore2/selinux"
     },
     {
-      "name": "rustBinderTest"
-    },
-    {
-      "name": "virtualizationservice_device_test"
-    }
-  ],
-  "presubmit-rust": [
-    {
-      "name": "ZipFuseTest"
-    },
-    {
-      "name": "apkdmverity.test"
-    },
-    {
-      "name": "authfs_device_test_src_lib"
-    },
-    {
-      "name": "diced_open_dice_cbor_test"
-    },
-    {
-      "name": "diced_sample_inputs_test"
-    },
-    {
-      "name": "diced_test"
-    },
-    {
-      "name": "diced_utils_test"
-    },
-    {
-      "name": "diced_vendor_test"
-    },
-    {
-      "name": "doh_unit_test"
-    },
-    {
-      "name": "keystore2_crypto_test_rust"
-    },
-    {
-      "name": "keystore2_km_compat_test"
-    },
-    {
-      "name": "keystore2_selinux_concurrency_test"
-    },
-    {
-      "name": "keystore2_selinux_test"
-    },
-    {
-      "name": "keystore2_test"
-    },
-    {
-      "name": "keystore2_test_utils_test"
-    },
-    {
-      "name": "keystore2_vintf_test"
-    },
-    {
-      "name": "legacykeystore_test"
-    },
-    {
-      "name": "libapkverify.integration_test"
-    },
-    {
-      "name": "libapkverify.test"
-    },
-    {
-      "name": "libcert_request_validator_tests"
-    },
-    {
-      "name": "libidsig.test"
-    },
-    {
-      "name": "librustutils_test"
-    },
-    {
-      "name": "microdroid_manager_test"
-    },
-    {
-      "name": "rustBinderTest"
-    },
-    {
-      "name": "virtualizationservice_device_test"
+      "path": "system/security/keystore2/src/crypto"
     }
   ]
 }
diff --git a/benches/file.rs b/benches/file.rs
index 86204df..bd4a247 100644
--- a/benches/file.rs
+++ b/benches/file.rs
@@ -1,8 +1,12 @@
-// $ cargo bench --features full --bench file
+// $ cargo bench --features full,test --bench file
 
 #![feature(rustc_private, test)]
 #![recursion_limit = "1024"]
-#![allow(clippy::missing_panics_doc, clippy::must_use_candidate)]
+#![allow(
+    clippy::items_after_statements,
+    clippy::missing_panics_doc,
+    clippy::must_use_candidate
+)]
 
 extern crate test;
 
@@ -15,17 +19,37 @@
 #[path = "../tests/repo/mod.rs"]
 pub mod repo;
 
-use proc_macro2::TokenStream;
+use proc_macro2::{Span, TokenStream};
 use std::fs;
 use std::str::FromStr;
+use syn::parse::{ParseStream, Parser};
 use test::Bencher;
 
 const FILE: &str = "tests/rust/library/core/src/str/mod.rs";
 
-#[bench]
-fn parse_file(b: &mut Bencher) {
+fn get_tokens() -> TokenStream {
     repo::clone_rust();
     let content = fs::read_to_string(FILE).unwrap();
-    let tokens = TokenStream::from_str(&content).unwrap();
+    TokenStream::from_str(&content).unwrap()
+}
+
+#[bench]
+fn baseline(b: &mut Bencher) {
+    let tokens = get_tokens();
+    b.iter(|| drop(tokens.clone()));
+}
+
+#[bench]
+fn create_token_buffer(b: &mut Bencher) {
+    let tokens = get_tokens();
+    fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
+        Err(syn::Error::new(Span::call_site(), ""))
+    }
+    b.iter(|| immediate_fail.parse2(tokens.clone()));
+}
+
+#[bench]
+fn parse_file(b: &mut Bencher) {
+    let tokens = get_tokens();
     b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
 }
diff --git a/benches/rust.rs b/benches/rust.rs
index 5454293..e3f8f55 100644
--- a/benches/rust.rs
+++ b/benches/rust.rs
@@ -1,7 +1,7 @@
-// $ cargo bench --features full --bench rust
+// $ cargo bench --features full,test --bench rust
 //
 // Syn only, useful for profiling:
-// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust
 
 #![cfg_attr(not(syn_only), feature(rustc_private))]
 #![recursion_limit = "1024"]
@@ -38,13 +38,15 @@
 #[cfg(not(syn_only))]
 mod librustc_parse {
     extern crate rustc_data_structures;
+    extern crate rustc_error_messages;
     extern crate rustc_errors;
     extern crate rustc_parse;
     extern crate rustc_session;
     extern crate rustc_span;
 
     use rustc_data_structures::sync::Lrc;
-    use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
+    use rustc_error_messages::FluentBundle;
+    use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler};
     use rustc_session::parse::ParseSess;
     use rustc_span::source_map::{FilePathMapping, SourceMap};
     use rustc_span::{edition::Edition, FileName};
@@ -59,12 +61,21 @@
             }
         }
 
+        impl Translate for SilentEmitter {
+            fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> {
+                None
+            }
+            fn fallback_fluent_bundle(&self) -> &FluentBundle {
+                panic!("silent emitter attempted to translate a diagnostic");
+            }
+        }
+
         rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
             let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let emitter = Box::new(SilentEmitter);
             let handler = Handler::with_emitter(false, None, emitter);
             let sess = ParseSess::with_span_handler(handler, cm);
-            if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+            if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
                 FileName::Custom("bench".to_owned()),
                 content.to_owned(),
                 &sess,
@@ -80,7 +91,7 @@
 #[cfg(not(syn_only))]
 mod read_from_disk {
     pub fn bench(content: &str) -> Result<(), ()> {
-        let _ = content;
+        _ = content;
         Ok(())
     }
 }
diff --git a/build.rs b/build.rs
index c705fc5..1a2c077 100644
--- a/build.rs
+++ b/build.rs
@@ -19,6 +19,10 @@
         println!("cargo:rustc-cfg=syn_no_const_vec_new");
     }
 
+    if compiler.minor < 40 {
+        println!("cargo:rustc-cfg=syn_no_non_exhaustive");
+    }
+
     if compiler.minor < 56 {
         println!("cargo:rustc-cfg=syn_no_negative_literal_parse");
     }
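
The new cfg probe above makes `syn_no_non_exhaustive` available to the library when the detected compiler predates Rust 1.40, the release that stabilized `#[non_exhaustive]`. A minimal sketch of how such a flag is consumed, mirroring the pattern the later src/expr.rs hunks apply to `Expr`; the enum and variant names here are illustrative only:

// On new compilers the real attribute is used; on pre-1.40 compilers the
// cfg emitted by build.rs gates a hidden catch-all variant instead.
#[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
pub enum Example {
    A,
    B,
    // Fallback for compilers that lack #[non_exhaustive].
    #[cfg(syn_no_non_exhaustive)]
    #[doc(hidden)]
    __NonExhaustive,
}
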
diff --git a/src/buffer.rs b/src/buffer.rs
index 43e77e9..0d5cf30 100644
--- a/src/buffer.rs
+++ b/src/buffer.rs
@@ -14,21 +14,20 @@
 use crate::proc_macro as pm;
 use crate::Lifetime;
 use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::cmp::Ordering;
 use std::marker::PhantomData;
-use std::ptr;
-use std::slice;
 
 /// Internal type which is used instead of `TokenTree` to represent a token tree
 /// within a `TokenBuffer`.
 enum Entry {
     // Mimicking types from proc-macro.
-    Group(Group, TokenBuffer),
+    // Group entries contain the offset to the matching End entry.
+    Group(Group, usize),
     Ident(Ident),
     Punct(Punct),
     Literal(Literal),
-    // End entries contain a raw pointer to the entry from the containing
-    // token tree, or null if this is the outermost level.
-    End(*const Entry),
+    // End entries contain the offset (negative) to the start of the buffer.
+    End(isize),
 }
 
 /// A buffer that can be efficiently traversed multiple times, unlike
@@ -37,76 +36,29 @@
 ///
 /// *This type is available only if Syn is built with the `"parsing"` feature.*
 pub struct TokenBuffer {
-    // NOTE: Do not implement clone on this - there are raw pointers inside
-    // these entries which will be messed up. Moving the `TokenBuffer` itself is
-    // safe as the data pointed to won't be moved.
-    ptr: *const Entry,
-    len: usize,
-}
-
-impl Drop for TokenBuffer {
-    fn drop(&mut self) {
-        unsafe {
-            let slice = slice::from_raw_parts_mut(self.ptr as *mut Entry, self.len);
-            let _ = Box::from_raw(slice);
-        }
-    }
+    // NOTE: Do not implement clone on this - while the current design could be
+    // cloned, other designs which could be desirable may not be cloneable.
+    entries: Box<[Entry]>,
 }
 
 impl TokenBuffer {
-    // NOTE: Do not mutate the Vec returned from this function once it returns;
-    // the address of its backing memory must remain stable.
-    fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer {
-        // Build up the entries list, recording the locations of any Groups
-        // in the list to be processed later.
-        let mut entries = Vec::new();
-        let mut groups = Vec::new();
+    fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) {
         for tt in stream {
             match tt {
-                TokenTree::Ident(sym) => {
-                    entries.push(Entry::Ident(sym));
-                }
-                TokenTree::Punct(op) => {
-                    entries.push(Entry::Punct(op));
-                }
-                TokenTree::Literal(l) => {
-                    entries.push(Entry::Literal(l));
-                }
-                TokenTree::Group(g) => {
-                    // Record the index of the interesting entry, and store an
-                    // `End(null)` there temporarily.
-                    groups.push((entries.len(), g));
-                    entries.push(Entry::End(ptr::null()));
+                TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)),
+                TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)),
+                TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
+                TokenTree::Group(group) => {
+                    let group_start_index = entries.len();
+                    entries.push(Entry::End(0)); // we replace this below
+                    Self::recursive_new(entries, group.stream());
+                    let group_end_index = entries.len();
+                    entries.push(Entry::End(-(group_end_index as isize)));
+                    let group_end_offset = group_end_index - group_start_index;
+                    entries[group_start_index] = Entry::Group(group, group_end_offset);
                 }
             }
         }
-        // Add an `End` entry to the end with a reference to the enclosing token
-        // stream which was passed in.
-        entries.push(Entry::End(up));
-
-        // NOTE: This is done to ensure that we don't accidentally modify the
-        // length of the backing buffer. The backing buffer must remain at a
-        // constant address after this point, as we are going to store a raw
-        // pointer into it.
-        let mut entries = entries.into_boxed_slice();
-        for (idx, group) in groups {
-            // We know that this index refers to one of the temporary
-            // `End(null)` entries, and we know that the last entry is
-            // `End(up)`, so the next index is also valid.
-            let group_up = unsafe { entries.as_ptr().add(idx + 1) };
-
-            // The end entry stored at the end of this Entry::Group should
-            // point to the Entry which follows the Group in the list.
-            let inner = Self::inner_new(group.stream(), group_up);
-            entries[idx] = Entry::Group(group, inner);
-        }
-
-        let len = entries.len();
-        let ptr = Box::into_raw(entries);
-        TokenBuffer {
-            ptr: ptr as *const Entry,
-            len,
-        }
     }
 
     /// Creates a `TokenBuffer` containing all the tokens from the input
@@ -125,13 +77,19 @@
     /// Creates a `TokenBuffer` containing all the tokens from the input
     /// `proc_macro2::TokenStream`.
     pub fn new2(stream: TokenStream) -> Self {
-        Self::inner_new(stream, ptr::null())
+        let mut entries = Vec::new();
+        Self::recursive_new(&mut entries, stream);
+        entries.push(Entry::End(-(entries.len() as isize)));
+        Self {
+            entries: entries.into_boxed_slice(),
+        }
     }
 
     /// Creates a cursor referencing the first token in the buffer and able to
     /// traverse until the end of the buffer.
     pub fn begin(&self) -> Cursor {
-        unsafe { Cursor::create(self.ptr, self.ptr.add(self.len - 1)) }
+        let ptr = self.entries.as_ptr();
+        unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) }
     }
 }
 
@@ -151,7 +109,7 @@
 pub struct Cursor<'a> {
     // The current entry which the `Cursor` is pointing at.
     ptr: *const Entry,
-    // This is the only `Entry::End(..)` object which this cursor is allowed to
+    // This is the only `Entry::End` object which this cursor is allowed to
     // point at. All other `End` objects are skipped over in `Cursor::create`.
     scope: *const Entry,
     // Cursor is covariant in 'a. This field ensures that our pointers are still
@@ -171,7 +129,7 @@
         // object in global storage.
         struct UnsafeSyncEntry(Entry);
         unsafe impl Sync for UnsafeSyncEntry {}
-        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry));
+        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0));
 
         Cursor {
             ptr: &EMPTY_ENTRY.0,
@@ -184,15 +142,15 @@
     /// `None`-delimited scopes when the cursor reaches the end of them,
     /// allowing for them to be treated transparently.
     unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
-        // NOTE: If we're looking at a `End(..)`, we want to advance the cursor
+        // NOTE: If we're looking at a `End`, we want to advance the cursor
         // past it, unless `ptr == scope`, which means that we're at the edge of
         // our cursor's scope. We should only have `ptr != scope` at the exit
         // from None-delimited groups entered with `ignore_none`.
-        while let Entry::End(exit) = *ptr {
+        while let Entry::End(_) = *ptr {
             if ptr == scope {
                 break;
             }
-            ptr = exit;
+            ptr = ptr.add(1);
         }
 
         Cursor {
@@ -210,7 +168,10 @@
     /// Bump the cursor to point at the next token after the current one. This
     /// is undefined behavior if the cursor is currently looking at an
     /// `Entry::End`.
-    unsafe fn bump(self) -> Cursor<'a> {
+    ///
+    /// If the cursor is looking at an `Entry::Group`, the bumped cursor will
+    /// point at the first token in the group (with the same scope end).
+    unsafe fn bump_ignore_group(self) -> Cursor<'a> {
         Cursor::create(self.ptr.offset(1), self.scope)
     }
 
@@ -220,14 +181,9 @@
     ///
     /// WARNING: This mutates its argument.
     fn ignore_none(&mut self) {
-        while let Entry::Group(group, buf) = self.entry() {
+        while let Entry::Group(group, _) = self.entry() {
             if group.delimiter() == Delimiter::None {
-                // NOTE: We call `Cursor::create` here to make sure that
-                // situations where we should immediately exit the span after
-                // entering it are handled correctly.
-                unsafe {
-                    *self = Cursor::create(buf.ptr, self.scope);
-                }
+                unsafe { *self = self.bump_ignore_group() };
             } else {
                 break;
             }
@@ -251,9 +207,12 @@
             self.ignore_none();
         }
 
-        if let Entry::Group(group, buf) = self.entry() {
+        if let Entry::Group(group, end_offset) = self.entry() {
             if group.delimiter() == delim {
-                return Some((buf.begin(), group.span(), unsafe { self.bump() }));
+                let end_of_group = unsafe { self.ptr.add(*end_offset) };
+                let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
+                let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+                return Some((inside_of_group, group.span(), after_group));
             }
         }
 
@@ -265,7 +224,7 @@
     pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
         self.ignore_none();
         match self.entry() {
-            Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump() })),
+            Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })),
             _ => None,
         }
     }
@@ -275,7 +234,9 @@
     pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
         self.ignore_none();
         match self.entry() {
-            Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })),
+            Entry::Punct(punct) if punct.as_char() != '\'' => {
+                Some((punct.clone(), unsafe { self.bump_ignore_group() }))
+            }
             _ => None,
         }
     }
@@ -285,7 +246,7 @@
     pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
         self.ignore_none();
         match self.entry() {
-            Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })),
+            Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })),
             _ => None,
         }
     }
@@ -295,18 +256,14 @@
     pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
         self.ignore_none();
         match self.entry() {
-            Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
-                let next = unsafe { self.bump() };
-                match next.ident() {
-                    Some((ident, rest)) => {
-                        let lifetime = Lifetime {
-                            apostrophe: op.span(),
-                            ident,
-                        };
-                        Some((lifetime, rest))
-                    }
-                    None => None,
-                }
+            Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
+                let next = unsafe { self.bump_ignore_group() };
+                let (ident, rest) = next.ident()?;
+                let lifetime = Lifetime {
+                    apostrophe: punct.span(),
+                    ident,
+                };
+                Some((lifetime, rest))
             }
             _ => None,
         }
@@ -332,15 +289,16 @@
     /// This method does not treat `None`-delimited groups as transparent, and
     /// will return a `Group(None, ..)` if the cursor is looking at one.
     pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
-        let tree = match self.entry() {
-            Entry::Group(group, _) => group.clone().into(),
-            Entry::Literal(lit) => lit.clone().into(),
-            Entry::Ident(ident) => ident.clone().into(),
-            Entry::Punct(op) => op.clone().into(),
-            Entry::End(..) => return None,
+        let (tree, len) = match self.entry() {
+            Entry::Group(group, end_offset) => (group.clone().into(), *end_offset),
+            Entry::Literal(literal) => (literal.clone().into(), 1),
+            Entry::Ident(ident) => (ident.clone().into(), 1),
+            Entry::Punct(punct) => (punct.clone().into(), 1),
+            Entry::End(_) => return None,
         };
 
-        Some((tree, unsafe { self.bump() }))
+        let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
+        Some((tree, rest))
     }
 
     /// Returns the `Span` of the current token, or `Span::call_site()` if this
@@ -348,10 +306,10 @@
     pub fn span(self) -> Span {
         match self.entry() {
             Entry::Group(group, _) => group.span(),
-            Entry::Literal(l) => l.span(),
-            Entry::Ident(t) => t.span(),
-            Entry::Punct(o) => o.span(),
-            Entry::End(..) => Span::call_site(),
+            Entry::Literal(literal) => literal.span(),
+            Entry::Ident(ident) => ident.span(),
+            Entry::Punct(punct) => punct.span(),
+            Entry::End(_) => Span::call_site(),
         }
     }
 
@@ -360,19 +318,22 @@
     ///
     /// This method treats `'lifetimes` as a single token.
     pub(crate) fn skip(self) -> Option<Cursor<'a>> {
-        match self.entry() {
-            Entry::End(..) => None,
+        let len = match self.entry() {
+            Entry::End(_) => return None,
 
             // Treat lifetimes as a single tt for the purposes of 'skip'.
-            Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
-                let next = unsafe { self.bump() };
-                match next.entry() {
-                    Entry::Ident(_) => Some(unsafe { next.bump() }),
-                    _ => Some(next),
+            Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
+                match unsafe { &*self.ptr.add(1) } {
+                    Entry::Ident(_) => 2,
+                    _ => 1,
                 }
             }
-            _ => Some(unsafe { self.bump() }),
-        }
+
+            Entry::Group(_, end_offset) => *end_offset,
+            _ => 1,
+        };
+
+        Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) })
     }
 }
 
@@ -388,9 +349,17 @@
 
 impl<'a> PartialEq for Cursor<'a> {
     fn eq(&self, other: &Self) -> bool {
-        let Cursor { ptr, scope, marker } = self;
-        let _ = marker;
-        *ptr == other.ptr && *scope == other.scope
+        self.ptr == other.ptr
+    }
+}
+
+impl<'a> PartialOrd for Cursor<'a> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        if same_buffer(*self, *other) {
+            Some(self.ptr.cmp(&other.ptr))
+        } else {
+            None
+        }
     }
 }
 
@@ -398,6 +367,22 @@
     a.scope == b.scope
 }
 
+pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
+    unsafe {
+        match (&*a.scope, &*b.scope) {
+            (Entry::End(a_offset), Entry::End(b_offset)) => {
+                a.scope.offset(*a_offset) == b.scope.offset(*b_offset)
+            }
+            _ => unreachable!(),
+        }
+    }
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
+    a.ptr.cmp(&b.ptr)
+}
+
 pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
     match cursor.entry() {
         Entry::Group(group, _) => group.span_open(),
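
The rewritten `TokenBuffer` above replaces the old self-referential design (nested buffers plus raw back-pointers) with a single flat boxed slice: each `Group` entry records the forward offset to its matching `End` entry, and each `End` records a negative offset back to the start of the buffer, so cursors advance by plain pointer arithmetic. A simplified sketch of that flattening, using stand-in types rather than syn's real `Entry`/`TokenBuffer`:

// Stand-in nested structure and flat entry type, for illustration only.
enum Node {
    Leaf(char),
    Group(Vec<Node>),
}

enum Entry {
    Leaf(char),
    // Forward offset from this entry to its matching End entry.
    Group(usize),
    // Negative offset from this entry back to the start of the buffer.
    End(isize),
}

fn flatten_into(entries: &mut Vec<Entry>, nodes: &[Node]) {
    for node in nodes {
        match node {
            Node::Leaf(c) => entries.push(Entry::Leaf(*c)),
            Node::Group(children) => {
                let group_start = entries.len();
                entries.push(Entry::End(0)); // placeholder, patched below
                flatten_into(entries, children);
                let group_end = entries.len();
                entries.push(Entry::End(-(group_end as isize)));
                entries[group_start] = Entry::Group(group_end - group_start);
            }
        }
    }
}

fn flatten(nodes: &[Node]) -> Box<[Entry]> {
    let mut entries = Vec::new();
    flatten_into(&mut entries, nodes);
    // A trailing End closes the outermost scope, pointing back to index 0.
    entries.push(Entry::End(-(entries.len() as isize)));
    entries.into_boxed_slice()
}
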
diff --git a/src/custom_keyword.rs b/src/custom_keyword.rs
index 69d787e..a3ec9d4 100644
--- a/src/custom_keyword.rs
+++ b/src/custom_keyword.rs
@@ -128,7 +128,7 @@
         // For peek.
         impl $crate::token::CustomToken for $ident {
             fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
-                if let Some((ident, _rest)) = cursor.ident() {
+                if let $crate::__private::Some((ident, _rest)) = cursor.ident() {
                     ident == stringify!($ident)
                 } else {
                     false
diff --git a/src/data.rs b/src/data.rs
index dc2138c..3b46661 100644
--- a/src/data.rs
+++ b/src/data.rs
@@ -246,12 +246,11 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for Variant {
         fn parse(input: ParseStream) -> Result<Self> {
-            let mut attrs = input.call(Attribute::parse_outer)?;
+            let attrs = input.call(Attribute::parse_outer)?;
             let _visibility: Visibility = input.parse()?;
             let ident: Ident = input.parse()?;
             let fields = if input.peek(token::Brace) {
-                let fields = parse_braced(input, &mut attrs)?;
-                Fields::Named(fields)
+                Fields::Named(input.parse()?)
             } else if input.peek(token::Paren) {
                 Fields::Unnamed(input.parse()?)
             } else {
@@ -295,17 +294,6 @@
         }
     }
 
-    pub(crate) fn parse_braced(
-        input: ParseStream,
-        attrs: &mut Vec<Attribute>,
-    ) -> Result<FieldsNamed> {
-        let content;
-        let brace_token = braced!(content in input);
-        attr::parsing::parse_inner(&content, attrs)?;
-        let named = content.parse_terminated(Field::parse_named)?;
-        Ok(FieldsNamed { brace_token, named })
-    }
-
     impl Field {
         /// Parses a named (braced struct) field.
         #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
diff --git a/src/derive.rs b/src/derive.rs
index 17387e4..af9bb91 100644
--- a/src/derive.rs
+++ b/src/derive.rs
@@ -95,7 +95,7 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for DeriveInput {
         fn parse(input: ParseStream) -> Result<Self> {
-            let mut attrs = input.call(Attribute::parse_outer)?;
+            let attrs = input.call(Attribute::parse_outer)?;
             let vis = input.parse::<Visibility>()?;
 
             let lookahead = input.lookahead1();
@@ -103,7 +103,7 @@
                 let struct_token = input.parse::<Token![struct]>()?;
                 let ident = input.parse::<Ident>()?;
                 let generics = input.parse::<Generics>()?;
-                let (where_clause, fields, semi) = data_struct(input, &mut attrs)?;
+                let (where_clause, fields, semi) = data_struct(input)?;
                 Ok(DeriveInput {
                     attrs,
                     vis,
@@ -122,7 +122,7 @@
                 let enum_token = input.parse::<Token![enum]>()?;
                 let ident = input.parse::<Ident>()?;
                 let generics = input.parse::<Generics>()?;
-                let (where_clause, brace, variants) = data_enum(input, &mut attrs)?;
+                let (where_clause, brace, variants) = data_enum(input)?;
                 Ok(DeriveInput {
                     attrs,
                     vis,
@@ -141,7 +141,7 @@
                 let union_token = input.parse::<Token![union]>()?;
                 let ident = input.parse::<Ident>()?;
                 let generics = input.parse::<Generics>()?;
-                let (where_clause, fields) = data_union(input, &mut attrs)?;
+                let (where_clause, fields) = data_union(input)?;
                 Ok(DeriveInput {
                     attrs,
                     vis,
@@ -163,7 +163,6 @@
 
     pub fn data_struct(
         input: ParseStream,
-        attrs: &mut Vec<Attribute>,
     ) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
         let mut lookahead = input.lookahead1();
         let mut where_clause = None;
@@ -188,7 +187,7 @@
                 Err(lookahead.error())
             }
         } else if lookahead.peek(token::Brace) {
-            let fields = data::parsing::parse_braced(input, attrs)?;
+            let fields = input.parse()?;
             Ok((where_clause, Fields::Named(fields), None))
         } else if lookahead.peek(Token![;]) {
             let semi = input.parse()?;
@@ -200,7 +199,6 @@
 
     pub fn data_enum(
         input: ParseStream,
-        attrs: &mut Vec<Attribute>,
     ) -> Result<(
         Option<WhereClause>,
         token::Brace,
@@ -210,18 +208,14 @@
 
         let content;
         let brace = braced!(content in input);
-        attr::parsing::parse_inner(&content, attrs)?;
         let variants = content.parse_terminated(Variant::parse)?;
 
         Ok((where_clause, brace, variants))
     }
 
-    pub fn data_union(
-        input: ParseStream,
-        attrs: &mut Vec<Attribute>,
-    ) -> Result<(Option<WhereClause>, FieldsNamed)> {
+    pub fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
         let where_clause = input.parse()?;
-        let fields = data::parsing::parse_braced(input, attrs)?;
+        let fields = input.parse()?;
         Ok((where_clause, fields))
     }
 }
diff --git a/src/drops.rs b/src/drops.rs
new file mode 100644
index 0000000..89b42d8
--- /dev/null
+++ b/src/drops.rs
@@ -0,0 +1,58 @@
+use std::iter;
+use std::mem::ManuallyDrop;
+use std::ops::{Deref, DerefMut};
+use std::option;
+use std::slice;
+
+#[repr(transparent)]
+pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);
+
+impl<T> NoDrop<T> {
+    pub(crate) fn new(value: T) -> Self
+    where
+        T: TrivialDrop,
+    {
+        NoDrop(ManuallyDrop::new(value))
+    }
+}
+
+impl<T: ?Sized> Deref for NoDrop<T> {
+    type Target = T;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T: ?Sized> DerefMut for NoDrop<T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+pub(crate) trait TrivialDrop {}
+
+impl<T> TrivialDrop for iter::Empty<T> {}
+impl<'a, T> TrivialDrop for slice::Iter<'a, T> {}
+impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {}
+
+#[test]
+fn test_needs_drop() {
+    use std::mem::needs_drop;
+
+    struct NeedsDrop;
+
+    impl Drop for NeedsDrop {
+        fn drop(&mut self) {}
+    }
+
+    assert!(needs_drop::<NeedsDrop>());
+
+    // Test each of the types with a handwritten TrivialDrop impl above.
+    assert!(!needs_drop::<iter::Empty<NeedsDrop>>());
+    assert!(!needs_drop::<slice::Iter<NeedsDrop>>());
+    assert!(!needs_drop::<slice::IterMut<NeedsDrop>>());
+    assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>());
+    assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>());
+}
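
The new `NoDrop` wrapper uses `ManuallyDrop` to strip drop glue from iterator types whose destructors are trivial, with the `TrivialDrop` bound keeping the constructor restricted to types where skipping the destructor loses nothing. A small sketch of the mechanism, separate from syn's code:

// ManuallyDrop<T> has no drop glue even when T does; the TrivialDrop bound
// on NoDrop::new restricts the wrapper to types whose destructors do nothing,
// so eliding them is harmless.
use std::mem::{needs_drop, ManuallyDrop};

fn main() {
    assert!(needs_drop::<Vec<String>>());
    assert!(!needs_drop::<ManuallyDrop<Vec<String>>>());
    // Types like slice::Iter (covered by the TrivialDrop impls above) are
    // the intended use: they borrow their elements, so no destructor is lost.
    assert!(!needs_drop::<std::slice::Iter<'static, String>>());
}
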
diff --git a/src/error.rs b/src/error.rs
index b505b89..e301367 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -28,6 +28,8 @@
 /// When parsing macro input, the [`parse_macro_input!`] macro handles the
 /// conversion to `compile_error!` automatically.
 ///
+/// [`parse_macro_input!`]: crate::parse_macro_input!
+///
 /// ```
 /// # extern crate proc_macro;
 /// #
@@ -132,12 +134,16 @@
     /// }
     /// ```
     pub fn new<T: Display>(span: Span, message: T) -> Self {
-        Error {
-            messages: vec![ErrorMessage {
-                start_span: ThreadBound::new(span),
-                end_span: ThreadBound::new(span),
-                message: message.to_string(),
-            }],
+        return new(span, message.to_string());
+
+        fn new(span: Span, message: String) -> Error {
+            Error {
+                messages: vec![ErrorMessage {
+                    start_span: ThreadBound::new(span),
+                    end_span: ThreadBound::new(span),
+                    message,
+                }],
+            }
         }
     }
 
@@ -156,15 +162,19 @@
     /// `ParseStream::error`)!
     #[cfg(feature = "printing")]
     pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
-        let mut iter = tokens.into_token_stream().into_iter();
-        let start = iter.next().map_or_else(Span::call_site, |t| t.span());
-        let end = iter.last().map_or(start, |t| t.span());
-        Error {
-            messages: vec![ErrorMessage {
-                start_span: ThreadBound::new(start),
-                end_span: ThreadBound::new(end),
-                message: message.to_string(),
-            }],
+        return new_spanned(tokens.into_token_stream(), message.to_string());
+
+        fn new_spanned(tokens: TokenStream, message: String) -> Error {
+            let mut iter = tokens.into_iter();
+            let start = iter.next().map_or_else(Span::call_site, |t| t.span());
+            let end = iter.last().map_or(start, |t| t.span());
+            Error {
+                messages: vec![ErrorMessage {
+                    start_span: ThreadBound::new(start),
+                    end_span: ThreadBound::new(end),
+                    message,
+                }],
+            }
         }
     }
 
@@ -191,6 +201,7 @@
     /// this method correctly in a procedural macro.
     ///
     /// [`compile_error!`]: std::compile_error!
+    /// [`parse_macro_input!`]: crate::parse_macro_input!
     pub fn to_compile_error(&self) -> TokenStream {
         self.messages
             .iter()
@@ -285,12 +296,16 @@
 
 #[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
 pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
-    Error {
-        messages: vec![ErrorMessage {
-            start_span: ThreadBound::new(start),
-            end_span: ThreadBound::new(end),
-            message: message.to_string(),
-        }],
+    return new2(start, end, message.to_string());
+
+    fn new2(start: Span, end: Span, message: String) -> Error {
+        Error {
+            messages: vec![ErrorMessage {
+                start_span: ThreadBound::new(start),
+                end_span: ThreadBound::new(end),
+                message,
+            }],
+        }
     }
 }
 
diff --git a/src/export.rs b/src/export.rs
index d3a0878..f478d09 100644
--- a/src/export.rs
+++ b/src/export.rs
@@ -12,6 +12,9 @@
 
 pub use proc_macro2::{Span, TokenStream as TokenStream2};
 
+#[cfg(feature = "parsing")]
+pub use crate::group::{parse_braces, parse_brackets, parse_parens};
+
 pub use crate::span::IntoSpans;
 
 #[cfg(all(
diff --git a/src/expr.rs b/src/expr.rs
index 95da090..93a59b0 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -87,6 +87,7 @@
     /// see names getting repeated in your code, like accessing
     /// `receiver.receiver` or `pat.pat` or `cond.cond`.
     #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum Expr {
         /// A slice literal expression: `[a, b, c, d]`.
         Array(ExprArray),
@@ -224,18 +225,17 @@
         /// A yield expression: `yield expr`.
         Yield(ExprYield),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
+        //
+        // For testing exhaustiveness in downstream code, use the following idiom:
         //
         //     match expr {
-        //         Expr::Array(e) => {...}
-        //         Expr::Assign(e) => {...}
+        //         Expr::Array(expr) => {...}
+        //         Expr::Assign(expr) => {...}
         //         ...
-        //         Expr::Yield(e) => {...}
+        //         Expr::Yield(expr) => {...}
         //
-        //         #[cfg(test)]
-        //         Expr::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -243,12 +243,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, Expr will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -784,6 +781,16 @@
 }
 
 impl Expr {
+    #[cfg(all(feature = "parsing", not(syn_no_const_vec_new)))]
+    const DUMMY: Self = Expr::Path(ExprPath {
+        attrs: Vec::new(),
+        qself: None,
+        path: Path {
+            leading_colon: None,
+            segments: Punctuated::new(),
+        },
+    });
+
     #[cfg(all(feature = "parsing", feature = "full"))]
     pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
         match self {
@@ -828,9 +835,7 @@
             | Expr::Yield(ExprYield { attrs, .. }) => mem::replace(attrs, new),
             Expr::Verbatim(_) => Vec::new(),
 
-            #[cfg(test)]
-            Expr::__TestExhaustive(_) => unimplemented!(),
-            #[cfg(not(test))]
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1087,6 +1092,8 @@
 #[cfg(feature = "parsing")]
 pub(crate) mod parsing {
     use super::*;
+    #[cfg(feature = "full")]
+    use crate::parse::ParseBuffer;
     use crate::parse::{Parse, ParseStream, Result};
     use crate::path;
     #[cfg(feature = "full")]
@@ -1364,7 +1371,10 @@
                 });
             } else if Precedence::Cast >= base && input.peek(Token![as]) {
                 let as_token: Token![as] = input.parse()?;
-                let ty = input.call(Type::without_plus)?;
+                let allow_plus = false;
+                let allow_group_generic = false;
+                let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
+                check_cast(input)?;
                 lhs = Expr::Cast(ExprCast {
                     attrs: Vec::new(),
                     expr: Box::new(lhs),
@@ -1373,7 +1383,10 @@
                 });
             } else if Precedence::Cast >= base && input.peek(Token![:]) && !input.peek(Token![::]) {
                 let colon_token: Token![:] = input.parse()?;
-                let ty = input.call(Type::without_plus)?;
+                let allow_plus = false;
+                let allow_group_generic = false;
+                let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
+                check_cast(input)?;
                 lhs = Expr::Type(ExprType {
                     attrs: Vec::new(),
                     expr: Box::new(lhs),
@@ -1420,7 +1433,10 @@
                 });
             } else if Precedence::Cast >= base && input.peek(Token![as]) {
                 let as_token: Token![as] = input.parse()?;
-                let ty = input.call(Type::without_plus)?;
+                let allow_plus = false;
+                let allow_group_generic = false;
+                let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
+                check_cast(input)?;
                 lhs = Expr::Cast(ExprCast {
                     attrs: Vec::new(),
                     expr: Box::new(lhs),
@@ -1517,7 +1533,7 @@
         } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
             expr_unary(input, attrs, allow_struct).map(Expr::Unary)
         } else {
-            trailer_expr(attrs, input, allow_struct)
+            trailer_expr(begin, attrs, input, allow_struct)
         }
     }
 
@@ -1542,6 +1558,7 @@
     // <atom> ? ...
     #[cfg(feature = "full")]
     fn trailer_expr(
+        begin: ParseBuffer,
         mut attrs: Vec<Attribute>,
         input: ParseStream,
         allow_struct: AllowStruct,
@@ -1549,9 +1566,14 @@
         let atom = atom_expr(input, allow_struct)?;
         let mut e = trailer_helper(input, atom)?;
 
-        let inner_attrs = e.replace_attrs(Vec::new());
-        attrs.extend(inner_attrs);
-        e.replace_attrs(attrs);
+        if let Expr::Verbatim(tokens) = &mut e {
+            *tokens = verbatim::between(begin, input);
+        } else {
+            let inner_attrs = e.replace_attrs(Vec::new());
+            attrs.extend(inner_attrs);
+            e.replace_attrs(attrs);
+        }
+
         Ok(e)
     }
 
@@ -1595,27 +1617,7 @@
 
                 let member: Member = input.parse()?;
                 let turbofish = if member.is_named() && input.peek(Token![::]) {
-                    Some(MethodTurbofish {
-                        colon2_token: input.parse()?,
-                        lt_token: input.parse()?,
-                        args: {
-                            let mut args = Punctuated::new();
-                            loop {
-                                if input.peek(Token![>]) {
-                                    break;
-                                }
-                                let value = input.call(generic_method_argument)?;
-                                args.push_value(value);
-                                if input.peek(Token![>]) {
-                                    break;
-                                }
-                                let punct = input.parse()?;
-                                args.push_punct(punct);
-                            }
-                            args
-                        },
-                        gt_token: input.parse()?,
-                    })
+                    Some(input.parse::<MethodTurbofish>()?)
                 } else {
                     None
                 };
@@ -1731,6 +1733,15 @@
             || input.peek(Token![move])
         {
             expr_closure(input, allow_struct).map(Expr::Closure)
+        } else if input.peek(Token![for])
+            && input.peek2(Token![<])
+            && (input.peek3(Lifetime) || input.peek3(Token![>]))
+        {
+            let begin = input.fork();
+            input.parse::<BoundLifetimes>()?;
+            expr_closure(input, allow_struct)?;
+            let verbatim = verbatim::between(begin, input);
+            Ok(Expr::Verbatim(verbatim))
         } else if input.peek(Ident)
             || input.peek(Token![::])
             || input.peek(Token![<])
@@ -1854,8 +1865,7 @@
         }
 
         if allow_struct.0 && input.peek(token::Brace) {
-            let outer_attrs = Vec::new();
-            let expr_struct = expr_struct_helper(input, outer_attrs, expr.path)?;
+            let expr_struct = expr_struct_helper(input, expr.path)?;
             if expr.qself.is_some() {
                 Ok(Expr::Verbatim(verbatim::between(begin, input)))
             } else {
@@ -1881,10 +1891,9 @@
     fn paren_or_tuple(input: ParseStream) -> Result<Expr> {
         let content;
         let paren_token = parenthesized!(content in input);
-        let inner_attrs = content.call(Attribute::parse_inner)?;
         if content.is_empty() {
             return Ok(Expr::Tuple(ExprTuple {
-                attrs: inner_attrs,
+                attrs: Vec::new(),
                 paren_token,
                 elems: Punctuated::new(),
             }));
@@ -1893,7 +1902,7 @@
         let first: Expr = content.parse()?;
         if content.is_empty() {
             return Ok(Expr::Paren(ExprParen {
-                attrs: inner_attrs,
+                attrs: Vec::new(),
                 paren_token,
                 expr: Box::new(first),
             }));
@@ -1911,7 +1920,7 @@
             elems.push_value(value);
         }
         Ok(Expr::Tuple(ExprTuple {
-            attrs: inner_attrs,
+            attrs: Vec::new(),
             paren_token,
             elems,
         }))
@@ -1921,10 +1930,9 @@
     fn array_or_repeat(input: ParseStream) -> Result<Expr> {
         let content;
         let bracket_token = bracketed!(content in input);
-        let inner_attrs = content.call(Attribute::parse_inner)?;
         if content.is_empty() {
             return Ok(Expr::Array(ExprArray {
-                attrs: inner_attrs,
+                attrs: Vec::new(),
                 bracket_token,
                 elems: Punctuated::new(),
             }));
@@ -1944,7 +1952,7 @@
                 elems.push_value(value);
             }
             Ok(Expr::Array(ExprArray {
-                attrs: inner_attrs,
+                attrs: Vec::new(),
                 bracket_token,
                 elems,
             }))
@@ -1952,7 +1960,7 @@
             let semi_token: Token![;] = content.parse()?;
             let len: Expr = content.parse()?;
             Ok(Expr::Repeat(ExprRepeat {
-                attrs: inner_attrs,
+                attrs: Vec::new(),
                 bracket_token,
                 expr: Box::new(first),
                 semi_token,
@@ -1969,7 +1977,6 @@
         fn parse(input: ParseStream) -> Result<Self> {
             let content;
             let bracket_token = bracketed!(content in input);
-            let inner_attrs = content.call(Attribute::parse_inner)?;
             let mut elems = Punctuated::new();
 
             while !content.is_empty() {
@@ -1983,7 +1990,7 @@
             }
 
             Ok(ExprArray {
-                attrs: inner_attrs,
+                attrs: Vec::new(),
                 bracket_token,
                 elems,
             })
@@ -1997,7 +2004,7 @@
             let content;
             Ok(ExprRepeat {
                 bracket_token: bracketed!(content in input),
-                attrs: content.call(Attribute::parse_inner)?,
+                attrs: Vec::new(),
                 expr: content.parse()?,
                 semi_token: content.parse()?,
                 len: content.parse()?,
@@ -2012,7 +2019,9 @@
             Expr::If(input.parse()?)
         } else if input.peek(Token![while]) {
             Expr::While(input.parse()?)
-        } else if input.peek(Token![for]) {
+        } else if input.peek(Token![for])
+            && !(input.peek2(Token![<]) && (input.peek3(Lifetime) || input.peek3(Token![>])))
+        {
             Expr::ForLoop(input.parse()?)
         } else if input.peek(Token![loop]) {
             Expr::Loop(input.parse()?)
@@ -2089,18 +2098,49 @@
     }
 
     #[cfg(feature = "full")]
-    fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
-        if input.peek(Lit) {
-            let lit = input.parse()?;
-            return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
-        }
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
+    impl Parse for GenericMethodArgument {
+        fn parse(input: ParseStream) -> Result<Self> {
+            if input.peek(Lit) {
+                let lit = input.parse()?;
+                return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
+            }
 
-        if input.peek(token::Brace) {
-            let block: ExprBlock = input.parse()?;
-            return Ok(GenericMethodArgument::Const(Expr::Block(block)));
-        }
+            if input.peek(token::Brace) {
+                let block: ExprBlock = input.parse()?;
+                return Ok(GenericMethodArgument::Const(Expr::Block(block)));
+            }
 
-        input.parse().map(GenericMethodArgument::Type)
+            input.parse().map(GenericMethodArgument::Type)
+        }
+    }
+
+    #[cfg(feature = "full")]
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
+    impl Parse for MethodTurbofish {
+        fn parse(input: ParseStream) -> Result<Self> {
+            Ok(MethodTurbofish {
+                colon2_token: input.parse()?,
+                lt_token: input.parse()?,
+                args: {
+                    let mut args = Punctuated::new();
+                    loop {
+                        if input.peek(Token![>]) {
+                            break;
+                        }
+                        let value: GenericMethodArgument = input.parse()?;
+                        args.push_value(value);
+                        if input.peek(Token![>]) {
+                            break;
+                        }
+                        let punct = input.parse()?;
+                        args.push_punct(punct);
+                    }
+                    args
+                },
+                gt_token: input.parse()?,
+            })
+        }
     }
 
     #[cfg(feature = "full")]
@@ -2267,18 +2307,19 @@
     }
 
     impl_by_parsing_expr! {
-        ExprCall, Call, "expected function call expression",
-        ExprMethodCall, MethodCall, "expected method call expression",
-        ExprTuple, Tuple, "expected tuple expression",
-        ExprBinary, Binary, "expected binary operation",
-        ExprCast, Cast, "expected cast expression",
-        ExprType, Type, "expected type ascription expression",
         ExprAssign, Assign, "expected assignment expression",
         ExprAssignOp, AssignOp, "expected compound assignment expression",
+        ExprAwait, Await, "expected await expression",
+        ExprBinary, Binary, "expected binary operation",
+        ExprCall, Call, "expected function call expression",
+        ExprCast, Cast, "expected cast expression",
         ExprField, Field, "expected struct field access",
         ExprIndex, Index, "expected indexing expression",
+        ExprMethodCall, MethodCall, "expected method call expression",
         ExprRange, Range, "expected range expression",
         ExprTry, Try, "expected try expression",
+        ExprTuple, Tuple, "expected tuple expression",
+        ExprType, Type, "expected type ascription expression",
     }
 
     #[cfg(feature = "full")]
@@ -2495,9 +2536,7 @@
                 Pat::Verbatim(_) => {}
                 Pat::Wild(pat) => pat.attrs = attrs,
 
-                #[cfg(test)]
-                Pat::__TestExhaustive(_) => unimplemented!(),
-                #[cfg(not(test))]
+                #[cfg(syn_no_non_exhaustive)]
                 _ => unreachable!(),
             }
             Ok(pat)
@@ -2639,27 +2678,21 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for ExprStruct {
         fn parse(input: ParseStream) -> Result<Self> {
-            let attrs = Vec::new();
             let path: Path = input.parse()?;
-            expr_struct_helper(input, attrs, path)
+            expr_struct_helper(input, path)
         }
     }
 
     #[cfg(feature = "full")]
-    fn expr_struct_helper(
-        input: ParseStream,
-        mut attrs: Vec<Attribute>,
-        path: Path,
-    ) -> Result<ExprStruct> {
+    fn expr_struct_helper(input: ParseStream, path: Path) -> Result<ExprStruct> {
         let content;
         let brace_token = braced!(content in input);
-        attr::parsing::parse_inner(&content, &mut attrs)?;
 
         let mut fields = Punctuated::new();
         while !content.is_empty() {
             if content.peek(Token![..]) {
                 return Ok(ExprStruct {
-                    attrs,
+                    attrs: Vec::new(),
                     brace_token,
                     path,
                     fields,
@@ -2681,7 +2714,7 @@
         }
 
         Ok(ExprStruct {
-            attrs,
+            attrs: Vec::new(),
             brace_token,
             path,
             fields,
@@ -2869,7 +2902,10 @@
         }
         for part in float_repr.split('.') {
             let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
-            let base = mem::replace(e, Expr::__TestExhaustive(crate::private(())));
+            #[cfg(not(syn_no_const_vec_new))]
+            let base = mem::replace(e, Expr::DUMMY);
+            #[cfg(syn_no_const_vec_new)]
+            let base = mem::replace(e, Expr::Verbatim(TokenStream::new()));
             *e = Expr::Field(ExprField {
                 attrs: Vec::new(),
                 base: Box::new(base),
@@ -2890,6 +2926,28 @@
             }
         }
     }
+
+    fn check_cast(input: ParseStream) -> Result<()> {
+        let kind = if input.peek(Token![.]) && !input.peek(Token![..]) {
+            if input.peek2(token::Await) {
+                "`.await`"
+            } else if input.peek2(Ident) && (input.peek3(token::Paren) || input.peek3(Token![::])) {
+                "a method call"
+            } else {
+                "a field access"
+            }
+        } else if input.peek(Token![?]) {
+            "`?`"
+        } else if input.peek(token::Bracket) {
+            "indexing"
+        } else if input.peek(token::Paren) {
+            "a function call"
+        } else {
+            return Ok(());
+        };
+        let msg = format!("casts cannot be followed by {}", kind);
+        Err(input.error(msg))
+    }
 }
 
 #[cfg(feature = "printing")]
@@ -2926,9 +2984,6 @@
     #[cfg(not(feature = "full"))]
     pub(crate) fn outer_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
 
-    #[cfg(not(feature = "full"))]
-    fn inner_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
-
     #[cfg(feature = "full")]
     #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for ExprBox {
@@ -2945,7 +3000,6 @@
         fn to_tokens(&self, tokens: &mut TokenStream) {
             outer_attrs_to_tokens(&self.attrs, tokens);
             self.bracket_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
                 self.elems.to_tokens(tokens);
             });
         }
@@ -3005,7 +3059,6 @@
         fn to_tokens(&self, tokens: &mut TokenStream) {
             outer_attrs_to_tokens(&self.attrs, tokens);
             self.paren_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
                 self.elems.to_tokens(tokens);
                 // If we only have one argument, we need a trailing comma to
                 // distinguish ExprTuple from ExprParen.
@@ -3324,14 +3377,22 @@
 
     #[cfg(feature = "full")]
     #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
+    impl ToTokens for RangeLimits {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
+            match self {
+                RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+                RangeLimits::Closed(t) => t.to_tokens(tokens),
+            }
+        }
+    }
+
+    #[cfg(feature = "full")]
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for ExprRange {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             outer_attrs_to_tokens(&self.attrs, tokens);
             self.from.to_tokens(tokens);
-            match &self.limits {
-                RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
-                RangeLimits::Closed(t) => t.to_tokens(tokens),
-            }
+            self.limits.to_tokens(tokens);
             self.to.to_tokens(tokens);
         }
     }
@@ -3340,7 +3401,7 @@
     impl ToTokens for ExprPath {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             outer_attrs_to_tokens(&self.attrs, tokens);
-            private::print_path(tokens, &self.qself, &self.path);
+            path::printing::print_path(tokens, &self.qself, &self.path);
         }
     }
 
@@ -3402,7 +3463,6 @@
             outer_attrs_to_tokens(&self.attrs, tokens);
             self.path.to_tokens(tokens);
             self.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
                 self.fields.to_tokens(tokens);
                 if let Some(dot2_token) = &self.dot2_token {
                     dot2_token.to_tokens(tokens);
@@ -3420,7 +3480,6 @@
         fn to_tokens(&self, tokens: &mut TokenStream) {
             outer_attrs_to_tokens(&self.attrs, tokens);
             self.bracket_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
                 self.expr.to_tokens(tokens);
                 self.semi_token.to_tokens(tokens);
                 self.len.to_tokens(tokens);
@@ -3444,7 +3503,6 @@
         fn to_tokens(&self, tokens: &mut TokenStream) {
             outer_attrs_to_tokens(&self.attrs, tokens);
             self.paren_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
                 self.expr.to_tokens(tokens);
             });
         }
diff --git a/src/gen/clone.rs b/src/gen/clone.rs
index 1c8814d..a413e3e 100644
--- a/src/gen/clone.rs
+++ b/src/gen/clone.rs
@@ -273,6 +273,7 @@
             Expr::While(v0) => Expr::While(v0.clone()),
             #[cfg(feature = "full")]
             Expr::Yield(v0) => Expr::Yield(v0.clone()),
+            #[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
             _ => unreachable!(),
         }
     }
@@ -845,6 +846,7 @@
             ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
             ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
             ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -908,9 +910,9 @@
         match self {
             GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
             GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
+            GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
             GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
             GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
-            GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
         }
     }
 }
@@ -957,6 +959,7 @@
             ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
             ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
             ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1052,6 +1055,7 @@
             Item::Union(v0) => Item::Union(v0.clone()),
             Item::Use(v0) => Item::Use(v0.clone()),
             Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1474,6 +1478,7 @@
             Pat::Type(v0) => Pat::Type(v0.clone()),
             Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
             Pat::Wild(v0) => Pat::Wild(v0.clone()),
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1821,6 +1826,7 @@
             TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
             TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
             TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1899,6 +1905,7 @@
             Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
             Type::Tuple(v0) => Type::Tuple(v0.clone()),
             Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
diff --git a/src/gen/debug.rs b/src/gen/debug.rs
index 11e197e..a1f0afa 100644
--- a/src/gen/debug.rs
+++ b/src/gen/debug.rs
@@ -587,6 +587,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
             _ => unreachable!(),
         }
     }
@@ -1195,6 +1196,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1266,6 +1268,11 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            GenericArgument::Const(v0) => {
+                let mut formatter = formatter.debug_tuple("Const");
+                formatter.field(v0);
+                formatter.finish()
+            }
             GenericArgument::Binding(v0) => {
                 let mut formatter = formatter.debug_tuple("Binding");
                 formatter.field(v0);
@@ -1276,11 +1283,6 @@
                 formatter.field(v0);
                 formatter.finish()
             }
-            GenericArgument::Const(v0) => {
-                let mut formatter = formatter.debug_tuple("Const");
-                formatter.field(v0);
-                formatter.finish()
-            }
         }
     }
 }
@@ -1367,6 +1369,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1530,6 +1533,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -2088,6 +2092,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -2495,6 +2500,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -2633,6 +2639,7 @@
                 formatter.field(v0);
                 formatter.finish()
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
diff --git a/src/gen/eq.rs b/src/gen/eq.rs
index 40fed0b..20acb80 100644
--- a/src/gen/eq.rs
+++ b/src/gen/eq.rs
@@ -878,15 +878,15 @@
             (GenericArgument::Type(self0), GenericArgument::Type(other0)) => {
                 self0 == other0
             }
+            (GenericArgument::Const(self0), GenericArgument::Const(other0)) => {
+                self0 == other0
+            }
             (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => {
                 self0 == other0
             }
             (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
                 self0 == other0
             }
-            (GenericArgument::Const(self0), GenericArgument::Const(other0)) => {
-                self0 == other0
-            }
             _ => false,
         }
     }
diff --git a/src/gen/fold.rs b/src/gen/fold.rs
index 7916a62..98bb579 100644
--- a/src/gen/fold.rs
+++ b/src/gen/fold.rs
@@ -18,7 +18,7 @@
 #[cfg(all(feature = "derive", not(feature = "full")))]
 macro_rules! full {
     ($e:expr) => {
-        unreachable ! ()
+        unreachable!()
     };
 }
 /// Syntax tree traversal to transform the nodes of an owned syntax tree.
@@ -1130,6 +1130,7 @@
         Expr::Verbatim(_binding_0) => Expr::Verbatim(_binding_0),
         Expr::While(_binding_0) => Expr::While(full!(f.fold_expr_while(_binding_0))),
         Expr::Yield(_binding_0) => Expr::Yield(full!(f.fold_expr_yield(_binding_0))),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1715,6 +1716,7 @@
             ForeignItem::Macro(f.fold_foreign_item_macro(_binding_0))
         }
         ForeignItem::Verbatim(_binding_0) => ForeignItem::Verbatim(_binding_0),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1785,15 +1787,15 @@
         GenericArgument::Type(_binding_0) => {
             GenericArgument::Type(f.fold_type(_binding_0))
         }
+        GenericArgument::Const(_binding_0) => {
+            GenericArgument::Const(f.fold_expr(_binding_0))
+        }
         GenericArgument::Binding(_binding_0) => {
             GenericArgument::Binding(f.fold_binding(_binding_0))
         }
         GenericArgument::Constraint(_binding_0) => {
             GenericArgument::Constraint(f.fold_constraint(_binding_0))
         }
-        GenericArgument::Const(_binding_0) => {
-            GenericArgument::Const(f.fold_expr(_binding_0))
-        }
     }
 }
 #[cfg(feature = "full")]
@@ -1868,6 +1870,7 @@
             ImplItem::Macro(f.fold_impl_item_macro(_binding_0))
         }
         ImplItem::Verbatim(_binding_0) => ImplItem::Verbatim(_binding_0),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1972,6 +1975,7 @@
         Item::Union(_binding_0) => Item::Union(f.fold_item_union(_binding_0)),
         Item::Use(_binding_0) => Item::Use(f.fold_item_use(_binding_0)),
         Item::Verbatim(_binding_0) => Item::Verbatim(_binding_0),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2492,6 +2496,7 @@
         Pat::Type(_binding_0) => Pat::Type(f.fold_pat_type(_binding_0)),
         Pat::Verbatim(_binding_0) => Pat::Verbatim(_binding_0),
         Pat::Wild(_binding_0) => Pat::Wild(f.fold_pat_wild(_binding_0)),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2891,6 +2896,7 @@
             TraitItem::Macro(f.fold_trait_item_macro(_binding_0))
         }
         TraitItem::Verbatim(_binding_0) => TraitItem::Verbatim(_binding_0),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2975,6 +2981,7 @@
         }
         Type::Tuple(_binding_0) => Type::Tuple(f.fold_type_tuple(_binding_0)),
         Type::Verbatim(_binding_0) => Type::Verbatim(_binding_0),
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
diff --git a/src/gen/hash.rs b/src/gen/hash.rs
index 686ed86..d0400e1 100644
--- a/src/gen/hash.rs
+++ b/src/gen/hash.rs
@@ -498,6 +498,7 @@
                 state.write_u8(39u8);
                 v0.hash(state);
             }
+            #[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
             _ => unreachable!(),
         }
     }
@@ -1112,6 +1113,7 @@
                 state.write_u8(4u8);
                 TokenStreamHelper(v0).hash(state);
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1182,15 +1184,15 @@
                 state.write_u8(1u8);
                 v0.hash(state);
             }
-            GenericArgument::Binding(v0) => {
+            GenericArgument::Const(v0) => {
                 state.write_u8(2u8);
                 v0.hash(state);
             }
-            GenericArgument::Constraint(v0) => {
+            GenericArgument::Binding(v0) => {
                 state.write_u8(3u8);
                 v0.hash(state);
             }
-            GenericArgument::Const(v0) => {
+            GenericArgument::Constraint(v0) => {
                 state.write_u8(4u8);
                 v0.hash(state);
             }
@@ -1280,6 +1282,7 @@
                 state.write_u8(4u8);
                 TokenStreamHelper(v0).hash(state);
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1416,6 +1419,7 @@
                 state.write_u8(16u8);
                 TokenStreamHelper(v0).hash(state);
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -1920,6 +1924,7 @@
                 state.write_u8(15u8);
                 v0.hash(state);
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -2339,6 +2344,7 @@
                 state.write_u8(4u8);
                 TokenStreamHelper(v0).hash(state);
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -2464,6 +2470,7 @@
                 state.write_u8(14u8);
                 TokenStreamHelper(v0).hash(state);
             }
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
diff --git a/src/gen/visit.rs b/src/gen/visit.rs
index d84e431..19ddd2e 100644
--- a/src/gen/visit.rs
+++ b/src/gen/visit.rs
@@ -17,7 +17,7 @@
 #[cfg(all(feature = "derive", not(feature = "full")))]
 macro_rules! full {
     ($e:expr) => {
-        unreachable ! ()
+        unreachable!()
     };
 }
 macro_rules! skip {
@@ -1213,6 +1213,7 @@
         Expr::Yield(_binding_0) => {
             full!(v.visit_expr_yield(_binding_0));
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1901,6 +1902,7 @@
         ForeignItem::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1972,15 +1974,15 @@
         GenericArgument::Type(_binding_0) => {
             v.visit_type(_binding_0);
         }
+        GenericArgument::Const(_binding_0) => {
+            v.visit_expr(_binding_0);
+        }
         GenericArgument::Binding(_binding_0) => {
             v.visit_binding(_binding_0);
         }
         GenericArgument::Constraint(_binding_0) => {
             v.visit_constraint(_binding_0);
         }
-        GenericArgument::Const(_binding_0) => {
-            v.visit_expr(_binding_0);
-        }
     }
 }
 #[cfg(feature = "full")]
@@ -2066,6 +2068,7 @@
         ImplItem::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2201,6 +2204,7 @@
         Item::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2800,6 +2804,7 @@
         Pat::Wild(_binding_0) => {
             v.visit_pat_wild(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -3255,6 +3260,7 @@
         TraitItem::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -3383,6 +3389,7 @@
         Type::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
diff --git a/src/gen/visit_mut.rs b/src/gen/visit_mut.rs
index 51e10b6..239709d 100644
--- a/src/gen/visit_mut.rs
+++ b/src/gen/visit_mut.rs
@@ -17,7 +17,7 @@
 #[cfg(all(feature = "derive", not(feature = "full")))]
 macro_rules! full {
     ($e:expr) => {
-        unreachable ! ()
+        unreachable!()
     };
 }
 macro_rules! skip {
@@ -1214,6 +1214,7 @@
         Expr::Yield(_binding_0) => {
             full!(v.visit_expr_yield_mut(_binding_0));
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1902,6 +1903,7 @@
         ForeignItem::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -1973,15 +1975,15 @@
         GenericArgument::Type(_binding_0) => {
             v.visit_type_mut(_binding_0);
         }
+        GenericArgument::Const(_binding_0) => {
+            v.visit_expr_mut(_binding_0);
+        }
         GenericArgument::Binding(_binding_0) => {
             v.visit_binding_mut(_binding_0);
         }
         GenericArgument::Constraint(_binding_0) => {
             v.visit_constraint_mut(_binding_0);
         }
-        GenericArgument::Const(_binding_0) => {
-            v.visit_expr_mut(_binding_0);
-        }
     }
 }
 #[cfg(feature = "full")]
@@ -2066,6 +2068,7 @@
         ImplItem::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2201,6 +2204,7 @@
         Item::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -2800,6 +2804,7 @@
         Pat::Wild(_binding_0) => {
             v.visit_pat_wild_mut(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -3255,6 +3260,7 @@
         TraitItem::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
@@ -3383,6 +3389,7 @@
         Type::Verbatim(_binding_0) => {
             skip!(_binding_0);
         }
+        #[cfg(syn_no_non_exhaustive)]
         _ => unreachable!(),
     }
 }
diff --git a/src/generics.rs b/src/generics.rs
index 9c2802f..6d4fe84 100644
--- a/src/generics.rs
+++ b/src/generics.rs
@@ -828,6 +828,31 @@
         }
     }
 
+    impl TypeParamBound {
+        pub(crate) fn parse_multiple(
+            input: ParseStream,
+            allow_plus: bool,
+        ) -> Result<Punctuated<Self, Token![+]>> {
+            let mut bounds = Punctuated::new();
+            loop {
+                bounds.push_value(input.parse()?);
+                if !(allow_plus && input.peek(Token![+])) {
+                    break;
+                }
+                bounds.push_punct(input.parse()?);
+                if !(input.peek(Ident::peek_any)
+                    || input.peek(Token![::])
+                    || input.peek(Token![?])
+                    || input.peek(Lifetime)
+                    || input.peek(token::Paren))
+                {
+                    break;
+                }
+            }
+            Ok(bounds)
+        }
+    }
+
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for TraitBound {
         fn parse(input: ParseStream) -> Result<Self> {
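
`TypeParamBound::parse_multiple` above centralizes parsing of `+`-separated bound lists; its loop stops as soon as the token after a `+` can no longer start a bound (identifier, `::`, `?`, lifetime, or parenthesis). Such lists surface through public API, for example in trait-object types. A rough sketch, not part of this patch (assumes the "derive" or "full" feature together with "parsing"):

```rust
fn main() {
    let ty: syn::Type = syn::parse_str("dyn std::io::Write + Send + 'static").unwrap();
    match ty {
        // Three bounds: the trait path, `Send`, and the `'static` lifetime.
        syn::Type::TraitObject(obj) => assert_eq!(obj.bounds.len(), 3),
        _ => panic!("expected a trait object type"),
    }
}
```
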
diff --git a/src/group.rs b/src/group.rs
index 6b05710..7fd273c 100644
--- a/src/group.rs
+++ b/src/group.rs
@@ -136,7 +136,7 @@
 #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
 macro_rules! parenthesized {
     ($content:ident in $cursor:expr) => {
-        match $crate::group::parse_parens(&$cursor) {
+        match $crate::__private::parse_parens(&$cursor) {
             $crate::__private::Ok(parens) => {
                 $content = parens.content;
                 parens.token
@@ -214,7 +214,7 @@
 #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
 macro_rules! braced {
     ($content:ident in $cursor:expr) => {
-        match $crate::group::parse_braces(&$cursor) {
+        match $crate::__private::parse_braces(&$cursor) {
             $crate::__private::Ok(braces) => {
                 $content = braces.content;
                 braces.token
@@ -269,7 +269,7 @@
 #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
 macro_rules! bracketed {
     ($content:ident in $cursor:expr) => {
-        match $crate::group::parse_brackets(&$cursor) {
+        match $crate::__private::parse_brackets(&$cursor) {
             $crate::__private::Ok(brackets) => {
                 $content = brackets.content;
                 brackets.token
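
The `parenthesized!`, `braced!`, and `bracketed!` macros keep their surface syntax; only the helper they expand to moves from `syn::group` to `syn::__private`, so existing callers need no change. A small usage sketch, not part of this patch (the `IdentList` type and `main` are invented for illustration; assumes default syn features):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{parenthesized, token, Ident, Token};

// Parses input of the form `(a, b, c)`.
struct IdentList {
    paren_token: token::Paren,
    idents: Punctuated<Ident, Token![,]>,
}

impl Parse for IdentList {
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(IdentList {
            // Expands to a call into syn's private parsing helpers.
            paren_token: parenthesized!(content in input),
            idents: content.parse_terminated(Ident::parse)?,
        })
    }
}

fn main() {
    let list: IdentList = syn::parse_str("(a, b, c)").unwrap();
    assert_eq!(list.idents.len(), 3);
}
```
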
diff --git a/src/ident.rs b/src/ident.rs
index 61e0887..8e3d8bd 100644
--- a/src/ident.rs
+++ b/src/ident.rs
@@ -6,7 +6,6 @@
 use crate::parse::{Parse, ParseStream, Result};
 #[cfg(feature = "parsing")]
 use crate::token::Token;
-use unicode_xid::UnicodeXID;
 
 pub use proc_macro2::Ident;
 
@@ -90,11 +89,11 @@
 pub fn xid_ok(symbol: &str) -> bool {
     let mut chars = symbol.chars();
     let first = chars.next().unwrap();
-    if !(UnicodeXID::is_xid_start(first) || first == '_') {
+    if !(first == '_' || unicode_ident::is_xid_start(first)) {
         return false;
     }
     for ch in chars {
-        if !UnicodeXID::is_xid_continue(ch) {
+        if !unicode_ident::is_xid_continue(ch) {
             return false;
         }
     }
diff --git a/src/item.rs b/src/item.rs
index 917d4f1..a1ef7ab 100644
--- a/src/item.rs
+++ b/src/item.rs
@@ -17,6 +17,7 @@
     ///
     /// [syntax tree enum]: Expr#syntax-tree-enums
     #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum Item {
         /// A constant item: `const MAX: u16 = 65535`.
         Const(ItemConst),
@@ -71,18 +72,17 @@
         /// Tokens forming an item not interpreted by Syn.
         Verbatim(TokenStream),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
         //
-        //     match expr {
-        //         Item::Const(e) => {...}
-        //         Item::Enum(e) => {...}
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match item {
+        //         Item::Const(item) => {...}
+        //         Item::Enum(item) => {...}
         //         ...
-        //         Item::Verbatim(e) => {...}
+        //         Item::Verbatim(item) => {...}
         //
-        //         #[cfg(test)]
-        //         Item::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -90,12 +90,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, Item will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -381,9 +378,7 @@
             | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
             Item::Verbatim(_) => Vec::new(),
 
-            #[cfg(test)]
-            Item::__TestExhaustive(_) => unimplemented!(),
-            #[cfg(not(test))]
+            #[cfg(syn_no_non_exhaustive)]
             _ => unreachable!(),
         }
     }
@@ -564,6 +559,7 @@
     ///
     /// [syntax tree enum]: Expr#syntax-tree-enums
     #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum ForeignItem {
         /// A foreign function in an `extern` block.
         Fn(ForeignItemFn),
@@ -580,18 +576,17 @@
         /// Tokens in an `extern` block not interpreted by Syn.
         Verbatim(TokenStream),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
         //
-        //     match expr {
-        //         ForeignItem::Fn(e) => {...}
-        //         ForeignItem::Static(e) => {...}
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match item {
+        //         ForeignItem::Fn(item) => {...}
+        //         ForeignItem::Static(item) => {...}
         //         ...
-        //         ForeignItem::Verbatim(e) => {...}
+        //         ForeignItem::Verbatim(item) => {...}
         //
-        //         #[cfg(test)]
-        //         ForeignItem::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -599,12 +594,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, ForeignItem will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -675,6 +667,7 @@
     ///
     /// [syntax tree enum]: Expr#syntax-tree-enums
     #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum TraitItem {
         /// An associated constant within the definition of a trait.
         Const(TraitItemConst),
@@ -691,18 +684,17 @@
         /// Tokens within the definition of a trait not interpreted by Syn.
         Verbatim(TokenStream),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
         //
-        //     match expr {
-        //         TraitItem::Const(e) => {...}
-        //         TraitItem::Method(e) => {...}
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match item {
+        //         TraitItem::Const(item) => {...}
+        //         TraitItem::Method(item) => {...}
         //         ...
-        //         TraitItem::Verbatim(e) => {...}
+        //         TraitItem::Verbatim(item) => {...}
         //
-        //         #[cfg(test)]
-        //         TraitItem::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -710,12 +702,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, TraitItem will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -788,6 +777,7 @@
     ///
     /// [syntax tree enum]: Expr#syntax-tree-enums
     #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum ImplItem {
         /// An associated constant within an impl block.
         Const(ImplItemConst),
@@ -804,18 +794,17 @@
         /// Tokens within an impl block not interpreted by Syn.
         Verbatim(TokenStream),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
         //
-        //     match expr {
-        //         ImplItem::Const(e) => {...}
-        //         ImplItem::Method(e) => {...}
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match item {
+        //         ImplItem::Const(item) => {...}
+        //         ImplItem::Method(item) => {...}
         //         ...
-        //         ImplItem::Verbatim(e) => {...}
+        //         ImplItem::Verbatim(item) => {...}
         //
-        //         #[cfg(test)]
-        //         ImplItem::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -823,12 +812,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, ImplItem will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -1177,14 +1163,25 @@
         semi_token: Token![;],
     }
 
-    impl Parse for FlexibleItemType {
-        fn parse(input: ParseStream) -> Result<Self> {
+    enum WhereClauseLocation {
+        // type Ty<T> where T: 'static = T;
+        BeforeEq,
+        // type Ty<T> = T where T: 'static;
+        #[allow(dead_code)]
+        AfterEq,
+        // TODO: goes away once the migration period on rust-lang/rust#89122 is over
+        Both,
+    }
+
+    impl FlexibleItemType {
+        fn parse(input: ParseStream, where_clause_location: WhereClauseLocation) -> Result<Self> {
             let vis: Visibility = input.parse()?;
             let defaultness: Option<Token![default]> = input.parse()?;
             let type_token: Token![type] = input.parse()?;
             let ident: Ident = input.parse()?;
             let mut generics: Generics = input.parse()?;
             let colon_token: Option<Token![:]> = input.parse()?;
+
             let mut bounds = Punctuated::new();
             if colon_token.is_some() {
                 loop {
@@ -1198,12 +1195,29 @@
                     bounds.push_punct(input.parse::<Token![+]>()?);
                 }
             }
-            generics.where_clause = input.parse()?;
+
+            match where_clause_location {
+                WhereClauseLocation::BeforeEq | WhereClauseLocation::Both => {
+                    generics.where_clause = input.parse()?;
+                }
+                _ => {}
+            }
+
             let ty = if let Some(eq_token) = input.parse()? {
                 Some((eq_token, input.parse::<Type>()?))
             } else {
                 None
             };
+
+            match where_clause_location {
+                WhereClauseLocation::AfterEq | WhereClauseLocation::Both
+                    if generics.where_clause.is_none() =>
+                {
+                    generics.where_clause = input.parse()?;
+                }
+                _ => {}
+            }
+
             let semi_token: Token![;] = input.parse()?;
 
             Ok(FlexibleItemType {
@@ -1800,9 +1814,7 @@
                 ForeignItem::Macro(item) => &mut item.attrs,
                 ForeignItem::Verbatim(_) => return Ok(item),
 
-                #[cfg(test)]
-                ForeignItem::__TestExhaustive(_) => unimplemented!(),
-                #[cfg(not(test))]
+                #[cfg(syn_no_non_exhaustive)]
                 _ => unreachable!(),
             };
             attrs.append(item_attrs);
@@ -1868,7 +1880,7 @@
             bounds: _,
             ty,
             semi_token,
-        } = input.parse()?;
+        } = FlexibleItemType::parse(input, WhereClauseLocation::BeforeEq)?;
 
         if defaultness.is_some()
             || generics.lt_token.is_some()
@@ -1937,7 +1949,7 @@
             bounds: _,
             ty,
             semi_token,
-        } = input.parse()?;
+        } = FlexibleItemType::parse(input, WhereClauseLocation::BeforeEq)?;
 
         if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
             Ok(Item::Verbatim(verbatim::between(begin, input)))
@@ -1959,13 +1971,12 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for ItemStruct {
         fn parse(input: ParseStream) -> Result<Self> {
-            let mut attrs = input.call(Attribute::parse_outer)?;
+            let attrs = input.call(Attribute::parse_outer)?;
             let vis = input.parse::<Visibility>()?;
             let struct_token = input.parse::<Token![struct]>()?;
             let ident = input.parse::<Ident>()?;
             let generics = input.parse::<Generics>()?;
-            let (where_clause, fields, semi_token) =
-                derive::parsing::data_struct(input, &mut attrs)?;
+            let (where_clause, fields, semi_token) = derive::parsing::data_struct(input)?;
             Ok(ItemStruct {
                 attrs,
                 vis,
@@ -1984,13 +1995,12 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for ItemEnum {
         fn parse(input: ParseStream) -> Result<Self> {
-            let mut attrs = input.call(Attribute::parse_outer)?;
+            let attrs = input.call(Attribute::parse_outer)?;
             let vis = input.parse::<Visibility>()?;
             let enum_token = input.parse::<Token![enum]>()?;
             let ident = input.parse::<Ident>()?;
             let generics = input.parse::<Generics>()?;
-            let (where_clause, brace_token, variants) =
-                derive::parsing::data_enum(input, &mut attrs)?;
+            let (where_clause, brace_token, variants) = derive::parsing::data_enum(input)?;
             Ok(ItemEnum {
                 attrs,
                 vis,
@@ -2009,12 +2019,12 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for ItemUnion {
         fn parse(input: ParseStream) -> Result<Self> {
-            let mut attrs = input.call(Attribute::parse_outer)?;
+            let attrs = input.call(Attribute::parse_outer)?;
             let vis = input.parse::<Visibility>()?;
             let union_token = input.parse::<Token![union]>()?;
             let ident = input.parse::<Ident>()?;
             let generics = input.parse::<Generics>()?;
-            let (where_clause, fields) = derive::parsing::data_union(input, &mut attrs)?;
+            let (where_clause, fields) = derive::parsing::data_union(input)?;
             Ok(ItemUnion {
                 attrs,
                 vis,
@@ -2238,9 +2248,7 @@
                 TraitItem::Macro(item) => &mut item.attrs,
                 TraitItem::Verbatim(_) => unreachable!(),
 
-                #[cfg(test)]
-                TraitItem::__TestExhaustive(_) => unimplemented!(),
-                #[cfg(not(test))]
+                #[cfg(syn_no_non_exhaustive)]
                 _ => unreachable!(),
             };
             attrs.append(item_attrs);
@@ -2328,7 +2336,6 @@
                 }
             }
 
-            generics.where_clause = input.parse()?;
             let default = if input.peek(Token![=]) {
                 let eq_token: Token![=] = input.parse()?;
                 let default: Type = input.parse()?;
@@ -2336,6 +2343,8 @@
             } else {
                 None
             };
+
+            generics.where_clause = input.parse()?;
             let semi_token: Token![;] = input.parse()?;
 
             Ok(TraitItemType {
@@ -2362,7 +2371,7 @@
             bounds,
             ty,
             semi_token,
-        } = input.parse()?;
+        } = FlexibleItemType::parse(input, WhereClauseLocation::Both)?;
 
         if defaultness.is_some() || vis.is_some() {
             Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
@@ -2580,9 +2589,7 @@
                     ImplItem::Macro(item) => &mut item.attrs,
                     ImplItem::Verbatim(_) => return Ok(item),
 
-                    #[cfg(test)]
-                    ImplItem::__TestExhaustive(_) => unimplemented!(),
-                    #[cfg(not(test))]
+                    #[cfg(syn_no_non_exhaustive)]
                     _ => unreachable!(),
                 };
                 attrs.append(item_attrs);
@@ -2661,20 +2668,26 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for ImplItemType {
         fn parse(input: ParseStream) -> Result<Self> {
+            let attrs = input.call(Attribute::parse_outer)?;
+            let vis: Visibility = input.parse()?;
+            let defaultness: Option<Token![default]> = input.parse()?;
+            let type_token: Token![type] = input.parse()?;
+            let ident: Ident = input.parse()?;
+            let mut generics: Generics = input.parse()?;
+            let eq_token: Token![=] = input.parse()?;
+            let ty: Type = input.parse()?;
+            generics.where_clause = input.parse()?;
+            let semi_token: Token![;] = input.parse()?;
             Ok(ImplItemType {
-                attrs: input.call(Attribute::parse_outer)?,
-                vis: input.parse()?,
-                defaultness: input.parse()?,
-                type_token: input.parse()?,
-                ident: input.parse()?,
-                generics: {
-                    let mut generics: Generics = input.parse()?;
-                    generics.where_clause = input.parse()?;
-                    generics
-                },
-                eq_token: input.parse()?,
-                ty: input.parse()?,
-                semi_token: input.parse()?,
+                attrs,
+                vis,
+                defaultness,
+                type_token,
+                ident,
+                generics,
+                eq_token,
+                ty,
+                semi_token,
             })
         }
     }
@@ -2690,7 +2703,7 @@
             bounds: _,
             ty,
             semi_token,
-        } = input.parse()?;
+        } = FlexibleItemType::parse(input, WhereClauseLocation::Both)?;
 
         if colon_token.is_some() || ty.is_none() {
             Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
@@ -2752,7 +2765,6 @@
     use super::*;
     use crate::attr::FilterAttrs;
     use crate::print::TokensOrDefault;
-    use crate::punctuated::Pair;
     use proc_macro2::TokenStream;
     use quote::{ToTokens, TokenStreamExt};
 
@@ -3106,11 +3118,11 @@
                 TokensOrDefault(&self.colon_token).to_tokens(tokens);
                 self.bounds.to_tokens(tokens);
             }
-            self.generics.where_clause.to_tokens(tokens);
             if let Some((eq_token, default)) = &self.default {
                 eq_token.to_tokens(tokens);
                 default.to_tokens(tokens);
             }
+            self.generics.where_clause.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
@@ -3171,9 +3183,9 @@
             self.type_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
-            self.generics.where_clause.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
+            self.generics.where_clause.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
@@ -3270,16 +3282,9 @@
             self.generics.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 let mut last_is_variadic = false;
-                for input in self.inputs.pairs() {
-                    match input {
-                        Pair::Punctuated(input, comma) => {
-                            maybe_variadic_to_tokens(input, tokens);
-                            comma.to_tokens(tokens);
-                        }
-                        Pair::End(input) => {
-                            last_is_variadic = maybe_variadic_to_tokens(input, tokens);
-                        }
-                    }
+                for pair in self.inputs.pairs() {
+                    last_is_variadic = maybe_variadic_to_tokens(pair.value(), tokens);
+                    pair.punct().to_tokens(tokens);
                 }
                 if self.variadic.is_some() && !last_is_variadic {
                     if !self.inputs.empty_or_trailing() {
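
Among the src/item.rs changes above, `ImplItemType` and `TraitItemType` now parse and print the where clause after the `= Type` part, in line with the placement migration tracked in rust-lang/rust#89122 (see `WhereClauseLocation`). A minimal sketch of the accepted form, not part of this patch (assumes the "full" and "parsing" features; the example string is invented):

```rust
fn main() {
    let item: syn::ImplItemType =
        syn::parse_str("type Output<T> = Vec<T> where T: Clone;").unwrap();
    assert_eq!(item.ident, "Output");
    // The where clause written after `= Vec<T>` lands on the generics.
    assert!(item.generics.where_clause.is_some());
}
```
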
diff --git a/src/lib.rs b/src/lib.rs
index b3efd15..e47ba28 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,7 +2,7 @@
 //!
 //! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
 //! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
-//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
 //!
 //! <br>
 //!
@@ -250,56 +250,42 @@
 //!   dynamic library libproc_macro from rustc toolchain.
 
 // Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/1.0.86")]
+#![doc(html_root_url = "https://docs.rs/syn/1.0.107")]
 #![cfg_attr(doc_cfg, feature(doc_cfg))]
 #![allow(non_camel_case_types)]
-// Ignored clippy lints.
 #![allow(
+    clippy::bool_to_int_with_if,
     clippy::cast_lossless,
-    clippy::collapsible_match, // https://github.com/rust-lang/rust-clippy/issues/7575
+    clippy::cast_possible_truncation,
+    clippy::cast_possible_wrap,
+    clippy::cast_ptr_alignment,
+    clippy::default_trait_access,
     clippy::doc_markdown,
-    clippy::eval_order_dependence,
+    clippy::expl_impl_clone_on_copy,
+    clippy::explicit_auto_deref,
+    clippy::if_not_else,
     clippy::inherent_to_string,
+    clippy::items_after_statements,
     clippy::large_enum_variant,
-    clippy::let_underscore_drop,
     clippy::manual_assert,
-    clippy::manual_map, // https://github.com/rust-lang/rust-clippy/issues/6795
     clippy::match_on_vec_items,
+    clippy::match_same_arms,
+    clippy::match_wildcard_for_single_variants, // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
+    clippy::missing_errors_doc,
     clippy::missing_panics_doc,
+    clippy::module_name_repetitions,
+    clippy::must_use_candidate,
     clippy::needless_doctest_main,
     clippy::needless_pass_by_value,
     clippy::never_loop,
-    clippy::return_self_not_must_use,
-    clippy::too_many_arguments,
-    clippy::trivially_copy_pass_by_ref,
-    clippy::unnecessary_unwrap,
-    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6983
-    clippy::wrong_self_convention
-)]
-// Ignored clippy_pedantic lints.
-#![allow(
-    clippy::cast_possible_truncation,
-    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/7127
-    clippy::cloned_instead_of_copied,
-    clippy::default_trait_access,
-    clippy::empty_enum,
-    clippy::expl_impl_clone_on_copy,
-    clippy::if_not_else,
-    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/8285
-    clippy::iter_not_returning_iterator,
-    clippy::match_same_arms,
-    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
-    clippy::match_wildcard_for_single_variants,
-    clippy::missing_errors_doc,
-    clippy::module_name_repetitions,
-    clippy::must_use_candidate,
-    clippy::option_if_let_else,
     clippy::redundant_else,
-    clippy::shadow_unrelated,
+    clippy::return_self_not_must_use,
     clippy::similar_names,
     clippy::single_match_else,
+    clippy::too_many_arguments,
     clippy::too_many_lines,
-    clippy::unseparated_literal_suffix,
+    clippy::trivially_copy_pass_by_ref,
+    clippy::unnecessary_unwrap,
     clippy::used_underscore_binding,
     clippy::wildcard_imports
 )]
@@ -310,7 +296,6 @@
 ))]
 extern crate proc_macro;
 extern crate proc_macro2;
-extern crate unicode_xid;
 
 #[cfg(feature = "printing")]
 extern crate quote;
@@ -318,11 +303,9 @@
 #[macro_use]
 mod macros;
 
-// Not public API.
 #[cfg(feature = "parsing")]
-#[doc(hidden)]
 #[macro_use]
-pub mod group;
+mod group;
 
 #[macro_use]
 pub mod token;
@@ -446,6 +429,7 @@
 #[cfg(feature = "parsing")]
 #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
 pub mod buffer;
+mod drops;
 #[cfg(feature = "parsing")]
 #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
 pub mod ext;
@@ -829,17 +813,6 @@
 #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 mod print;
 
-#[cfg(any(feature = "full", feature = "derive"))]
-use crate::__private::private;
-
-////////////////////////////////////////////////////////////////////////////////
-
-// https://github.com/rust-lang/rust/issues/62830
-#[cfg(feature = "parsing")]
-mod rustdoc_workaround {
-    pub use crate::parse::{self as parse_module};
-}
-
 ////////////////////////////////////////////////////////////////////////////////
 
 mod error;
diff --git a/src/lit.rs b/src/lit.rs
index bb61ee2..130b40e 100644
--- a/src/lit.rs
+++ b/src/lit.rs
@@ -224,7 +224,7 @@
 
         // Parse string literal into a token stream with every span equal to the
         // original literal's span.
-        let mut tokens = crate::parse_str(&self.value())?;
+        let mut tokens = TokenStream::from_str(&self.value())?;
         tokens = respan_token_stream(tokens, self.span());
 
         parser.parse2(tokens)
@@ -1480,7 +1480,7 @@
 
         let mut bytes = input.to_owned().into_bytes();
 
-        let start = (*bytes.get(0)? == b'-') as usize;
+        let start = (*bytes.first()? == b'-') as usize;
         match bytes.get(start)? {
             b'0'..=b'9' => {}
             _ => return None,
diff --git a/src/lookahead.rs b/src/lookahead.rs
index f0ed628..0cf5cf5a8 100644
--- a/src/lookahead.rs
+++ b/src/lookahead.rs
@@ -18,6 +18,9 @@
 /// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
 /// [`ParseStream::lookahead1`]: crate::parse::ParseBuffer::lookahead1
 ///
+/// Consuming tokens from the source stream after constructing a lookahead
+/// object does not also advance the lookahead object.
+///
 /// # Example
 ///
 /// ```
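
The sentence added to the `Lookahead1` docs above says that consuming tokens from the source stream after constructing the lookahead does not advance the lookahead itself. A small sketch of that behavior, not part of this patch (the `Demo` type and `main` are invented for illustration; assumes default syn features):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, Token};

struct Demo {
    name: Ident,
    comma: Token![,],
}

impl Parse for Demo {
    fn parse(input: ParseStream) -> Result<Self> {
        // The lookahead snapshots the position where `name` begins.
        let lookahead = input.lookahead1();
        let name: Ident = input.parse()?;
        // `input` now points at the comma, but the lookahead has not moved:
        // it still reports an identifier at its saved position.
        assert!(lookahead.peek(Ident));
        assert!(!lookahead.peek(Token![,]));
        let comma: Token![,] = input.parse()?;
        Ok(Demo { name, comma })
    }
}

fn main() {
    let _demo: Demo = syn::parse_str("foo ,").unwrap();
}
```
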
diff --git a/src/macros.rs b/src/macros.rs
index 5097da9..da10a1a 100644
--- a/src/macros.rs
+++ b/src/macros.rs
@@ -1,3 +1,7 @@
+#[cfg_attr(
+    not(any(feature = "full", feature = "derive")),
+    allow(unknown_lints, unused_macro_rules)
+)]
 macro_rules! ast_struct {
     (
         [$($attrs_pub:tt)*]
@@ -55,15 +59,6 @@
 macro_rules! ast_enum_of_structs {
     (
         $(#[$enum_attr:meta])*
-        $pub:ident $enum:ident $name:ident #$tag:ident $body:tt
-        $($remaining:tt)*
-    ) => {
-        ast_enum!($(#[$enum_attr])* $pub $enum $name #$tag $body);
-        ast_enum_of_structs_impl!($pub $enum $name $body $($remaining)*);
-    };
-
-    (
-        $(#[$enum_attr:meta])*
         $pub:ident $enum:ident $name:ident $body:tt
         $($remaining:tt)*
     ) => {
@@ -76,7 +71,8 @@
     (
         $pub:ident $enum:ident $name:ident {
             $(
-                $(#[$variant_attr:meta])*
+                $(#[cfg $cfg_attr:tt])*
+                $(#[doc $($doc_attr:tt)*])*
                 $variant:ident $( ($($member:ident)::+) )*,
             )*
         }
@@ -95,7 +91,13 @@
             $($remaining)*
             ()
             tokens
-            $name { $($variant $($($member)::+)*,)* }
+            $name {
+                $(
+                    $(#[cfg $cfg_attr])*
+                    $(#[doc $($doc_attr)*])*
+                    $variant $($($member)::+)*,
+                )*
+            }
         }
     };
 }
@@ -104,9 +106,6 @@
     // No From<TokenStream> for verbatim variants.
     ($name:ident::Verbatim, $member:ident) => {};
 
-    // No From<TokenStream> for private variants.
-    ($name:ident::$variant:ident, crate::private) => {};
-
     ($name:ident::$variant:ident, $member:ident) => {
         impl From<$member> for $name {
             fn from(e: $member) -> $name {
@@ -117,26 +116,37 @@
 }
 
 #[cfg(feature = "printing")]
+#[cfg_attr(
+    not(any(feature = "full", feature = "derive")),
+    allow(unknown_lints, unused_macro_rules)
+)]
 macro_rules! generate_to_tokens {
     (do_not_generate_to_tokens $($foo:tt)*) => ();
 
-    (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident, $($next:tt)*}) => {
+    (
+        ($($arms:tt)*) $tokens:ident $name:ident {
+            $(#[cfg $cfg_attr:tt])*
+            $(#[doc $($doc_attr:tt)*])*
+            $variant:ident,
+            $($next:tt)*
+        }
+    ) => {
         generate_to_tokens!(
-            ($($arms)* $name::$variant => {})
+            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant => {})
             $tokens $name { $($next)* }
         );
     };
 
-    (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident $member:ident, $($next:tt)*}) => {
+    (
+        ($($arms:tt)*) $tokens:ident $name:ident {
+            $(#[cfg $cfg_attr:tt])*
+            $(#[doc $($doc_attr:tt)*])*
+            $variant:ident $member:ident,
+            $($next:tt)*
+        }
+    ) => {
         generate_to_tokens!(
-            ($($arms)* $name::$variant(_e) => _e.to_tokens($tokens),)
-            $tokens $name { $($next)* }
-        );
-    };
-
-    (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident crate::private, $($next:tt)*}) => {
-        generate_to_tokens!(
-            ($($arms)* $name::$variant(_) => unreachable!(),)
+            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant(_e) => _e.to_tokens($tokens),)
             $tokens $name { $($next)* }
         );
     };
@@ -162,7 +172,6 @@
 }
 
 macro_rules! check_keyword_matches {
-    (struct struct) => {};
     (enum enum) => {};
     (pub pub) => {};
 }
diff --git a/src/parse.rs b/src/parse.rs
index d85968b..bac4ca0 100644
--- a/src/parse.rs
+++ b/src/parse.rs
@@ -199,6 +199,8 @@
 use proc_macro2::{self, Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree};
 use std::cell::Cell;
 use std::fmt::{self, Debug, Display};
+#[cfg(feature = "extra-traits")]
+use std::hash::{Hash, Hasher};
 use std::marker::PhantomData;
 use std::mem;
 use std::ops::Deref;
@@ -245,6 +247,7 @@
 /// - One of [the `syn::parse*` functions][syn-parse]; or
 /// - A method of the [`Parser`] trait.
 ///
+/// [`parse_macro_input!`]: crate::parse_macro_input!
 /// [syn-parse]: self#the-synparse-functions
 pub struct ParseBuffer<'a> {
     scope: Span,
@@ -1215,7 +1218,6 @@
         }
     }
 
-    #[doc(hidden)]
     #[cfg(any(feature = "full", feature = "derive"))]
     fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
         let buf = TokenBuffer::new2(tokens);
@@ -1231,7 +1233,6 @@
         }
     }
 
-    #[doc(hidden)]
     #[cfg(any(feature = "full", feature = "derive"))]
     fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
         self(input)
@@ -1285,3 +1286,29 @@
         Ok(Nothing)
     }
 }
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+impl Debug for Nothing {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str("Nothing")
+    }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+impl Eq for Nothing {}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for Nothing {
+    fn eq(&self, _other: &Self) -> bool {
+        true
+    }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
+impl Hash for Nothing {
+    fn hash<H: Hasher>(&self, _state: &mut H) {}
+}
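
With these impls, a syn built with the `extra-traits` feature lets downstream code compare, hash, and debug-print `syn::parse::Nothing`. A trivial sketch (not part of the patch):

```rust
use syn::parse::Nothing;

// Requires syn compiled with the "extra-traits" feature.
fn demo() {
    assert_eq!(Nothing, Nothing);                    // PartialEq / Eq
    assert_eq!(format!("{:?}", Nothing), "Nothing"); // Debug
}
```
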
diff --git a/src/parse_macro_input.rs b/src/parse_macro_input.rs
index 8e1a5ec..6163cd7 100644
--- a/src/parse_macro_input.rs
+++ b/src/parse_macro_input.rs
@@ -4,7 +4,7 @@
 /// Refer to the [`parse` module] documentation for more details about parsing
 /// in Syn.
 ///
-/// [`parse` module]: crate::rustdoc_workaround::parse_module
+/// [`parse` module]: mod@crate::parse
 ///
 /// <br>
 ///
@@ -51,7 +51,7 @@
 /// This macro can also be used with the [`Parser` trait] for types that have
 /// multiple ways that they can be parsed.
 ///
-/// [`Parser` trait]: crate::rustdoc_workaround::parse_module::Parser
+/// [`Parser` trait]: crate::parse::Parser
 ///
 /// ```
 /// # extern crate proc_macro;
diff --git a/src/pat.rs b/src/pat.rs
index bfabfe6..b279186 100644
--- a/src/pat.rs
+++ b/src/pat.rs
@@ -14,6 +14,7 @@
     ///
     /// [syntax tree enum]: Expr#syntax-tree-enums
     #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum Pat {
         /// A box pattern: `box v`.
         Box(PatBox),
@@ -72,18 +73,17 @@
         /// A pattern that matches any value: `_`.
         Wild(PatWild),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
         //
-        //     match expr {
-        //         Pat::Box(e) => {...}
-        //         Pat::Ident(e) => {...}
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match pat {
+        //         Pat::Box(pat) => {...}
+        //         Pat::Ident(pat) => {...}
         //         ...
-        //         Pat::Wild(e) => {...}
+        //         Pat::Wild(pat) => {...}
         //
-        //         #[cfg(test)]
-        //         Pat::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -91,12 +91,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, Pat will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -403,11 +400,11 @@
         }
 
         if input.peek(token::Brace) {
-            let pat = pat_struct(input, path)?;
+            let pat = pat_struct(begin.fork(), input, path)?;
             if qself.is_some() {
                 Ok(Pat::Verbatim(verbatim::between(begin, input)))
             } else {
-                Ok(Pat::Struct(pat))
+                Ok(pat)
             }
         } else if input.peek(token::Paren) {
             let pat = pat_tuple_struct(input, path)?;
@@ -468,13 +465,23 @@
         })
     }
 
-    fn pat_struct(input: ParseStream, path: Path) -> Result<PatStruct> {
+    fn pat_struct(begin: ParseBuffer, input: ParseStream, path: Path) -> Result<Pat> {
         let content;
         let brace_token = braced!(content in input);
 
         let mut fields = Punctuated::new();
-        while !content.is_empty() && !content.peek(Token![..]) {
-            let value = content.call(field_pat)?;
+        let mut dot2_token = None;
+        while !content.is_empty() {
+            let attrs = content.call(Attribute::parse_outer)?;
+            if content.peek(Token![..]) {
+                dot2_token = Some(content.parse()?);
+                if !attrs.is_empty() {
+                    return Ok(Pat::Verbatim(verbatim::between(begin, input)));
+                }
+                break;
+            }
+            let mut value = content.call(field_pat)?;
+            value.attrs = attrs;
             fields.push_value(value);
             if content.is_empty() {
                 break;
@@ -483,19 +490,13 @@
             fields.push_punct(punct);
         }
 
-        let dot2_token = if fields.empty_or_trailing() && content.peek(Token![..]) {
-            Some(content.parse()?)
-        } else {
-            None
-        };
-
-        Ok(PatStruct {
+        Ok(Pat::Struct(PatStruct {
             attrs: Vec::new(),
             path,
             brace_token,
             fields,
             dot2_token,
-        })
+        }))
     }
 
     impl Member {
@@ -508,7 +509,6 @@
     }
 
     fn field_pat(input: ParseStream) -> Result<FieldPat> {
-        let attrs = input.call(Attribute::parse_outer)?;
         let boxed: Option<Token![box]> = input.parse()?;
         let by_ref: Option<Token![ref]> = input.parse()?;
         let mutability: Option<Token![mut]> = input.parse()?;
@@ -518,7 +518,7 @@
             || member.is_unnamed()
         {
             return Ok(FieldPat {
-                attrs,
+                attrs: Vec::new(),
                 member,
                 colon_token: input.parse()?,
                 pat: Box::new(multi_pat_with_leading_vert(input)?),
@@ -547,7 +547,7 @@
         }
 
         Ok(FieldPat {
-            attrs,
+            attrs: Vec::new(),
             member: Member::Named(ident),
             colon_token: None,
             pat: Box::new(pat),
@@ -654,7 +654,7 @@
     fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
         if input.is_empty()
             || input.peek(Token![|])
-            || input.peek(Token![=>])
+            || input.peek(Token![=])
             || input.peek(Token![:]) && !input.peek(Token![::])
             || input.peek(Token![,])
             || input.peek(Token![;])
@@ -827,7 +827,7 @@
     impl ToTokens for PatPath {
         fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
-            private::print_path(tokens, &self.qself, &self.path);
+            path::printing::print_path(tokens, &self.qself, &self.path);
         }
     }
 
@@ -881,10 +881,7 @@
         fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.lo.to_tokens(tokens);
-            match &self.limits {
-                RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
-                RangeLimits::Closed(t) => t.to_tokens(tokens),
-            }
+            self.limits.to_tokens(tokens);
             self.hi.to_tokens(tokens);
         }
     }
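
The rewritten comment in the `Pat` enum describes the idiom downstream crates can use to keep exhaustiveness checking now that the enum is non-exhaustive. A sketch of what that looks like in user code (not part of the patch; requires syn's `full` feature, and the `non_exhaustive_omitted_patterns` lint is still unstable, so the `deny` only takes effect on a nightly toolchain that enables it):

```rust
use syn::Pat;

// Hypothetical helper: classify a pattern, failing the test build whenever a
// newly added Pat variant is not handled explicitly above the wildcard arm.
fn classify(pat: &Pat) -> &'static str {
    match pat {
        Pat::Ident(_) => "binding",
        Pat::Wild(_) => "wildcard",
        // ...handle the other variants of interest...

        #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
        _ => "other",
    }
}
```
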
diff --git a/src/path.rs b/src/path.rs
index 4867317..6cdb43a 100644
--- a/src/path.rs
+++ b/src/path.rs
@@ -89,9 +89,8 @@
         }
     }
 
-    #[cfg(feature = "parsing")]
-    fn is_none(&self) -> bool {
-        match *self {
+    pub fn is_none(&self) -> bool {
+        match self {
             PathArguments::None => true,
             PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
         }
@@ -109,16 +108,16 @@
         Lifetime(Lifetime),
         /// A type argument.
         Type(Type),
-        /// A binding (equality constraint) on an associated type: the `Item =
-        /// u8` in `Iterator<Item = u8>`.
-        Binding(Binding),
-        /// An associated type bound: `Iterator<Item: Display>`.
-        Constraint(Constraint),
         /// A const expression. Must be inside of a block.
         ///
         /// NOTE: Identity expressions are represented as Type arguments, as
         /// they are indistinguishable syntactically.
         Const(Expr),
+        /// A binding (equality constraint) on an associated type: the `Item =
+        /// u8` in `Iterator<Item = u8>`.
+        Binding(Binding),
+        /// An associated type bound: `Iterator<Item: Display>`.
+        Constraint(Constraint),
     }
 }
 
@@ -231,7 +230,38 @@
             }
 
             if input.peek(Ident) && input.peek2(Token![=]) {
-                return Ok(GenericArgument::Binding(input.parse()?));
+                let ident: Ident = input.parse()?;
+                let eq_token: Token![=] = input.parse()?;
+
+                let ty = if input.peek(Lit) {
+                    let begin = input.fork();
+                    input.parse::<Lit>()?;
+                    Type::Verbatim(verbatim::between(begin, input))
+                } else if input.peek(token::Brace) {
+                    let begin = input.fork();
+
+                    #[cfg(feature = "full")]
+                    {
+                        input.parse::<ExprBlock>()?;
+                    }
+
+                    #[cfg(not(feature = "full"))]
+                    {
+                        let content;
+                        braced!(content in input);
+                        content.parse::<Expr>()?;
+                    }
+
+                    Type::Verbatim(verbatim::between(begin, input))
+                } else {
+                    input.parse()?
+                };
+
+                return Ok(GenericArgument::Binding(Binding {
+                    ident,
+                    eq_token,
+                    ty,
+                }));
             }
 
             #[cfg(feature = "full")]
@@ -653,7 +683,7 @@
 }
 
 #[cfg(feature = "printing")]
-mod printing {
+pub(crate) mod printing {
     use super::*;
     use crate::print::TokensOrDefault;
     use proc_macro2::TokenStream;
@@ -698,8 +728,6 @@
             match self {
                 GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
                 GenericArgument::Type(ty) => ty.to_tokens(tokens),
-                GenericArgument::Binding(tb) => tb.to_tokens(tokens),
-                GenericArgument::Constraint(tc) => tc.to_tokens(tokens),
                 GenericArgument::Const(e) => match *e {
                     Expr::Lit(_) => e.to_tokens(tokens),
 
@@ -715,6 +743,8 @@
                         e.to_tokens(tokens);
                     }),
                 },
+                GenericArgument::Binding(tb) => tb.to_tokens(tokens),
+                GenericArgument::Constraint(tc) => tc.to_tokens(tokens),
             }
         }
     }
@@ -725,11 +755,8 @@
             self.colon2_token.to_tokens(tokens);
             self.lt_token.to_tokens(tokens);
 
-            // Print lifetimes before types and consts, all before bindings,
-            // regardless of their order in self.args.
-            //
-            // TODO: ordering rules for const arguments vs type arguments have
-            // not been settled yet. https://github.com/rust-lang/rust/issues/44580
+            // Print lifetimes before types/consts/bindings, regardless of their
+            // order in self.args.
             let mut trailing_or_empty = true;
             for param in self.args.pairs() {
                 match **param.value() {
@@ -738,37 +765,24 @@
                         trailing_or_empty = param.punct().is_some();
                     }
                     GenericArgument::Type(_)
-                    | GenericArgument::Binding(_)
-                    | GenericArgument::Constraint(_)
-                    | GenericArgument::Const(_) => {}
-                }
-            }
-            for param in self.args.pairs() {
-                match **param.value() {
-                    GenericArgument::Type(_) | GenericArgument::Const(_) => {
-                        if !trailing_or_empty {
-                            <Token![,]>::default().to_tokens(tokens);
-                        }
-                        param.to_tokens(tokens);
-                        trailing_or_empty = param.punct().is_some();
-                    }
-                    GenericArgument::Lifetime(_)
+                    | GenericArgument::Const(_)
                     | GenericArgument::Binding(_)
                     | GenericArgument::Constraint(_) => {}
                 }
             }
             for param in self.args.pairs() {
                 match **param.value() {
-                    GenericArgument::Binding(_) | GenericArgument::Constraint(_) => {
+                    GenericArgument::Type(_)
+                    | GenericArgument::Const(_)
+                    | GenericArgument::Binding(_)
+                    | GenericArgument::Constraint(_) => {
                         if !trailing_or_empty {
                             <Token![,]>::default().to_tokens(tokens);
                         }
                         param.to_tokens(tokens);
                         trailing_or_empty = param.punct().is_some();
                     }
-                    GenericArgument::Lifetime(_)
-                    | GenericArgument::Type(_)
-                    | GenericArgument::Const(_) => {}
+                    GenericArgument::Lifetime(_) => {}
                 }
             }
 
@@ -804,39 +818,37 @@
         }
     }
 
-    impl private {
-        pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
-            let qself = match qself {
-                Some(qself) => qself,
-                None => {
-                    path.to_tokens(tokens);
-                    return;
-                }
-            };
-            qself.lt_token.to_tokens(tokens);
-            qself.ty.to_tokens(tokens);
+    pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
+        let qself = match qself {
+            Some(qself) => qself,
+            None => {
+                path.to_tokens(tokens);
+                return;
+            }
+        };
+        qself.lt_token.to_tokens(tokens);
+        qself.ty.to_tokens(tokens);
 
-            let pos = cmp::min(qself.position, path.segments.len());
-            let mut segments = path.segments.pairs();
-            if pos > 0 {
-                TokensOrDefault(&qself.as_token).to_tokens(tokens);
-                path.leading_colon.to_tokens(tokens);
-                for (i, segment) in segments.by_ref().take(pos).enumerate() {
-                    if i + 1 == pos {
-                        segment.value().to_tokens(tokens);
-                        qself.gt_token.to_tokens(tokens);
-                        segment.punct().to_tokens(tokens);
-                    } else {
-                        segment.to_tokens(tokens);
-                    }
+        let pos = cmp::min(qself.position, path.segments.len());
+        let mut segments = path.segments.pairs();
+        if pos > 0 {
+            TokensOrDefault(&qself.as_token).to_tokens(tokens);
+            path.leading_colon.to_tokens(tokens);
+            for (i, segment) in segments.by_ref().take(pos).enumerate() {
+                if i + 1 == pos {
+                    segment.value().to_tokens(tokens);
+                    qself.gt_token.to_tokens(tokens);
+                    segment.punct().to_tokens(tokens);
+                } else {
+                    segment.to_tokens(tokens);
                 }
-            } else {
-                qself.gt_token.to_tokens(tokens);
-                path.leading_colon.to_tokens(tokens);
             }
-            for segment in segments {
-                segment.to_tokens(tokens);
-            }
+        } else {
+            qself.gt_token.to_tokens(tokens);
+            path.leading_colon.to_tokens(tokens);
+        }
+        for segment in segments {
+            segment.to_tokens(tokens);
         }
     }
 }
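
The new branch in `GenericArgument` parsing accepts equality constraints whose right-hand side is a literal or a block, storing the value as `Type::Verbatim` instead of failing. A rough usage sketch (not from the patch; `Array<LENGTH = 3>` is an arbitrary illustrative input, and syn's default features are assumed):

```rust
fn demo() -> syn::Result<()> {
    let path: syn::Path = syn::parse_str("Array<LENGTH = 3>")?;

    let args = match &path.segments.last().unwrap().arguments {
        syn::PathArguments::AngleBracketed(args) => args,
        _ => unreachable!(),
    };

    // The literal right-hand side is kept as opaque verbatim tokens.
    match &args.args[0] {
        syn::GenericArgument::Binding(binding) => {
            assert!(matches!(binding.ty, syn::Type::Verbatim(_)));
        }
        _ => unreachable!(),
    }
    Ok(())
}
```
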
diff --git a/src/punctuated.rs b/src/punctuated.rs
index 5e9a34b..b7d0185 100644
--- a/src/punctuated.rs
+++ b/src/punctuated.rs
@@ -32,6 +32,7 @@
 use std::slice;
 use std::vec;
 
+use crate::drops::{NoDrop, TrivialDrop};
 #[cfg(feature = "parsing")]
 use crate::parse::{Parse, ParseStream, Result};
 #[cfg(feature = "parsing")]
@@ -104,10 +105,10 @@
     /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
     pub fn iter(&self) -> Iter<T> {
         Iter {
-            inner: Box::new(PrivateIter {
+            inner: Box::new(NoDrop::new(PrivateIter {
                 inner: self.inner.iter(),
                 last: self.last.as_ref().map(Box::as_ref).into_iter(),
-            }),
+            })),
         }
     }
 
@@ -115,10 +116,10 @@
     /// `&mut T`.
     pub fn iter_mut(&mut self) -> IterMut<T> {
         IterMut {
-            inner: Box::new(PrivateIterMut {
+            inner: Box::new(NoDrop::new(PrivateIterMut {
                 inner: self.inner.iter_mut(),
                 last: self.last.as_mut().map(Box::as_mut).into_iter(),
-            }),
+            })),
         }
     }
 
@@ -721,13 +722,13 @@
     // The `Item = &'a T` needs to be specified to support rustc 1.31 and older.
     // On modern compilers we would be able to write just IterTrait<'a, T> where
     // Item can be inferred unambiguously from the supertrait.
-    inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
+    inner: Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>,
 }
 
 trait IterTrait<'a, T: 'a>:
     DoubleEndedIterator<Item = &'a T> + ExactSizeIterator<Item = &'a T>
 {
-    fn clone_box(&self) -> Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>;
+    fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>;
 }
 
 struct PrivateIter<'a, T: 'a, P: 'a> {
@@ -735,10 +736,17 @@
     last: option::IntoIter<&'a T>,
 }
 
+impl<'a, T, P> TrivialDrop for PrivateIter<'a, T, P>
+where
+    slice::Iter<'a, (T, P)>: TrivialDrop,
+    option::IntoIter<&'a T>: TrivialDrop,
+{
+}
+
 #[cfg(any(feature = "full", feature = "derive"))]
 pub(crate) fn empty_punctuated_iter<'a, T>() -> Iter<'a, T> {
     Iter {
-        inner: Box::new(iter::empty()),
+        inner: Box::new(NoDrop::new(iter::empty())),
     }
 }
 
@@ -810,12 +818,17 @@
     }
 }
 
-impl<'a, T: 'a, I: 'a> IterTrait<'a, T> for I
+impl<'a, T, I> IterTrait<'a, T> for I
 where
-    I: DoubleEndedIterator<Item = &'a T> + ExactSizeIterator<Item = &'a T> + Clone,
+    T: 'a,
+    I: DoubleEndedIterator<Item = &'a T>
+        + ExactSizeIterator<Item = &'a T>
+        + Clone
+        + TrivialDrop
+        + 'a,
 {
-    fn clone_box(&self) -> Box<dyn IterTrait<'a, T, Item = &'a T> + 'a> {
-        Box::new(self.clone())
+    fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>> {
+        Box::new(NoDrop::new(self.clone()))
     }
 }
 
@@ -825,7 +838,7 @@
 ///
 /// [module documentation]: self
 pub struct IterMut<'a, T: 'a> {
-    inner: Box<dyn IterMutTrait<'a, T, Item = &'a mut T> + 'a>,
+    inner: Box<NoDrop<dyn IterMutTrait<'a, T, Item = &'a mut T> + 'a>>,
 }
 
 trait IterMutTrait<'a, T: 'a>:
@@ -838,10 +851,17 @@
     last: option::IntoIter<&'a mut T>,
 }
 
+impl<'a, T, P> TrivialDrop for PrivateIterMut<'a, T, P>
+where
+    slice::IterMut<'a, (T, P)>: TrivialDrop,
+    option::IntoIter<&'a mut T>: TrivialDrop,
+{
+}
+
 #[cfg(any(feature = "full", feature = "derive"))]
 pub(crate) fn empty_punctuated_iter_mut<'a, T>() -> IterMut<'a, T> {
     IterMut {
-        inner: Box::new(iter::empty()),
+        inner: Box::new(NoDrop::new(iter::empty())),
     }
 }
 
@@ -894,8 +914,10 @@
     }
 }
 
-impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
-    I: DoubleEndedIterator<Item = &'a mut T> + ExactSizeIterator<Item = &'a mut T>
+impl<'a, T, I> IterMutTrait<'a, T> for I
+where
+    T: 'a,
+    I: DoubleEndedIterator<Item = &'a mut T> + ExactSizeIterator<Item = &'a mut T> + 'a,
 {
 }
 
@@ -942,6 +964,31 @@
         }
     }
 
+    /// Mutably borrows the punctuation from this punctuated pair, unless the
+    /// pair is the final one and there is no trailing punctuation.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use proc_macro2::Span;
+    /// # use syn::punctuated::Punctuated;
+    /// # use syn::{parse_quote, Token, TypeParamBound};
+    /// #
+    /// # let mut punctuated = Punctuated::<TypeParamBound, Token![+]>::new();
+    /// # let span = Span::call_site();
+    /// #
+    /// punctuated.insert(0, parse_quote!('lifetime));
+    /// if let Some(punct) = punctuated.pairs_mut().next().unwrap().punct_mut() {
+    ///     punct.span = span;
+    /// }
+    /// ```
+    pub fn punct_mut(&mut self) -> Option<&mut P> {
+        match self {
+            Pair::Punctuated(_, p) => Some(p),
+            Pair::End(_) => None,
+        }
+    }
+
     /// Creates a punctuated pair out of a syntax tree node and an optional
     /// following punctuation.
     pub fn new(t: T, p: Option<P>) -> Self {
diff --git a/src/stmt.rs b/src/stmt.rs
index 3e2c71b..58bd013 100644
--- a/src/stmt.rs
+++ b/src/stmt.rs
@@ -175,7 +175,11 @@
             || input.peek(Token![crate]) && !input.peek2(Token![::])
             || input.peek(Token![extern])
             || input.peek(Token![use])
-            || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
+            || input.peek(Token![static])
+                && (input.peek2(Token![mut])
+                    || input.peek2(Ident)
+                        && !(input.peek2(Token![async])
+                            && (input.peek3(Token![move]) || input.peek3(Token![|]))))
             || input.peek(Token![const]) && !input.peek2(token::Brace)
             || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
             || input.peek(Token![async])
diff --git a/src/ty.rs b/src/ty.rs
index c38200f..8c841e2 100644
--- a/src/ty.rs
+++ b/src/ty.rs
@@ -14,6 +14,7 @@
     ///
     /// [syntax tree enum]: Expr#syntax-tree-enums
     #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
+    #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
     pub enum Type {
         /// A fixed size array type: `[T; n]`.
         Array(TypeArray),
@@ -53,7 +54,7 @@
         /// A dynamically sized slice type: `[T]`.
         Slice(TypeSlice),
 
-        /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+        /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a
         /// trait or a lifetime.
         TraitObject(TypeTraitObject),
 
@@ -63,18 +64,17 @@
         /// Tokens in type position not interpreted by Syn.
         Verbatim(TokenStream),
 
-        // The following is the only supported idiom for exhaustive matching of
-        // this enum.
+        // Not public API.
         //
-        //     match expr {
-        //         Type::Array(e) => {...}
-        //         Type::BareFn(e) => {...}
+        // For testing exhaustiveness in downstream code, use the following idiom:
+        //
+        //     match ty {
+        //         Type::Array(ty) => {...}
+        //         Type::BareFn(ty) => {...}
         //         ...
-        //         Type::Verbatim(e) => {...}
+        //         Type::Verbatim(ty) => {...}
         //
-        //         #[cfg(test)]
-        //         Type::__TestExhaustive(_) => unimplemented!(),
-        //         #[cfg(not(test))]
+        //         #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
         //         _ => { /* some sane fallback */ }
         //     }
         //
@@ -82,12 +82,9 @@
         // a variant. You will be notified by a test failure when a variant is
         // added, so that you can add code to handle it, but your library will
         // continue to compile and work for downstream users in the interim.
-        //
-        // Once `deny(reachable)` is available in rustc, Type will be
-        // reimplemented as a non_exhaustive enum.
-        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
+        #[cfg(syn_no_non_exhaustive)]
         #[doc(hidden)]
-        __TestExhaustive(crate::private),
+        __NonExhaustive,
     }
 }
 
@@ -247,7 +244,7 @@
 }
 
 ast_struct! {
-    /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+    /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a
     /// trait or a lifetime.
     ///
     /// *This type is available only if Syn is built with the `"derive"` or
@@ -340,13 +337,14 @@
     use crate::ext::IdentExt;
     use crate::parse::{Parse, ParseStream, Result};
     use crate::path;
-    use proc_macro2::{Punct, Spacing, TokenTree};
+    use proc_macro2::{Punct, Spacing, Span, TokenTree};
 
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for Type {
         fn parse(input: ParseStream) -> Result<Self> {
             let allow_plus = true;
-            ambig_ty(input, allow_plus)
+            let allow_group_generic = true;
+            ambig_ty(input, allow_plus, allow_group_generic)
         }
     }
 
@@ -359,11 +357,16 @@
         #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
         pub fn without_plus(input: ParseStream) -> Result<Self> {
             let allow_plus = false;
-            ambig_ty(input, allow_plus)
+            let allow_group_generic = true;
+            ambig_ty(input, allow_plus, allow_group_generic)
         }
     }
 
-    fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+    pub(crate) fn ambig_ty(
+        input: ParseStream,
+        allow_plus: bool,
+        allow_group_generic: bool,
+    ) -> Result<Type> {
         let begin = input.fork();
 
         if input.peek(token::Group) {
@@ -384,7 +387,9 @@
                         path: Path::parse_helper(input, false)?,
                     }));
                 }
-            } else if input.peek(Token![<]) || input.peek(Token![::]) && input.peek3(Token![<]) {
+            } else if input.peek(Token![<]) && allow_group_generic
+                || input.peek(Token![::]) && input.peek3(Token![<])
+            {
                 if let Type::Path(mut ty) = *group.elem {
                     let arguments = &mut ty.path.segments.last_mut().unwrap().arguments;
                     if let PathArguments::None = arguments {
@@ -540,9 +545,19 @@
             || lookahead.peek(Token![::])
             || lookahead.peek(Token![<])
         {
-            if input.peek(Token![dyn]) {
-                let trait_object = TypeTraitObject::parse(input, allow_plus)?;
-                return Ok(Type::TraitObject(trait_object));
+            let dyn_token: Option<Token![dyn]> = input.parse()?;
+            if let Some(dyn_token) = dyn_token {
+                let dyn_span = dyn_token.span;
+                let star_token: Option<Token![*]> = input.parse()?;
+                let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?;
+                return Ok(if star_token.is_some() {
+                    Type::Verbatim(verbatim::between(begin, input))
+                } else {
+                    Type::TraitObject(TypeTraitObject {
+                        dyn_token: Some(dyn_token),
+                        bounds,
+                    })
+                });
             }
 
             let ty: TypePath = input.parse()?;
@@ -740,7 +755,10 @@
                         break;
                     }
 
-                    inputs.push_punct(args.parse()?);
+                    let comma = args.parse()?;
+                    if !has_mut_self {
+                        inputs.push_punct(comma);
+                    }
                 }
 
                 inputs
@@ -819,15 +837,28 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
     impl Parse for TypePath {
         fn parse(input: ParseStream) -> Result<Self> {
-            let (qself, mut path) = path::parsing::qpath(input, false)?;
+            let expr_style = false;
+            let (qself, mut path) = path::parsing::qpath(input, expr_style)?;
 
-            if path.segments.last().unwrap().arguments.is_empty()
+            while path.segments.last().unwrap().arguments.is_empty()
                 && (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren))
             {
                 input.parse::<Option<Token![::]>>()?;
                 let args: ParenthesizedGenericArguments = input.parse()?;
+                let allow_associated_type = cfg!(feature = "full")
+                    && match &args.output {
+                        ReturnType::Default => true,
+                        ReturnType::Type(_, ty) => match **ty {
+                            // TODO: probably some of the other kinds allow this too.
+                            Type::Paren(_) => true,
+                            _ => false,
+                        },
+                    };
                 let parenthesized = PathArguments::Parenthesized(args);
                 path.segments.last_mut().unwrap().arguments = parenthesized;
+                if allow_associated_type {
+                    Path::parse_rest(input, &mut path, expr_style)?;
+                }
             }
 
             Ok(TypePath { qself, path })
@@ -844,7 +875,8 @@
         pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
             if input.peek(Token![->]) {
                 let arrow = input.parse()?;
-                let ty = ambig_ty(input, allow_plus)?;
+                let allow_group_generic = true;
+                let ty = ambig_ty(input, allow_plus, allow_group_generic)?;
                 Ok(ReturnType::Type(arrow, Box::new(ty)))
             } else {
                 Ok(ReturnType::Default)
@@ -868,15 +900,6 @@
         }
     }
 
-    fn at_least_one_type(bounds: &Punctuated<TypeParamBound, Token![+]>) -> bool {
-        for bound in bounds {
-            if let TypeParamBound::Trait(_) = *bound {
-                return true;
-            }
-        }
-        false
-    }
-
     impl TypeTraitObject {
         #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
         pub fn without_plus(input: ParseStream) -> Result<Self> {
@@ -886,35 +909,38 @@
 
         // Only allow multiple trait references if allow_plus is true.
         pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
-            Ok(TypeTraitObject {
-                dyn_token: input.parse()?,
-                bounds: Self::parse_bounds(input, allow_plus)?,
-            })
+            let dyn_token: Option<Token![dyn]> = input.parse()?;
+            let dyn_span = match &dyn_token {
+                Some(token) => token.span,
+                None => input.span(),
+            };
+            let bounds = Self::parse_bounds(dyn_span, input, allow_plus)?;
+            Ok(TypeTraitObject { dyn_token, bounds })
         }
 
         fn parse_bounds(
+            dyn_span: Span,
             input: ParseStream,
             allow_plus: bool,
         ) -> Result<Punctuated<TypeParamBound, Token![+]>> {
-            let mut bounds = Punctuated::new();
-            loop {
-                bounds.push_value(input.parse()?);
-                if !(allow_plus && input.peek(Token![+])) {
-                    break;
-                }
-                bounds.push_punct(input.parse()?);
-                if !(input.peek(Ident::peek_any)
-                    || input.peek(Token![::])
-                    || input.peek(Token![?])
-                    || input.peek(Lifetime)
-                    || input.peek(token::Paren))
-                {
-                    break;
+            let bounds = TypeParamBound::parse_multiple(input, allow_plus)?;
+            let mut last_lifetime_span = None;
+            let mut at_least_one_trait = false;
+            for bound in &bounds {
+                match bound {
+                    TypeParamBound::Trait(_) => {
+                        at_least_one_trait = true;
+                        break;
+                    }
+                    TypeParamBound::Lifetime(lifetime) => {
+                        last_lifetime_span = Some(lifetime.ident.span());
+                    }
                 }
             }
             // Just lifetimes like `'a + 'b` is not a TraitObject.
-            if !at_least_one_type(&bounds) {
-                return Err(input.error("expected at least one type"));
+            if !at_least_one_trait {
+                let msg = "at least one trait is required for an object type";
+                return Err(error::new2(dyn_span, last_lifetime_span.unwrap(), msg));
             }
             Ok(bounds)
         }
@@ -936,10 +962,30 @@
         }
 
         pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
-            Ok(TypeImplTrait {
-                impl_token: input.parse()?,
-                bounds: TypeTraitObject::parse_bounds(input, allow_plus)?,
-            })
+            let impl_token: Token![impl] = input.parse()?;
+            let bounds = TypeParamBound::parse_multiple(input, allow_plus)?;
+            let mut last_lifetime_span = None;
+            let mut at_least_one_trait = false;
+            for bound in &bounds {
+                match bound {
+                    TypeParamBound::Trait(_) => {
+                        at_least_one_trait = true;
+                        break;
+                    }
+                    TypeParamBound::Lifetime(lifetime) => {
+                        last_lifetime_span = Some(lifetime.ident.span());
+                    }
+                }
+            }
+            if !at_least_one_trait {
+                let msg = "at least one trait must be specified";
+                return Err(error::new2(
+                    impl_token.span,
+                    last_lifetime_span.unwrap(),
+                    msg,
+                ));
+            }
+            Ok(TypeImplTrait { impl_token, bounds })
         }
     }
 
@@ -967,7 +1013,10 @@
             let content;
             Ok(TypeParen {
                 paren_token: parenthesized!(content in input),
-                elem: Box::new(ambig_ty(&content, allow_plus)?),
+                elem: Box::new({
+                    let allow_group_generic = true;
+                    ambig_ty(&content, allow_plus, allow_group_generic)?
+                }),
             })
         }
     }
@@ -1160,7 +1209,7 @@
     #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
     impl ToTokens for TypePath {
         fn to_tokens(&self, tokens: &mut TokenStream) {
-            private::print_path(tokens, &self.qself, &self.path);
+            path::printing::print_path(tokens, &self.qself, &self.path);
         }
     }
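
Taken together, the trait-object changes above route the experimental `dyn* Trait` syntax through `Type::Verbatim` and report a spanned error when an object type lists only lifetimes. A small usage sketch (not part of the patch; assumes syn 1.0.107 with its default features):

```rust
fn demo() -> syn::Result<()> {
    // An ordinary trait object still parses into Type::TraitObject.
    let ty: syn::Type = syn::parse_str("dyn Iterator<Item = u8> + Send")?;
    assert!(matches!(ty, syn::Type::TraitObject(_)));

    // `dyn*` is accepted, but only as opaque verbatim tokens.
    let ty: syn::Type = syn::parse_str("dyn* Future")?;
    assert!(matches!(ty, syn::Type::Verbatim(_)));

    // Lifetime-only bounds now fail with
    // "at least one trait is required for an object type".
    assert!(syn::parse_str::<syn::Type>("dyn 'static").is_err());
    Ok(())
}
```
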
 
diff --git a/src/verbatim.rs b/src/verbatim.rs
index 0686352..58cf68d 100644
--- a/src/verbatim.rs
+++ b/src/verbatim.rs
@@ -1,13 +1,31 @@
 use crate::parse::{ParseBuffer, ParseStream};
-use proc_macro2::TokenStream;
+use proc_macro2::{Delimiter, TokenStream};
+use std::cmp::Ordering;
 use std::iter;
 
 pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
     let end = end.cursor();
     let mut cursor = begin.cursor();
+    assert!(crate::buffer::same_buffer(end, cursor));
+
     let mut tokens = TokenStream::new();
     while cursor != end {
         let (tt, next) = cursor.token_tree().unwrap();
+
+        if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less {
+            // A syntax node can cross the boundary of a None-delimited group
+            // due to such groups being transparent to the parser in most cases.
+            // Any time this occurs the group is known to be semantically
+            // irrelevant. https://github.com/dtolnay/syn/issues/1235
+            if let Some((inside, _span, after)) = cursor.group(Delimiter::None) {
+                assert!(next == after);
+                cursor = inside;
+                continue;
+            } else {
+                panic!("verbatim end must not be inside a delimited group");
+            }
+        }
+
         tokens.extend(iter::once(tt));
         cursor = next;
     }
diff --git a/tests/.gitignore b/tests/.gitignore
deleted file mode 100644
index 291ed43..0000000
--- a/tests/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/*.pending-snap
diff --git a/tests/common/eq.rs b/tests/common/eq.rs
index 5e3dd44..41d6d41 100644
--- a/tests/common/eq.rs
+++ b/tests/common/eq.rs
@@ -1,35 +1,146 @@
+#![allow(unused_macro_rules)]
+
 extern crate rustc_ast;
 extern crate rustc_data_structures;
 extern crate rustc_span;
+extern crate thin_vec;
 
-use rustc_ast::ast::{
-    AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocConstraint, AssocConstraintKind,
-    AssocItemKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy, BinOpKind,
-    BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
-    Defaultness, EnumDef, Expr, ExprField, ExprKind, Extern, FieldDef, FloatTy, Fn, FnDecl,
-    FnHeader, FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
-    GenericParam, GenericParamKind, Generics, Impl, ImplPolarity, Inline, InlineAsm,
-    InlineAsmOperand, InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy,
-    IsAuto, Item, ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, Local,
-    LocalKind, MacArgs, MacCall, MacCallStmt, MacDelimiter, MacStmtStyle, MacroDef, ModKind,
-    Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs, Pat, PatField, PatKind, Path,
-    PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt, StmtKind, StrLit,
-    StrStyle, StructExpr, StructRest, Term, Trait, TraitBoundModifier, TraitObjectSyntax, TraitRef,
-    Ty, TyAlias, TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant,
-    VariantData, Visibility, VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate,
-    WherePredicate, WhereRegionPredicate,
-};
+use rustc_ast::ast::AngleBracketedArg;
+use rustc_ast::ast::AngleBracketedArgs;
+use rustc_ast::ast::AnonConst;
+use rustc_ast::ast::Arm;
+use rustc_ast::ast::AssocConstraint;
+use rustc_ast::ast::AssocConstraintKind;
+use rustc_ast::ast::AssocItemKind;
+use rustc_ast::ast::Async;
+use rustc_ast::ast::AttrArgs;
+use rustc_ast::ast::AttrArgsEq;
+use rustc_ast::ast::AttrId;
+use rustc_ast::ast::AttrItem;
+use rustc_ast::ast::AttrKind;
+use rustc_ast::ast::AttrStyle;
+use rustc_ast::ast::Attribute;
+use rustc_ast::ast::BareFnTy;
+use rustc_ast::ast::BinOpKind;
+use rustc_ast::ast::BindingAnnotation;
+use rustc_ast::ast::Block;
+use rustc_ast::ast::BlockCheckMode;
+use rustc_ast::ast::BorrowKind;
+use rustc_ast::ast::ByRef;
+use rustc_ast::ast::CaptureBy;
+use rustc_ast::ast::Closure;
+use rustc_ast::ast::ClosureBinder;
+use rustc_ast::ast::Const;
+use rustc_ast::ast::Crate;
+use rustc_ast::ast::Defaultness;
+use rustc_ast::ast::DelimArgs;
+use rustc_ast::ast::EnumDef;
+use rustc_ast::ast::Expr;
+use rustc_ast::ast::ExprField;
+use rustc_ast::ast::ExprKind;
+use rustc_ast::ast::Extern;
+use rustc_ast::ast::FieldDef;
+use rustc_ast::ast::FloatTy;
+use rustc_ast::ast::Fn;
+use rustc_ast::ast::FnDecl;
+use rustc_ast::ast::FnHeader;
+use rustc_ast::ast::FnRetTy;
+use rustc_ast::ast::FnSig;
+use rustc_ast::ast::ForeignItemKind;
+use rustc_ast::ast::ForeignMod;
+use rustc_ast::ast::GenericArg;
+use rustc_ast::ast::GenericArgs;
+use rustc_ast::ast::GenericBound;
+use rustc_ast::ast::GenericParam;
+use rustc_ast::ast::GenericParamKind;
+use rustc_ast::ast::Generics;
+use rustc_ast::ast::Impl;
+use rustc_ast::ast::ImplPolarity;
+use rustc_ast::ast::Inline;
+use rustc_ast::ast::InlineAsm;
+use rustc_ast::ast::InlineAsmOperand;
+use rustc_ast::ast::InlineAsmOptions;
+use rustc_ast::ast::InlineAsmRegOrRegClass;
+use rustc_ast::ast::InlineAsmSym;
+use rustc_ast::ast::InlineAsmTemplatePiece;
+use rustc_ast::ast::IntTy;
+use rustc_ast::ast::IsAuto;
+use rustc_ast::ast::Item;
+use rustc_ast::ast::ItemKind;
+use rustc_ast::ast::Label;
+use rustc_ast::ast::Lifetime;
+use rustc_ast::ast::LitFloatType;
+use rustc_ast::ast::LitIntType;
+use rustc_ast::ast::LitKind;
+use rustc_ast::ast::Local;
+use rustc_ast::ast::LocalKind;
+use rustc_ast::ast::MacCall;
+use rustc_ast::ast::MacCallStmt;
+use rustc_ast::ast::MacDelimiter;
+use rustc_ast::ast::MacStmtStyle;
+use rustc_ast::ast::MacroDef;
+use rustc_ast::ast::MetaItemLit;
+use rustc_ast::ast::MethodCall;
+use rustc_ast::ast::ModKind;
+use rustc_ast::ast::ModSpans;
+use rustc_ast::ast::Movability;
+use rustc_ast::ast::MutTy;
+use rustc_ast::ast::Mutability;
+use rustc_ast::ast::NodeId;
+use rustc_ast::ast::NormalAttr;
+use rustc_ast::ast::Param;
+use rustc_ast::ast::ParenthesizedArgs;
+use rustc_ast::ast::Pat;
+use rustc_ast::ast::PatField;
+use rustc_ast::ast::PatKind;
+use rustc_ast::ast::Path;
+use rustc_ast::ast::PathSegment;
+use rustc_ast::ast::PolyTraitRef;
+use rustc_ast::ast::QSelf;
+use rustc_ast::ast::RangeEnd;
+use rustc_ast::ast::RangeLimits;
+use rustc_ast::ast::RangeSyntax;
+use rustc_ast::ast::Stmt;
+use rustc_ast::ast::StmtKind;
+use rustc_ast::ast::StrLit;
+use rustc_ast::ast::StrStyle;
+use rustc_ast::ast::StructExpr;
+use rustc_ast::ast::StructRest;
+use rustc_ast::ast::Term;
+use rustc_ast::ast::Trait;
+use rustc_ast::ast::TraitBoundModifier;
+use rustc_ast::ast::TraitObjectSyntax;
+use rustc_ast::ast::TraitRef;
+use rustc_ast::ast::Ty;
+use rustc_ast::ast::TyAlias;
+use rustc_ast::ast::TyAliasWhereClause;
+use rustc_ast::ast::TyKind;
+use rustc_ast::ast::UintTy;
+use rustc_ast::ast::UnOp;
+use rustc_ast::ast::Unsafe;
+use rustc_ast::ast::UnsafeSource;
+use rustc_ast::ast::UseTree;
+use rustc_ast::ast::UseTreeKind;
+use rustc_ast::ast::Variant;
+use rustc_ast::ast::VariantData;
+use rustc_ast::ast::Visibility;
+use rustc_ast::ast::VisibilityKind;
+use rustc_ast::ast::WhereBoundPredicate;
+use rustc_ast::ast::WhereClause;
+use rustc_ast::ast::WhereEqPredicate;
+use rustc_ast::ast::WherePredicate;
+use rustc_ast::ast::WhereRegionPredicate;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, CommentKind, DelimToken, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{
-    AttrAnnotatedTokenStream, AttrAnnotatedTokenTree, AttributesData, DelimSpan, LazyTokenStream,
-    Spacing, TokenStream, TokenTree,
+    AttrTokenStream, AttrTokenTree, AttributesData, DelimSpan, LazyAttrTokenStream, Spacing,
+    TokenStream, TokenTree,
 };
 use rustc_data_structures::sync::Lrc;
-use rustc_data_structures::thin_vec::ThinVec;
 use rustc_span::source_map::Spanned;
 use rustc_span::symbol::{sym, Ident};
 use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP};
+use thin_vec::ThinVec;
 
 pub trait SpanlessEq {
     fn eq(&self, other: &Self) -> bool;
@@ -41,7 +152,7 @@
     }
 }
 
-impl<T: SpanlessEq> SpanlessEq for P<T> {
+impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> {
     fn eq(&self, other: &Self) -> bool {
         SpanlessEq::eq(&**self, &**other)
     }
@@ -141,7 +252,7 @@
 spanless_eq_partial_eq!(String);
 spanless_eq_partial_eq!(Symbol);
 spanless_eq_partial_eq!(CommentKind);
-spanless_eq_partial_eq!(DelimToken);
+spanless_eq_partial_eq!(Delimiter);
 spanless_eq_partial_eq!(InlineAsmOptions);
 spanless_eq_partial_eq!(token::LitKind);
 
@@ -296,13 +407,16 @@
 spanless_eq_struct!(AnonConst; id value);
 spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
 spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
-spanless_eq_struct!(AttrAnnotatedTokenStream; 0);
 spanless_eq_struct!(AttrItem; path args tokens);
+spanless_eq_struct!(AttrTokenStream; 0);
 spanless_eq_struct!(Attribute; kind id style span);
 spanless_eq_struct!(AttributesData; attrs tokens);
-spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
+spanless_eq_struct!(BindingAnnotation; 0 1);
 spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
-spanless_eq_struct!(Crate; attrs items span id is_placeholder);
+spanless_eq_struct!(Closure; binder capture_clause asyncness movability fn_decl body !fn_decl_span);
+spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
+spanless_eq_struct!(DelimArgs; dspan delim tokens);
 spanless_eq_struct!(EnumDef; variants);
 spanless_eq_struct!(Expr; id kind span attrs !tokens);
 spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
@@ -312,19 +426,24 @@
 spanless_eq_struct!(Fn; defaultness generics sig body);
 spanless_eq_struct!(FnSig; header decl span);
 spanless_eq_struct!(ForeignMod; unsafety abi items);
-spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
 spanless_eq_struct!(Generics; params where_clause span);
 spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items);
 spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans);
+spanless_eq_struct!(InlineAsmSym; id qself path);
 spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
 spanless_eq_struct!(Label; ident);
 spanless_eq_struct!(Lifetime; id ident);
-spanless_eq_struct!(Lit; token kind span);
+spanless_eq_struct!(Lit; kind symbol suffix);
 spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
 spanless_eq_struct!(MacCall; path args prior_type_ascription);
 spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
 spanless_eq_struct!(MacroDef; body macro_rules);
+spanless_eq_struct!(MetaItemLit; token_lit kind span);
+spanless_eq_struct!(MethodCall; seg receiver args !span);
+spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
 spanless_eq_struct!(MutTy; ty mutbl);
+spanless_eq_struct!(NormalAttr; item tokens);
 spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
 spanless_eq_struct!(Pat; id kind span tokens);
 spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
@@ -339,30 +458,32 @@
 spanless_eq_struct!(Trait; unsafety is_auto generics bounds items);
 spanless_eq_struct!(TraitRef; path ref_id);
 spanless_eq_struct!(Ty; id kind span tokens);
-spanless_eq_struct!(TyAlias; defaultness generics bounds ty);
+spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty);
+spanless_eq_struct!(TyAliasWhereClause; !0 1);
 spanless_eq_struct!(UseTree; prefix kind span);
 spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
 spanless_eq_struct!(Visibility; kind span tokens);
 spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
 spanless_eq_struct!(WhereClause; has_where_token predicates span);
-spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
 spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
-spanless_eq_struct!(token::Lit; kind symbol suffix);
 spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
-spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) TyAlias(0) MacCall(0));
+spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) Type(0) MacCall(0));
 spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
 spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
-spanless_eq_enum!(AttrAnnotatedTokenTree; Token(0) Delimited(0 1 2) Attributes(0));
+spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
+spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
 spanless_eq_enum!(AttrStyle; Outer Inner);
+spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2) Attributes(0));
 spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
-spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
 spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
 spanless_eq_enum!(BorrowKind; Ref Raw);
+spanless_eq_enum!(ByRef; Yes No);
 spanless_eq_enum!(CaptureBy; Value Ref);
+spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
 spanless_eq_enum!(Const; Yes(0) No);
-spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
 spanless_eq_enum!(Defaultness; Default(0) Final);
-spanless_eq_enum!(Extern; None Implicit Explicit(0));
+spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
 spanless_eq_enum!(FloatTy; F32 F64);
 spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
 spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
@@ -379,7 +500,6 @@
 spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
 spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
 spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
-spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
 spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
 spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
 spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
@@ -391,32 +511,33 @@
 spanless_eq_enum!(StrStyle; Cooked Raw(0));
 spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
 spanless_eq_enum!(Term; Ty(0) Const(0));
-spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2));
 spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
-spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None);
 spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
 spanless_eq_enum!(UnOp; Deref Not Neg);
 spanless_eq_enum!(Unsafe; Yes(0) No);
 spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
-spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob);
 spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
-spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
 spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
 spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
-    MethodCall(0 1 2) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
-    Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1)
-    Closure(0 1 2 3 4 5) Block(0 1) Async(0 1 2) Await(0) TryBlock(0)
-    Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2)
-    Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0)
-    MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
+    MethodCall(0) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
+    Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1 2) Match(0 1)
+    Closure(0) Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2)
+    AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2) Path(0 1)
+    AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0) MacCall(0)
+    Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) IncludedBytes(0)
+    Err);
 spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
     InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
-    Sym(expr));
+    Sym(sym));
 spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
     Fn(0) Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
     Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
 spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
-    Float(0 1) Bool(0) Err(0));
+    Float(0 1) Bool(0) Err);
 spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
     Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
     Paren(0) MacCall(0));
@@ -500,10 +621,10 @@
                 None => return false,
                 Some(tree) => tree,
             };
-            if SpanlessEq::eq(&this, &other) {
+            if SpanlessEq::eq(this, other) {
                 continue;
             }
-            if let (TokenTree::Token(this), TokenTree::Token(other)) = (this, other) {
+            if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
                 if match (&this.kind, &other.kind) {
                     (TokenKind::Literal(this), TokenKind::Literal(other)) => {
                         SpanlessEq::eq(this, other)
@@ -524,86 +645,135 @@
     }
 }
 
-fn doc_comment(
+fn doc_comment<'a>(
     style: AttrStyle,
     unescaped: Symbol,
-    trees: &mut impl Iterator<Item = TokenTree>,
+    trees: &mut impl Iterator<Item = &'a TokenTree>,
 ) -> bool {
     if match style {
         AttrStyle::Outer => false,
         AttrStyle::Inner => true,
     } {
         match trees.next() {
-            Some(TokenTree::Token(Token {
-                kind: TokenKind::Not,
-                span: _,
-            })) => {}
+            Some(TokenTree::Token(
+                Token {
+                    kind: TokenKind::Not,
+                    span: _,
+                },
+                _spacing,
+            )) => {}
             _ => return false,
         }
     }
     let stream = match trees.next() {
-        Some(TokenTree::Delimited(_span, DelimToken::Bracket, stream)) => stream,
+        Some(TokenTree::Delimited(_span, Delimiter::Bracket, stream)) => stream,
         _ => return false,
     };
     let mut trees = stream.trees();
     match trees.next() {
-        Some(TokenTree::Token(Token {
-            kind: TokenKind::Ident(symbol, false),
-            span: _,
-        })) if symbol == sym::doc => {}
+        Some(TokenTree::Token(
+            Token {
+                kind: TokenKind::Ident(symbol, false),
+                span: _,
+            },
+            _spacing,
+        )) if *symbol == sym::doc => {}
         _ => return false,
     }
     match trees.next() {
-        Some(TokenTree::Token(Token {
-            kind: TokenKind::Eq,
-            span: _,
-        })) => {}
+        Some(TokenTree::Token(
+            Token {
+                kind: TokenKind::Eq,
+                span: _,
+            },
+            _spacing,
+        )) => {}
         _ => return false,
     }
     match trees.next() {
-        Some(TokenTree::Token(token)) => {
-            is_escaped_literal(&token, unescaped) && trees.next().is_none()
+        Some(TokenTree::Token(token, _spacing)) => {
+            is_escaped_literal_token(token, unescaped) && trees.next().is_none()
         }
         _ => false,
     }
 }
 
-fn is_escaped_literal(token: &Token, unescaped: Symbol) -> bool {
-    match match token {
+fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
+    match token {
         Token {
             kind: TokenKind::Literal(lit),
             span: _,
-        } => Lit::from_lit_token(*lit, DUMMY_SP),
+        } => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) {
+            Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped),
+            Err(_) => false,
+        },
         Token {
             kind: TokenKind::Interpolated(nonterminal),
             span: _,
         } => match nonterminal.as_ref() {
             Nonterminal::NtExpr(expr) => match &expr.kind {
-                ExprKind::Lit(lit) => Ok(lit.clone()),
-                _ => return false,
+                ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
+                _ => false,
             },
-            _ => return false,
+            _ => false,
         },
-        _ => return false,
-    } {
-        Ok(Lit {
-            token:
-                token::Lit {
-                    kind: token::LitKind::Str,
-                    symbol: _,
-                    suffix: None,
-                },
-            kind: LitKind::Str(symbol, StrStyle::Cooked),
-            span: _,
-        }) => symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', ""),
         _ => false,
     }
 }
 
-impl SpanlessEq for LazyTokenStream {
+fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool {
+    match value {
+        AttrArgsEq::Ast(expr) => match &expr.kind {
+            ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
+            _ => false,
+        },
+        AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped),
+    }
+}
+
+fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
+    match lit {
+        MetaItemLit {
+            token_lit:
+                Lit {
+                    kind: token::LitKind::Str,
+                    symbol: _,
+                    suffix: None,
+                },
+            kind,
+            span: _,
+        } => is_escaped_lit_kind(kind, unescaped),
+        _ => false,
+    }
+}
+
+fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool {
+    match lit {
+        Lit {
+            kind: token::LitKind::Str,
+            symbol: _,
+            suffix: None,
+        } => match LitKind::from_token_lit(*lit) {
+            Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped),
+            _ => false,
+        },
+        _ => false,
+    }
+}
+
+fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool {
+    match kind {
+        LitKind::Str(symbol, StrStyle::Cooked) => {
+            symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "")
+        }
+        _ => false,
+    }
+}
+
+impl SpanlessEq for LazyAttrTokenStream {
     fn eq(&self, other: &Self) -> bool {
-        let this = self.create_token_stream();
-        let other = other.create_token_stream();
+        let this = self.to_attr_token_stream();
+        let other = other.to_attr_token_stream();
         SpanlessEq::eq(&this, &other)
     }
 }
@@ -611,24 +781,26 @@
 impl SpanlessEq for AttrKind {
     fn eq(&self, other: &Self) -> bool {
         match (self, other) {
-            (AttrKind::Normal(item, tokens), AttrKind::Normal(item2, tokens2)) => {
-                SpanlessEq::eq(item, item2) && SpanlessEq::eq(tokens, tokens2)
+            (AttrKind::Normal(normal), AttrKind::Normal(normal2)) => {
+                SpanlessEq::eq(normal, normal2)
             }
             (AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
                 SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
             }
-            (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(item2, _tokens)) => {
+            (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => {
                 match kind {
                     CommentKind::Line | CommentKind::Block => {}
                 }
                 let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
-                SpanlessEq::eq(&path, &item2.path)
-                    && match &item2.args {
-                        MacArgs::Empty | MacArgs::Delimited(..) => false,
-                        MacArgs::Eq(_span, token) => is_escaped_literal(token, *unescaped),
+                SpanlessEq::eq(&path, &normal2.item.path)
+                    && match &normal2.item.args {
+                        AttrArgs::Empty | AttrArgs::Delimited(_) => false,
+                        AttrArgs::Eq(_span, value) => {
+                            is_escaped_literal_attr_args(value, *unescaped)
+                        }
                     }
             }
-            (AttrKind::Normal(..), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
+            (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
         }
     }
 }
diff --git a/tests/debug/gen.rs b/tests/debug/gen.rs
index a49ee6c..cfd63d1 100644
--- a/tests/debug/gen.rs
+++ b/tests/debug/gen.rs
@@ -2215,6 +2215,13 @@
                 formatter.write_str(")")?;
                 Ok(())
             }
+            syn::GenericArgument::Const(_val) => {
+                formatter.write_str("Const")?;
+                formatter.write_str("(")?;
+                Debug::fmt(Lite(_val), formatter)?;
+                formatter.write_str(")")?;
+                Ok(())
+            }
             syn::GenericArgument::Binding(_val) => {
                 formatter.write_str("Binding")?;
                 formatter.write_str("(")?;
@@ -2229,13 +2236,6 @@
                 formatter.write_str(")")?;
                 Ok(())
             }
-            syn::GenericArgument::Const(_val) => {
-                formatter.write_str("Const")?;
-                formatter.write_str("(")?;
-                Debug::fmt(Lite(_val), formatter)?;
-                formatter.write_str(")")?;
-                Ok(())
-            }
         }
     }
 }
diff --git a/tests/debug/mod.rs b/tests/debug/mod.rs
index 05de612..0a0991a 100644
--- a/tests/debug/mod.rs
+++ b/tests/debug/mod.rs
@@ -87,7 +87,7 @@
     Lite<T>: Debug,
 {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-        Debug::fmt(Lite(&*self.value), formatter)
+        Debug::fmt(Lite(self.value), formatter)
     }
 }
 
diff --git a/tests/macros/mod.rs b/tests/macros/mod.rs
index 86cdb5f..5ca88b0 100644
--- a/tests/macros/mod.rs
+++ b/tests/macros/mod.rs
@@ -1,9 +1,10 @@
+#![allow(unused_macros, unused_macro_rules)]
+
 #[path = "../debug/mod.rs"]
 pub mod debug;
 
 use syn::parse::{Parse, Result};
 
-#[macro_export]
 macro_rules! errorf {
     ($($tt:tt)*) => {{
         use ::std::io::Write;
@@ -12,7 +13,6 @@
     }};
 }
 
-#[macro_export]
 macro_rules! punctuated {
     ($($e:expr,)+) => {{
         let mut seq = ::syn::punctuated::Punctuated::new();
@@ -27,14 +27,12 @@
     };
 }
 
-#[macro_export]
 macro_rules! snapshot {
     ($($args:tt)*) => {
         snapshot_impl!(() $($args)*)
     };
 }
 
-#[macro_export]
 macro_rules! snapshot_impl {
     (($expr:ident) as $t:ty, @$snapshot:literal) => {
         let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
diff --git a/tests/regression.rs b/tests/regression.rs
index 8311a91..fb2b25c 100644
--- a/tests/regression.rs
+++ b/tests/regression.rs
@@ -1,5 +1,3 @@
-#![allow(clippy::let_underscore_drop)]
-
 mod regression {
     automod::dir!("tests/regression");
 }
diff --git a/tests/regression/issue1108.rs b/tests/regression/issue1108.rs
index 11a82ad..4fd30c0 100644
--- a/tests/regression/issue1108.rs
+++ b/tests/regression/issue1108.rs
@@ -1,5 +1,5 @@
 #[test]
 fn issue1108() {
     let data = "impl<x<>>::x for";
-    let _ = syn::parse_file(data);
+    _ = syn::parse_file(data);
 }
diff --git a/tests/regression/issue1235.rs b/tests/regression/issue1235.rs
new file mode 100644
index 0000000..8836030
--- /dev/null
+++ b/tests/regression/issue1235.rs
@@ -0,0 +1,32 @@
+use proc_macro2::{Delimiter, Group};
+use quote::quote;
+
+#[test]
+fn main() {
+    // Okay. Rustc allows top-level `static` with no value syntactically, but
+    // not semantically. Syn parses as Item::Verbatim.
+    let tokens = quote! {
+        pub static FOO: usize;
+        pub static BAR: usize;
+    };
+    let file = syn::parse2::<syn::File>(tokens).unwrap();
+    println!("{:#?}", file);
+
+    // Okay.
+    let inner = Group::new(
+        Delimiter::None,
+        quote!(static FOO: usize = 0; pub static BAR: usize = 0),
+    );
+    let tokens = quote!(pub #inner;);
+    let file = syn::parse2::<syn::File>(tokens).unwrap();
+    println!("{:#?}", file);
+
+    // Formerly parser crash.
+    let inner = Group::new(
+        Delimiter::None,
+        quote!(static FOO: usize; pub static BAR: usize),
+    );
+    let tokens = quote!(pub #inner;);
+    let file = syn::parse2::<syn::File>(tokens).unwrap();
+    println!("{:#?}", file);
+}
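// Illustrative sketch (not part of the upstream patch): the comment in
// issue1235.rs above says rustc accepts a value-less top-level `static`
// syntactically but rejects it semantically, and that syn surfaces such an
// item as Item::Verbatim rather than a structured item. A minimal,
// self-contained check of that fallback might look like this (the test name
// is mine; assumes syn's default features):

#[test]
fn static_without_value_is_verbatim() {
    let file = syn::parse_file("pub static FOO: usize;").unwrap();
    assert_eq!(file.items.len(), 1);
    match &file.items[0] {
        // syn keeps the unparsed tokens around instead of guessing a structure.
        syn::Item::Verbatim(tokens) => assert!(!tokens.is_empty()),
        _ => panic!("expected Item::Verbatim"),
    }
}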
diff --git a/tests/repo/mod.rs b/tests/repo/mod.rs
index 79e3018..8418b87 100644
--- a/tests/repo/mod.rs
+++ b/tests/repo/mod.rs
@@ -10,10 +10,10 @@
 use tar::Archive;
 use walkdir::DirEntry;
 
-const REVISION: &str = "5e57faa78aa7661c6000204591558f6665f11abc";
+const REVISION: &str = "98ad6a5519651af36e246c0335c964dd52c554ba";
 
 #[rustfmt::skip]
-static EXCLUDE: &[&str] = &[
+static EXCLUDE_FILES: &[&str] = &[
     // TODO: impl ~const T {}
     // https://github.com/dtolnay/syn/issues/1051
     "src/test/ui/rfc-2632-const-trait-impl/syntax.rs",
@@ -22,6 +22,9 @@
     "src/test/ui/const-generics/early/closing-args-token.rs",
     "src/test/ui/const-generics/early/const-expression-parameter.rs",
 
+    // Need at least one trait in impl Trait, no such type as impl 'static
+    "src/test/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
+
     // Deprecated anonymous parameter syntax in traits
     "src/test/ui/issues/issue-13105.rs",
     "src/test/ui/issues/issue-13775.rs",
@@ -30,10 +33,32 @@
     "src/tools/rustfmt/tests/source/trait.rs",
     "src/tools/rustfmt/tests/target/trait.rs",
 
+    // Various extensions to Rust syntax made up by rust-analyzer
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs",
+    "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs",
+    "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs",
+    "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs",
+
+    // Placeholder syntax for "throw expressions"
+    "src/test/pretty/yeet-expr.rs",
+    "src/test/ui/try-trait/yeet-for-option.rs",
+    "src/test/ui/try-trait/yeet-for-result.rs",
+
     // Excessive nesting
     "src/test/ui/issues/issue-74564-if-expr-stack-overflow.rs",
 
-    // Testing rustfmt on invalid syntax
+    // Testing tools on invalid syntax
+    "src/test/run-make/translation/test.rs",
+    "src/test/ui/generics/issue-94432-garbage-ice.rs",
     "src/tools/rustfmt/tests/coverage/target/comments.rs",
     "src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
     "src/tools/rustfmt/tests/parser/issue_4418.rs",
@@ -46,6 +71,9 @@
     "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
     "src/tools/rustfmt/tests/target/type.rs",
 
+    // Generated file containing a top-level expression, used with `include!`
+    "compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
+
     // Clippy lint lists represented as expressions
     "src/tools/clippy/clippy_lints/src/lib.deprecated.rs",
     "src/tools/clippy/clippy_lints/src/lib.register_all.rs",
@@ -62,9 +90,6 @@
     "src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs",
 
     // Not actually test cases
-    "src/test/rustdoc-ui/test-compile-fail2.rs",
-    "src/test/rustdoc-ui/test-compile-fail3.rs",
-    "src/test/ui/json-bom-plus-crlf-multifile-aux.rs",
     "src/test/ui/lint/expansion-time-include.rs",
     "src/test/ui/macros/auxiliary/macro-comma-support.rs",
     "src/test/ui/macros/auxiliary/macro-include-items-expr.rs",
@@ -73,38 +98,52 @@
     "src/test/ui/parser/issues/auxiliary/issue-21146-inc.rs",
 ];
 
+#[rustfmt::skip]
+static EXCLUDE_DIRS: &[&str] = &[
+    // Inputs that intentionally do not parse
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/err",
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err",
+
+    // Inputs that lex but do not necessarily parse
+    "src/tools/rust-analyzer/crates/parser/test_data/lexer",
+
+    // Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse
+    "src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures",
+    "src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures",
+];
+
 pub fn base_dir_filter(entry: &DirEntry) -> bool {
     let path = entry.path();
-    if path.is_dir() {
-        return true; // otherwise walkdir does not visit the files
-    }
-    if path.extension().map_or(true, |e| e != "rs") {
-        return false;
-    }
 
     let mut path_string = path.to_string_lossy();
     if cfg!(windows) {
         path_string = path_string.replace('\\', "/").into();
     }
-    let path = if let Some(path) = path_string.strip_prefix("tests/rust/") {
+    let path_string = if path_string == "tests/rust" {
+        return true;
+    } else if let Some(path) = path_string.strip_prefix("tests/rust/") {
         path
     } else {
         panic!("unexpected path in Rust dist: {}", path_string);
     };
 
-    if path.starts_with("src/test/compile-fail") || path.starts_with("src/test/rustfix") {
+    if path.is_dir() {
+        return !EXCLUDE_DIRS.contains(&path_string);
+    }
+
+    if path.extension().map_or(true, |e| e != "rs") {
         return false;
     }
 
-    if path.starts_with("src/test/ui") {
-        let stderr_path = entry.path().with_extension("stderr");
+    if path_string.starts_with("src/test/ui") || path_string.starts_with("src/test/rustdoc-ui") {
+        let stderr_path = path.with_extension("stderr");
         if stderr_path.exists() {
             // Expected to fail in some way
             return false;
         }
     }
 
-    !EXCLUDE.contains(&path)
+    !EXCLUDE_FILES.contains(&path_string)
 }
 
 #[allow(dead_code)]
@@ -126,12 +165,19 @@
     }
     let mut missing = String::new();
     let test_src = Path::new("tests/rust");
-    for exclude in EXCLUDE {
-        if !test_src.join(exclude).exists() {
+    for exclude in EXCLUDE_FILES {
+        if !test_src.join(exclude).is_file() {
             missing += "\ntests/rust/";
             missing += exclude;
         }
     }
+    for exclude in EXCLUDE_DIRS {
+        if !test_src.join(exclude).is_dir() {
+            missing += "\ntests/rust/";
+            missing += exclude;
+            missing += "/";
+        }
+    }
     if !missing.is_empty() {
         panic!("excluded test file does not exist:{}\n", missing);
     }
@@ -142,7 +188,7 @@
         "https://github.com/rust-lang/rust/archive/{}.tar.gz",
         REVISION
     );
-    let response = reqwest::blocking::get(&url)?.error_for_status()?;
+    let response = reqwest::blocking::get(url)?.error_for_status()?;
     let progress = Progress::new(response);
     let decoder = GzDecoder::new(progress);
     let mut archive = Archive::new(decoder);
diff --git a/tests/test_derive_input.rs b/tests/test_derive_input.rs
index 93634e5..1eff011 100644
--- a/tests/test_derive_input.rs
+++ b/tests/test_derive_input.rs
@@ -1,4 +1,4 @@
-#![allow(clippy::too_many_lines)]
+#![allow(clippy::assertions_on_result_states, clippy::too_many_lines)]
 
 #[macro_use]
 mod macros;
diff --git a/tests/test_expr.rs b/tests/test_expr.rs
index 8c1cc73..e5b151f 100644
--- a/tests/test_expr.rs
+++ b/tests/test_expr.rs
@@ -53,36 +53,37 @@
 #[rustfmt::skip]
 #[test]
 fn test_tuple_multi_index() {
+    let expected = snapshot!("tuple.0.0" as Expr, @r###"
+    Expr::Field {
+        base: Expr::Field {
+            base: Expr::Path {
+                path: Path {
+                    segments: [
+                        PathSegment {
+                            ident: "tuple",
+                            arguments: None,
+                        },
+                    ],
+                },
+            },
+            member: Unnamed(Index {
+                index: 0,
+            }),
+        },
+        member: Unnamed(Index {
+            index: 0,
+        }),
+    }
+    "###);
+
     for &input in &[
-        "tuple.0.0",
         "tuple .0.0",
         "tuple. 0.0",
         "tuple.0 .0",
         "tuple.0. 0",
         "tuple . 0 . 0",
     ] {
-        snapshot!(input as Expr, @r###"
-        Expr::Field {
-            base: Expr::Field {
-                base: Expr::Path {
-                    path: Path {
-                        segments: [
-                            PathSegment {
-                                ident: "tuple",
-                                arguments: None,
-                            },
-                        ],
-                    },
-                },
-                member: Unnamed(Index {
-                    index: 0,
-                }),
-            },
-            member: Unnamed(Index {
-                index: 0,
-            }),
-        }
-        "###);
+        assert_eq!(expected, syn::parse_str(input).unwrap());
     }
 
     for tokens in vec![
@@ -93,28 +94,7 @@
         quote!(tuple.0. 0),
         quote!(tuple . 0 . 0),
     ] {
-        snapshot!(tokens as Expr, @r###"
-        Expr::Field {
-            base: Expr::Field {
-                base: Expr::Path {
-                    path: Path {
-                        segments: [
-                            PathSegment {
-                                ident: "tuple",
-                                arguments: None,
-                            },
-                        ],
-                    },
-                },
-                member: Unnamed(Index {
-                    index: 0,
-                }),
-            },
-            member: Unnamed(Index {
-                index: 0,
-            }),
-        }
-        "###);
+        assert_eq!(expected, syn::parse2(tokens).unwrap());
     }
 }
 
@@ -318,3 +298,9 @@
     }
     "###);
 }
+
+#[test]
+fn test_postfix_operator_after_cast() {
+    syn::parse_str::<Expr>("|| &x as T[0]").unwrap_err();
+    syn::parse_str::<Expr>("|| () as ()()").unwrap_err();
+}
diff --git a/tests/test_iterators.rs b/tests/test_iterators.rs
index 2c8359c..0ab0fb9 100644
--- a/tests/test_iterators.rs
+++ b/tests/test_iterators.rs
@@ -47,3 +47,22 @@
     assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
     assert_eq!(p.into_iter().next_back(), Some(4));
 }
+
+#[test]
+fn may_dangle() {
+    let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+    for element in &p {
+        if *element == 2 {
+            drop(p);
+            break;
+        }
+    }
+
+    let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+    for element in &mut p {
+        if *element == 2 {
+            drop(p);
+            break;
+        }
+    }
+}
diff --git a/tests/test_parse_stream.rs b/tests/test_parse_stream.rs
index 76bd065..cc14fa0 100644
--- a/tests/test_parse_stream.rs
+++ b/tests/test_parse_stream.rs
@@ -4,9 +4,9 @@
 
 #[test]
 fn test_peek() {
-    let _ = |input: ParseStream| {
-        let _ = input.peek(Ident);
-        let _ = input.peek(Ident::peek_any);
-        let _ = input.peek(Token![::]);
+    _ = |input: ParseStream| {
+        _ = input.peek(Ident);
+        _ = input.peek(Ident::peek_any);
+        _ = input.peek(Token![::]);
     };
 }
diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index 7f8128e..dbcd74f 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -24,6 +24,7 @@
 extern crate rustc_ast;
 extern crate rustc_data_structures;
 extern crate rustc_span;
+extern crate thin_vec;
 
 use crate::common::eq::SpanlessEq;
 use crate::common::parse;
@@ -196,7 +197,7 @@
 }
 
 /// Wrap every expression which is not already wrapped in parens with parens, to
-/// reveal the precidence of the parsed expressions, and produce a stringified
+/// reveal the precedence of the parsed expressions, and produce a stringified
 /// form of the resulting expression.
 ///
 /// This method operates on librustc objects.
@@ -207,10 +208,10 @@
     };
     use rustc_ast::mut_visit::{noop_visit_generic_arg, noop_visit_local, MutVisitor};
     use rustc_data_structures::map_in_place::MapInPlace;
-    use rustc_data_structures::thin_vec::ThinVec;
     use rustc_span::DUMMY_SP;
     use std::mem;
     use std::ops::DerefMut;
+    use thin_vec::ThinVec;
 
     struct BracketsVisitor {
         failed: bool,
@@ -243,7 +244,7 @@
     }
 
     fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
-        use rustc_ast::mut_visit::{noop_visit_expr, visit_thin_attrs};
+        use rustc_ast::mut_visit::{noop_visit_expr, visit_attrs};
         match &mut e.kind {
             ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
             ExprKind::Struct(expr) => {
@@ -261,7 +262,7 @@
                 }
                 vis.visit_id(&mut e.id);
                 vis.visit_span(&mut e.span);
-                visit_thin_attrs(&mut e.attrs, vis);
+                visit_attrs(&mut e.attrs, vis);
             }
             _ => noop_visit_expr(e, vis),
         }
@@ -322,15 +323,15 @@
         // types yet. We'll look into comparing those in the future. For now
         // focus on expressions appearing in other places.
         fn visit_pat(&mut self, pat: &mut P<Pat>) {
-            let _ = pat;
+            _ = pat;
         }
 
         fn visit_ty(&mut self, ty: &mut P<Ty>) {
-            let _ = ty;
+            _ = ty;
         }
 
         fn visit_attribute(&mut self, attr: &mut Attribute) {
-            let _ = attr;
+            _ = attr;
         }
     }
 
@@ -426,7 +427,7 @@
 fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
     use syn::fold::Fold;
     use syn::punctuated::Punctuated;
-    use syn::{token, Expr, ExprTuple};
+    use syn::{token, ConstParam, Expr, ExprTuple, Path};
 
     struct CollectExprs(Vec<Expr>);
     impl Fold for CollectExprs {
@@ -442,6 +443,15 @@
                 paren_token: token::Paren::default(),
             })
         }
+
+        fn fold_path(&mut self, path: Path) -> Path {
+            // Skip traversing into const generic path arguments
+            path
+        }
+
+        fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
+            const_param
+        }
     }
 
     let mut folder = CollectExprs(vec![]);
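// Illustrative sketch (not taken verbatim from this test suite): the doc
// comment earlier in this file describes revealing operator precedence by
// wrapping every expression that is not already parenthesized in explicit
// parentheses before stringifying. On the syn side, the same idea expressed
// with the Fold trait could look roughly like this (the type name is mine;
// assumes syn's "full" and "fold" features):

use syn::fold::Fold;

struct ParenthesizeEverything;

impl Fold for ParenthesizeEverything {
    fn fold_expr(&mut self, expr: syn::Expr) -> syn::Expr {
        // Recurse first so inner expressions are wrapped before the outer one.
        let folded = syn::fold::fold_expr(self, expr);
        match folded {
            // Already parenthesized; leave it alone.
            syn::Expr::Paren(_) => folded,
            other => syn::Expr::Paren(syn::ExprParen {
                attrs: Vec::new(),
                paren_token: syn::token::Paren::default(),
                expr: Box::new(other),
            }),
        }
    }
}

// Folding a parsed expression with this visitor and pretty-printing the result
// yields a string in which precedence is explicit, which can then be compared
// against the librustc-parenthesized form produced by librustc_parenthesize.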
diff --git a/tests/test_round_trip.rs b/tests/test_round_trip.rs
index 6c1cf22..9a5801d 100644
--- a/tests/test_round_trip.rs
+++ b/tests/test_round_trip.rs
@@ -5,6 +5,8 @@
 #![allow(clippy::manual_assert)]
 
 extern crate rustc_ast;
+extern crate rustc_data_structures;
+extern crate rustc_error_messages;
 extern crate rustc_errors;
 extern crate rustc_expand;
 extern crate rustc_parse as parse;
@@ -19,7 +21,8 @@
     WhereClause,
 };
 use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_errors::PResult;
+use rustc_error_messages::{DiagnosticMessage, LazyFallbackBundle};
+use rustc_errors::{translation, Diagnostic, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::FilePathMapping;
 use rustc_span::FileName;
@@ -93,20 +96,13 @@
             let sess = ParseSess::new(FilePathMapping::empty());
             let before = match librustc_parse(content, &sess) {
                 Ok(before) => before,
-                Err(mut diagnostic) => {
+                Err(diagnostic) => {
+                    errorf!(
+                        "=== {}: ignore - librustc failed to parse original content: {}\n",
+                        path.display(),
+                        translate_message(&diagnostic),
+                    );
                     diagnostic.cancel();
-                    if diagnostic
-                        .message()
-                        .starts_with("file not found for module")
-                    {
-                        errorf!("=== {}: ignore\n", path.display());
-                    } else {
-                        errorf!(
-                            "=== {}: ignore - librustc failed to parse original content: {}\n",
-                            path.display(),
-                            diagnostic.message(),
-                        );
-                    }
                     return Err(true);
                 }
             };
@@ -162,6 +158,42 @@
     parse::parse_crate_from_source_str(name, content, sess)
 }
 
+fn translate_message(diagnostic: &Diagnostic) -> String {
+    thread_local! {
+        static FLUENT_BUNDLE: LazyFallbackBundle = {
+            let resources = rustc_error_messages::DEFAULT_LOCALE_RESOURCES;
+            let with_directionality_markers = false;
+            rustc_error_messages::fallback_fluent_bundle(resources, with_directionality_markers)
+        };
+    }
+
+    let message = &diagnostic.message[0].0;
+    let args = translation::to_fluent_args(diagnostic.args());
+
+    let (identifier, attr) = match message {
+        DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => return msg.clone(),
+        DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
+    };
+
+    FLUENT_BUNDLE.with(|fluent_bundle| {
+        let message = fluent_bundle
+            .get_message(identifier)
+            .expect("missing diagnostic in fluent bundle");
+        let value = match attr {
+            Some(attr) => message
+                .get_attribute(attr)
+                .expect("missing attribute in fluent message")
+                .value(),
+            None => message.value().expect("missing value in fluent message"),
+        };
+
+        let mut err = Vec::new();
+        let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
+        assert!(err.is_empty());
+        translated.into_owned()
+    })
+}
+
 fn normalize(krate: &mut Crate) {
     struct NormalizeVisitor;
 
diff --git a/tests/test_size.rs b/tests/test_size.rs
index e172df2..32c6eda 100644
--- a/tests/test_size.rs
+++ b/tests/test_size.rs
@@ -5,17 +5,17 @@
 
 #[test]
 fn test_expr_size() {
-    assert_eq!(mem::size_of::<Expr>(), 280);
+    assert_eq!(mem::size_of::<Expr>(), 272);
 }
 
 #[test]
 fn test_item_size() {
-    assert_eq!(mem::size_of::<Item>(), 344);
+    assert_eq!(mem::size_of::<Item>(), 320);
 }
 
 #[test]
 fn test_type_size() {
-    assert_eq!(mem::size_of::<Type>(), 304);
+    assert_eq!(mem::size_of::<Type>(), 288);
 }
 
 #[test]
@@ -25,5 +25,5 @@
 
 #[test]
 fn test_lit_size() {
-    assert_eq!(mem::size_of::<Lit>(), 40);
+    assert_eq!(mem::size_of::<Lit>(), 32);
 }
diff --git a/tests/test_stmt.rs b/tests/test_stmt.rs
index dbe1c76..f444e5b 100644
--- a/tests/test_stmt.rs
+++ b/tests/test_stmt.rs
@@ -1,9 +1,10 @@
-#![allow(clippy::non_ascii_literal)]
+#![allow(clippy::assertions_on_result_states, clippy::non_ascii_literal)]
 
 #[macro_use]
 mod macros;
 
 use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+use quote::quote;
 use std::iter::FromIterator;
 use syn::Stmt;
 
@@ -74,3 +75,19 @@
     })
     "###);
 }
+
+#[test]
+fn test_let_dot_dot() {
+    let tokens = quote! {
+        let .. = 10;
+    };
+
+    snapshot!(tokens as Stmt, @r###"
+    Local(Local {
+        pat: Pat::Rest,
+        init: Some(Expr::Lit {
+            lit: 10,
+        }),
+    })
+    "###);
+}
diff --git a/tests/test_ty.rs b/tests/test_ty.rs
index a03abaa..335cafa 100644
--- a/tests/test_ty.rs
+++ b/tests/test_ty.rs
@@ -9,6 +9,7 @@
 #[test]
 fn test_mut_self() {
     syn::parse_str::<Type>("fn(mut self)").unwrap();
+    syn::parse_str::<Type>("fn(mut self,)").unwrap();
     syn::parse_str::<Type>("fn(mut self: ())").unwrap();
     syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
     syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();