# HG changeset patch
# User Emilio Cobos Álvarez <emilio@crisal.io>
# Date 1599584448 0
# Node ID 85c38ea4d34969797eb5d24265cd90cc6841e6ae
# Parent 5aa243a2fe9d77578dd95ce3ab3a2aa6c1e92604
Bug 1663715 - Update syn and proc-macro2 so that Firefox can build on Rust nightly again. r=froydnj, a=RyanVM

Generated with:

cargo update -p syn --precise 1.0.40
./mach vendor rust

Rust issue: https://github.com/rust-lang/rust/issues/76482

Differential Revision: https://phabricator.services.mozilla.com/D89473

diff --git a/Cargo.lock b/Cargo.lock
|
|
--- a/Cargo.lock
|
|
+++ b/Cargo.lock
|
|
@@ -3712,19 +3712,19 @@ checksum = "ecd45702f76d6d3c75a80564378a
|
|
dependencies = [
|
|
"proc-macro2",
|
|
"quote",
|
|
"syn",
|
|
]
|
|
|
|
[[package]]
|
|
name = "proc-macro2"
|
|
-version = "1.0.5"
|
|
-source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
|
|
+version = "1.0.20"
|
|
+source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
+checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
|
|
dependencies = [
|
|
"unicode-xid",
|
|
]
|
|
|
|
[[package]]
|
|
name = "procedural-masquerade"
|
|
version = "0.1.1"
|
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
@@ -4642,19 +4642,19 @@ dependencies = [
|
|
"cc",
|
|
"gleam",
|
|
"glsl-to-cxx",
|
|
"webrender_build",
|
|
]
|
|
|
|
[[package]]
|
|
name = "syn"
|
|
-version = "1.0.5"
|
|
-source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
|
|
+version = "1.0.40"
|
|
+source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
+checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
|
|
dependencies = [
|
|
"proc-macro2",
|
|
"quote",
|
|
"unicode-xid",
|
|
]
|
|
|
|
[[package]]
|
|
name = "sync-guid"
|
|
diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
|
|
--- a/third_party/rust/proc-macro2/.cargo-checksum.json
|
|
+++ b/third_party/rust/proc-macro2/.cargo-checksum.json
|
|
@@ -1,1 +1,1 @@
|
|
-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
|
|
\ No newline at end of file
|
|
+{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}
|
|
\ No newline at end of file
|
|
diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
|
|
--- a/third_party/rust/proc-macro2/Cargo.toml
|
|
+++ b/third_party/rust/proc-macro2/Cargo.toml
|
|
@@ -8,36 +8,35 @@
|
|
# If you believe there's an error in this file please file an
|
|
# issue against the rust-lang/cargo repository. If you're
|
|
# editing this file be aware that the upstream Cargo.toml
|
|
# will likely look very different (and much more reasonable)
|
|
|
|
[package]
|
|
edition = "2018"
|
|
name = "proc-macro2"
|
|
-version = "1.0.5"
|
|
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
|
-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
|
|
-homepage = "https://github.com/alexcrichton/proc-macro2"
|
|
+version = "1.0.20"
|
|
+authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
|
|
+description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
|
|
documentation = "https://docs.rs/proc-macro2"
|
|
readme = "README.md"
|
|
keywords = ["macros"]
|
|
+categories = ["development-tools::procedural-macro-helpers"]
|
|
license = "MIT OR Apache-2.0"
|
|
repository = "https://github.com/alexcrichton/proc-macro2"
|
|
[package.metadata.docs.rs]
|
|
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
|
|
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
|
|
+targets = ["x86_64-unknown-linux-gnu"]
|
|
|
|
-[lib]
|
|
-name = "proc_macro2"
|
|
+[package.metadata.playground]
|
|
+features = ["span-locations"]
|
|
[dependencies.unicode-xid]
|
|
version = "0.2"
|
|
[dev-dependencies.quote]
|
|
version = "1.0"
|
|
default_features = false
|
|
|
|
[features]
|
|
default = ["proc-macro"]
|
|
nightly = []
|
|
proc-macro = []
|
|
span-locations = []
|
|
-[badges.travis-ci]
|
|
-repository = "alexcrichton/proc-macro2"
|
|
diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
|
|
--- a/third_party/rust/proc-macro2/README.md
|
|
+++ b/third_party/rust/proc-macro2/README.md
|
|
@@ -1,11 +1,11 @@
|
|
# proc-macro2
|
|
|
|
-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
|
|
+[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
|
|
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
|
|
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
|
|
|
|
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
|
|
This library serves two purposes:
|
|
|
|
- **Bring proc-macro-like functionality to other contexts like build.rs and
|
|
main.rs.** Types from `proc_macro` are entirely specific to procedural macros
|
|
diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
|
|
--- a/third_party/rust/proc-macro2/build.rs
|
|
+++ b/third_party/rust/proc-macro2/build.rs
|
|
@@ -9,16 +9,20 @@
|
|
// "wrap_proc_macro"
|
|
// Wrap types from libproc_macro rather than polyfilling the whole API.
|
|
// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
|
|
// because we can't emulate the unstable API without emulating everything
|
|
// else. Also enabled unconditionally on nightly, in which case the
|
|
// procmacro2_semver_exempt surface area is implemented by using the
|
|
// nightly-only proc_macro API.
|
|
//
|
|
+// "hygiene"
|
|
+// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
|
|
+// and Span::located_at. Enabled on Rust 1.45+.
|
|
+//
|
|
// "proc_macro_span"
|
|
// Enable non-dummy behavior of Span::start and Span::end methods which
|
|
// requires an unstable compiler feature. Enabled when building with
|
|
// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
|
|
// features.
|
|
//
|
|
// "super_unstable"
|
|
// Implement the semver exempt API in terms of the nightly-only proc_macro
|
|
@@ -52,16 +56,24 @@ fn main() {
|
|
// https://github.com/alexcrichton/proc-macro2/issues/147
|
|
println!("cargo:rustc-cfg=procmacro2_semver_exempt");
|
|
}
|
|
|
|
if semver_exempt || cfg!(feature = "span-locations") {
|
|
println!("cargo:rustc-cfg=span_locations");
|
|
}
|
|
|
|
+ if version.minor < 39 {
|
|
+ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
|
|
+ }
|
|
+
|
|
+ if version.minor >= 45 {
|
|
+ println!("cargo:rustc-cfg=hygiene");
|
|
+ }
|
|
+
|
|
let target = env::var("TARGET").unwrap();
|
|
if !enable_use_proc_macro(&target) {
|
|
return;
|
|
}
|
|
|
|
println!("cargo:rustc-cfg=use_proc_macro");
|
|
|
|
if version.nightly || !semver_exempt {
|
|
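The build.rs hunk above gates new functionality on the detected rustc minor version by emitting cargo:rustc-cfg flags that the crate's code can test with #[cfg(...)]. As a rough standalone illustration of that pattern (not part of this patch; the MIN_HYGIENE_MINOR constant and the exact version parsing are assumptions for the sketch):

// Illustrative build.rs sketch: emit a cfg flag when the detected rustc minor
// version is at least a threshold. MIN_HYGIENE_MINOR and the parsing below are
// assumptions for this sketch, not the exact probe proc-macro2 ships.
use std::env;
use std::process::Command;

fn main() {
    // Cargo sets RUSTC to the compiler it will use; fall back to "rustc".
    let rustc = env::var("RUSTC").unwrap_or_else(|_| "rustc".to_string());
    let output = Command::new(rustc)
        .arg("--version")
        .output()
        .expect("failed to run rustc --version");
    let version = String::from_utf8(output.stdout).expect("non-UTF-8 rustc output");

    // "rustc 1.45.2 (...)": the minor version is the second dot-separated field.
    let minor: u32 = version
        .split('.')
        .nth(1)
        .and_then(|s| s.parse().ok())
        .expect("unexpected rustc version format");

    const MIN_HYGIENE_MINOR: u32 = 45; // hypothetical threshold for the sketch
    if minor >= MIN_HYGIENE_MINOR {
        // Lets the crate write #[cfg(hygiene)] on code that needs Rust 1.45+.
        println!("cargo:rustc-cfg=hygiene");
    }
}
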
diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/proc-macro2/src/detection.rs
|
|
@@ -0,0 +1,67 @@
|
|
+use std::panic::{self, PanicInfo};
|
|
+use std::sync::atomic::*;
|
|
+use std::sync::Once;
|
|
+
|
|
+static WORKS: AtomicUsize = AtomicUsize::new(0);
|
|
+static INIT: Once = Once::new();
|
|
+
|
|
+pub(crate) fn inside_proc_macro() -> bool {
|
|
+ match WORKS.load(Ordering::SeqCst) {
|
|
+ 1 => return false,
|
|
+ 2 => return true,
|
|
+ _ => {}
|
|
+ }
|
|
+
|
|
+ INIT.call_once(initialize);
|
|
+ inside_proc_macro()
|
|
+}
|
|
+
|
|
+pub(crate) fn force_fallback() {
|
|
+ WORKS.store(1, Ordering::SeqCst);
|
|
+}
|
|
+
|
|
+pub(crate) fn unforce_fallback() {
|
|
+ initialize();
|
|
+}
|
|
+
|
|
+// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
|
|
+// then use catch_unwind to determine whether the compiler's proc_macro is
|
|
+// working. When proc-macro2 is used from outside of a procedural macro all
|
|
+// of the proc_macro crate's APIs currently panic.
|
|
+//
|
|
+// The Once is to prevent the possibility of this ordering:
|
|
+//
|
|
+// thread 1 calls take_hook, gets the user's original hook
|
|
+// thread 1 calls set_hook with the null hook
|
|
+// thread 2 calls take_hook, thinks null hook is the original hook
|
|
+// thread 2 calls set_hook with the null hook
|
|
+// thread 1 calls set_hook with the actual original hook
|
|
+// thread 2 calls set_hook with what it thinks is the original hook
|
|
+//
|
|
+// in which the user's hook has been lost.
|
|
+//
|
|
+// There is still a race condition where a panic in a different thread can
|
|
+// happen during the interval that the user's original panic hook is
|
|
+// unregistered such that their hook is incorrectly not called. This is
|
|
+// sufficiently unlikely and less bad than printing panic messages to stderr
|
|
+// on correct use of this crate. Maybe there is a libstd feature request
|
|
+// here. For now, if a user needs to guarantee that this failure mode does
|
|
+// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
|
|
+// the main thread before launching any other threads.
|
|
+fn initialize() {
|
|
+ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
|
|
+
|
|
+ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
|
|
+ let sanity_check = &*null_hook as *const PanicHook;
|
|
+ let original_hook = panic::take_hook();
|
|
+ panic::set_hook(null_hook);
|
|
+
|
|
+ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
|
|
+ WORKS.store(works as usize + 1, Ordering::SeqCst);
|
|
+
|
|
+ let hopefully_null_hook = panic::take_hook();
|
|
+ panic::set_hook(original_hook);
|
|
+ if sanity_check != &*hopefully_null_hook {
|
|
+ panic!("observed race condition in proc_macro2::inside_proc_macro");
|
|
+ }
|
|
+}
|
|
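The detection module added above decides whether the compiler's proc_macro is usable by installing a silent panic hook and probing a panicking call under catch_unwind. A minimal standalone sketch of that idea (not part of this patch; probe() is a hypothetical stand-in for proc_macro::Span::call_site, and the AtomicUsize/Once caching and race-condition check are omitted):

// Minimal sketch of the panic-hook probe used in detection.rs above. probe()
// is a hypothetical stand-in for an API that panics when unavailable; the
// real module also caches the result and guards initialization with a Once.
use std::panic;

fn probe() {
    // Stand-in: panics, the way proc_macro's APIs do outside a macro expansion.
    panic!("not callable in this context");
}

fn is_available() -> bool {
    // Silence "thread panicked" output while probing, then restore the hook.
    let original = panic::take_hook();
    panic::set_hook(Box::new(|_| { /* swallow the probe's panic message */ }));
    let works = panic::catch_unwind(probe).is_ok();
    panic::set_hook(original);
    works
}

fn main() {
    println!("available: {}", is_available());
}
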
diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
|
|
--- a/third_party/rust/proc-macro2/src/fallback.rs
|
|
+++ b/third_party/rust/proc-macro2/src/fallback.rs
|
|
@@ -1,41 +1,121 @@
|
|
+use crate::parse::{token_stream, Cursor};
|
|
+use crate::{Delimiter, Spacing, TokenTree};
|
|
#[cfg(span_locations)]
|
|
use std::cell::RefCell;
|
|
#[cfg(span_locations)]
|
|
use std::cmp;
|
|
-use std::fmt;
|
|
-use std::iter;
|
|
+use std::fmt::{self, Debug, Display};
|
|
+use std::iter::FromIterator;
|
|
+use std::mem;
|
|
use std::ops::RangeBounds;
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
use std::path::Path;
|
|
use std::path::PathBuf;
|
|
use std::str::FromStr;
|
|
use std::vec;
|
|
-
|
|
-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
|
|
-use crate::{Delimiter, Punct, Spacing, TokenTree};
|
|
use unicode_xid::UnicodeXID;
|
|
|
|
+/// Force use of proc-macro2's fallback implementation of the API for now, even
|
|
+/// if the compiler's implementation is available.
|
|
+pub fn force() {
|
|
+ #[cfg(wrap_proc_macro)]
|
|
+ crate::detection::force_fallback();
|
|
+}
|
|
+
|
|
+/// Resume using the compiler's implementation of the proc macro API if it is
|
|
+/// available.
|
|
+pub fn unforce() {
|
|
+ #[cfg(wrap_proc_macro)]
|
|
+ crate::detection::unforce_fallback();
|
|
+}
|
|
+
|
|
#[derive(Clone)]
|
|
-pub struct TokenStream {
|
|
- inner: Vec<TokenTree>,
|
|
+pub(crate) struct TokenStream {
|
|
+ pub(crate) inner: Vec<TokenTree>,
|
|
}
|
|
|
|
#[derive(Debug)]
|
|
-pub struct LexError;
|
|
+pub(crate) struct LexError;
|
|
|
|
impl TokenStream {
|
|
pub fn new() -> TokenStream {
|
|
TokenStream { inner: Vec::new() }
|
|
}
|
|
|
|
pub fn is_empty(&self) -> bool {
|
|
self.inner.len() == 0
|
|
}
|
|
+
|
|
+ fn take_inner(&mut self) -> Vec<TokenTree> {
|
|
+ mem::replace(&mut self.inner, Vec::new())
|
|
+ }
|
|
+
|
|
+ fn push_token(&mut self, token: TokenTree) {
|
|
+ // https://github.com/alexcrichton/proc-macro2/issues/235
|
|
+ match token {
|
|
+ #[cfg(not(no_bind_by_move_pattern_guard))]
|
|
+ TokenTree::Literal(crate::Literal {
|
|
+ #[cfg(wrap_proc_macro)]
|
|
+ inner: crate::imp::Literal::Fallback(literal),
|
|
+ #[cfg(not(wrap_proc_macro))]
|
|
+ inner: literal,
|
|
+ ..
|
|
+ }) if literal.text.starts_with('-') => {
|
|
+ push_negative_literal(self, literal);
|
|
+ }
|
|
+ #[cfg(no_bind_by_move_pattern_guard)]
|
|
+ TokenTree::Literal(crate::Literal {
|
|
+ #[cfg(wrap_proc_macro)]
|
|
+ inner: crate::imp::Literal::Fallback(literal),
|
|
+ #[cfg(not(wrap_proc_macro))]
|
|
+ inner: literal,
|
|
+ ..
|
|
+ }) => {
|
|
+ if literal.text.starts_with('-') {
|
|
+ push_negative_literal(self, literal);
|
|
+ } else {
|
|
+ self.inner
|
|
+ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
|
|
+ }
|
|
+ }
|
|
+ _ => self.inner.push(token),
|
|
+ }
|
|
+
|
|
+ #[cold]
|
|
+ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
|
|
+ literal.text.remove(0);
|
|
+ let mut punct = crate::Punct::new('-', Spacing::Alone);
|
|
+ punct.set_span(crate::Span::_new_stable(literal.span));
|
|
+ stream.inner.push(TokenTree::Punct(punct));
|
|
+ stream
|
|
+ .inner
|
|
+ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+// Nonrecursive to prevent stack overflow.
|
|
+impl Drop for TokenStream {
|
|
+ fn drop(&mut self) {
|
|
+ while let Some(token) = self.inner.pop() {
|
|
+ let group = match token {
|
|
+ TokenTree::Group(group) => group.inner,
|
|
+ _ => continue,
|
|
+ };
|
|
+ #[cfg(wrap_proc_macro)]
|
|
+ let group = match group {
|
|
+ crate::imp::Group::Fallback(group) => group,
|
|
+ _ => continue,
|
|
+ };
|
|
+ let mut group = group;
|
|
+ self.inner.extend(group.stream.take_inner());
|
|
+ }
|
|
+ }
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
fn get_cursor(src: &str) -> Cursor {
|
|
// Create a dummy file & add it to the source map
|
|
SOURCE_MAP.with(|cm| {
|
|
let mut cm = cm.borrow_mut();
|
|
let name = format!("<parsed string {}>", cm.files.len());
|
|
@@ -54,68 +134,49 @@ fn get_cursor(src: &str) -> Cursor {
|
|
|
|
impl FromStr for TokenStream {
|
|
type Err = LexError;
|
|
|
|
fn from_str(src: &str) -> Result<TokenStream, LexError> {
|
|
// Create a dummy file & add it to the source map
|
|
let cursor = get_cursor(src);
|
|
|
|
- match token_stream(cursor) {
|
|
- Ok((input, output)) => {
|
|
- if skip_whitespace(input).len() != 0 {
|
|
- Err(LexError)
|
|
- } else {
|
|
- Ok(output)
|
|
- }
|
|
- }
|
|
- Err(LexError) => Err(LexError),
|
|
+ let (rest, tokens) = token_stream(cursor)?;
|
|
+ if rest.is_empty() {
|
|
+ Ok(tokens)
|
|
+ } else {
|
|
+ Err(LexError)
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for TokenStream {
|
|
+impl Display for TokenStream {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
let mut joint = false;
|
|
for (i, tt) in self.inner.iter().enumerate() {
|
|
if i != 0 && !joint {
|
|
write!(f, " ")?;
|
|
}
|
|
joint = false;
|
|
- match *tt {
|
|
- TokenTree::Group(ref tt) => {
|
|
- let (start, end) = match tt.delimiter() {
|
|
- Delimiter::Parenthesis => ("(", ")"),
|
|
- Delimiter::Brace => ("{", "}"),
|
|
- Delimiter::Bracket => ("[", "]"),
|
|
- Delimiter::None => ("", ""),
|
|
- };
|
|
- if tt.stream().into_iter().next().is_none() {
|
|
- write!(f, "{} {}", start, end)?
|
|
- } else {
|
|
- write!(f, "{} {} {}", start, tt.stream(), end)?
|
|
- }
|
|
+ match tt {
|
|
+ TokenTree::Group(tt) => Display::fmt(tt, f),
|
|
+ TokenTree::Ident(tt) => Display::fmt(tt, f),
|
|
+ TokenTree::Punct(tt) => {
|
|
+ joint = tt.spacing() == Spacing::Joint;
|
|
+ Display::fmt(tt, f)
|
|
}
|
|
- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
|
|
- TokenTree::Punct(ref tt) => {
|
|
- write!(f, "{}", tt.as_char())?;
|
|
- match tt.spacing() {
|
|
- Spacing::Alone => {}
|
|
- Spacing::Joint => joint = true,
|
|
- }
|
|
- }
|
|
- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
|
|
- }
|
|
+ TokenTree::Literal(tt) => Display::fmt(tt, f),
|
|
+ }?
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for TokenStream {
|
|
+impl Debug for TokenStream {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
f.write_str("TokenStream ")?;
|
|
f.debug_list().entries(self.clone()).finish()
|
|
}
|
|
}
|
|
|
|
#[cfg(use_proc_macro)]
|
|
impl From<proc_macro::TokenStream> for TokenStream {
|
|
@@ -134,122 +195,107 @@ impl From<TokenStream> for proc_macro::T
|
|
.to_string()
|
|
.parse()
|
|
.expect("failed to parse to compiler tokens")
|
|
}
|
|
}
|
|
|
|
impl From<TokenTree> for TokenStream {
|
|
fn from(tree: TokenTree) -> TokenStream {
|
|
- TokenStream { inner: vec![tree] }
|
|
+ let mut stream = TokenStream::new();
|
|
+ stream.push_token(tree);
|
|
+ stream
|
|
}
|
|
}
|
|
|
|
-impl iter::FromIterator<TokenTree> for TokenStream {
|
|
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
|
|
- let mut v = Vec::new();
|
|
-
|
|
- for token in streams.into_iter() {
|
|
- v.push(token);
|
|
- }
|
|
-
|
|
- TokenStream { inner: v }
|
|
+impl FromIterator<TokenTree> for TokenStream {
|
|
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
|
|
+ let mut stream = TokenStream::new();
|
|
+ stream.extend(tokens);
|
|
+ stream
|
|
}
|
|
}
|
|
|
|
-impl iter::FromIterator<TokenStream> for TokenStream {
|
|
+impl FromIterator<TokenStream> for TokenStream {
|
|
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
|
|
let mut v = Vec::new();
|
|
|
|
- for stream in streams.into_iter() {
|
|
- v.extend(stream.inner);
|
|
+ for mut stream in streams {
|
|
+ v.extend(stream.take_inner());
|
|
}
|
|
|
|
TokenStream { inner: v }
|
|
}
|
|
}
|
|
|
|
impl Extend<TokenTree> for TokenStream {
|
|
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
|
- self.inner.extend(streams);
|
|
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
|
|
+ tokens.into_iter().for_each(|token| self.push_token(token));
|
|
}
|
|
}
|
|
|
|
impl Extend<TokenStream> for TokenStream {
|
|
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
|
|
- self.inner
|
|
- .extend(streams.into_iter().flat_map(|stream| stream));
|
|
+ self.inner.extend(streams.into_iter().flatten());
|
|
}
|
|
}
|
|
|
|
-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
|
|
+pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
|
|
|
|
impl IntoIterator for TokenStream {
|
|
type Item = TokenTree;
|
|
type IntoIter = TokenTreeIter;
|
|
|
|
- fn into_iter(self) -> TokenTreeIter {
|
|
- self.inner.into_iter()
|
|
+ fn into_iter(mut self) -> TokenTreeIter {
|
|
+ self.take_inner().into_iter()
|
|
}
|
|
}
|
|
|
|
#[derive(Clone, PartialEq, Eq)]
|
|
-pub struct SourceFile {
|
|
+pub(crate) struct SourceFile {
|
|
path: PathBuf,
|
|
}
|
|
|
|
impl SourceFile {
|
|
/// Get the path to this source file as a string.
|
|
pub fn path(&self) -> PathBuf {
|
|
self.path.clone()
|
|
}
|
|
|
|
pub fn is_real(&self) -> bool {
|
|
// XXX(nika): Support real files in the future?
|
|
false
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for SourceFile {
|
|
+impl Debug for SourceFile {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
f.debug_struct("SourceFile")
|
|
.field("path", &self.path())
|
|
.field("is_real", &self.is_real())
|
|
.finish()
|
|
}
|
|
}
|
|
|
|
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
|
-pub struct LineColumn {
|
|
+pub(crate) struct LineColumn {
|
|
pub line: usize,
|
|
pub column: usize,
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
thread_local! {
|
|
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
|
|
// NOTE: We start with a single dummy file which all call_site() and
|
|
// def_site() spans reference.
|
|
- files: vec![{
|
|
+ files: vec![FileInfo {
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
- {
|
|
- FileInfo {
|
|
- name: "<unspecified>".to_owned(),
|
|
- span: Span { lo: 0, hi: 0 },
|
|
- lines: vec![0],
|
|
- }
|
|
- }
|
|
-
|
|
- #[cfg(not(procmacro2_semver_exempt))]
|
|
- {
|
|
- FileInfo {
|
|
- span: Span { lo: 0, hi: 0 },
|
|
- lines: vec![0],
|
|
- }
|
|
- }
|
|
+ name: "<unspecified>".to_owned(),
|
|
+ span: Span { lo: 0, hi: 0 },
|
|
+ lines: vec![0],
|
|
}],
|
|
});
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
struct FileInfo {
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
name: String,
|
|
@@ -277,26 +323,31 @@ impl FileInfo {
|
|
}
|
|
}
|
|
|
|
fn span_within(&self, span: Span) -> bool {
|
|
span.lo >= self.span.lo && span.hi <= self.span.hi
|
|
}
|
|
}
|
|
|
|
-/// Computesthe offsets of each line in the given source string.
|
|
+/// Computes the offsets of each line in the given source string
|
|
+/// and the total number of characters
|
|
#[cfg(span_locations)]
|
|
-fn lines_offsets(s: &str) -> Vec<usize> {
|
|
+fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
|
|
let mut lines = vec![0];
|
|
- let mut prev = 0;
|
|
- while let Some(len) = s[prev..].find('\n') {
|
|
- prev += len + 1;
|
|
- lines.push(prev);
|
|
+ let mut total = 0;
|
|
+
|
|
+ for ch in s.chars() {
|
|
+ total += 1;
|
|
+ if ch == '\n' {
|
|
+ lines.push(total);
|
|
+ }
|
|
}
|
|
- lines
|
|
+
|
|
+ (total, lines)
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
struct SourceMap {
|
|
files: Vec<FileInfo>,
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
@@ -305,81 +356,83 @@ impl SourceMap {
|
|
// Add 1 so there's always space between files.
|
|
//
|
|
// We'll always have at least 1 file, as we initialize our files list
|
|
// with a dummy file.
|
|
self.files.last().unwrap().span.hi + 1
|
|
}
|
|
|
|
fn add_file(&mut self, name: &str, src: &str) -> Span {
|
|
- let lines = lines_offsets(src);
|
|
+ let (len, lines) = lines_offsets(src);
|
|
let lo = self.next_start_pos();
|
|
// XXX(nika): Shouild we bother doing a checked cast or checked add here?
|
|
let span = Span {
|
|
lo,
|
|
- hi: lo + (src.len() as u32),
|
|
+ hi: lo + (len as u32),
|
|
};
|
|
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
self.files.push(FileInfo {
|
|
+ #[cfg(procmacro2_semver_exempt)]
|
|
name: name.to_owned(),
|
|
span,
|
|
lines,
|
|
});
|
|
|
|
#[cfg(not(procmacro2_semver_exempt))]
|
|
- self.files.push(FileInfo { span, lines });
|
|
let _ = name;
|
|
|
|
span
|
|
}
|
|
|
|
fn fileinfo(&self, span: Span) -> &FileInfo {
|
|
for file in &self.files {
|
|
if file.span_within(span) {
|
|
return file;
|
|
}
|
|
}
|
|
panic!("Invalid span with no related FileInfo!");
|
|
}
|
|
}
|
|
|
|
#[derive(Clone, Copy, PartialEq, Eq)]
|
|
-pub struct Span {
|
|
+pub(crate) struct Span {
|
|
#[cfg(span_locations)]
|
|
- lo: u32,
|
|
+ pub(crate) lo: u32,
|
|
#[cfg(span_locations)]
|
|
- hi: u32,
|
|
+ pub(crate) hi: u32,
|
|
}
|
|
|
|
impl Span {
|
|
#[cfg(not(span_locations))]
|
|
pub fn call_site() -> Span {
|
|
Span {}
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
pub fn call_site() -> Span {
|
|
Span { lo: 0, hi: 0 }
|
|
}
|
|
|
|
+ #[cfg(hygiene)]
|
|
+ pub fn mixed_site() -> Span {
|
|
+ Span::call_site()
|
|
+ }
|
|
+
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
pub fn def_site() -> Span {
|
|
Span::call_site()
|
|
}
|
|
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
pub fn resolved_at(&self, _other: Span) -> Span {
|
|
// Stable spans consist only of line/column information, so
|
|
// `resolved_at` and `located_at` only select which span the
|
|
// caller wants line/column information from.
|
|
*self
|
|
}
|
|
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
pub fn located_at(&self, other: Span) -> Span {
|
|
other
|
|
}
|
|
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
pub fn source_file(&self) -> SourceFile {
|
|
SOURCE_MAP.with(|cm| {
|
|
let cm = cm.borrow();
|
|
@@ -422,36 +475,69 @@ impl Span {
|
|
return None;
|
|
}
|
|
Some(Span {
|
|
lo: cmp::min(self.lo, other.lo),
|
|
hi: cmp::max(self.hi, other.hi),
|
|
})
|
|
})
|
|
}
|
|
+
|
|
+ #[cfg(not(span_locations))]
|
|
+ fn first_byte(self) -> Self {
|
|
+ self
|
|
+ }
|
|
+
|
|
+ #[cfg(span_locations)]
|
|
+ fn first_byte(self) -> Self {
|
|
+ Span {
|
|
+ lo: self.lo,
|
|
+ hi: cmp::min(self.lo.saturating_add(1), self.hi),
|
|
+ }
|
|
+ }
|
|
+
|
|
+ #[cfg(not(span_locations))]
|
|
+ fn last_byte(self) -> Self {
|
|
+ self
|
|
+ }
|
|
+
|
|
+ #[cfg(span_locations)]
|
|
+ fn last_byte(self) -> Self {
|
|
+ Span {
|
|
+ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
|
|
+ hi: self.hi,
|
|
+ }
|
|
+ }
|
|
}
|
|
|
|
-impl fmt::Debug for Span {
|
|
+impl Debug for Span {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
+ #[cfg(span_locations)]
|
|
return write!(f, "bytes({}..{})", self.lo, self.hi);
|
|
|
|
- #[cfg(not(procmacro2_semver_exempt))]
|
|
+ #[cfg(not(span_locations))]
|
|
write!(f, "Span")
|
|
}
|
|
}
|
|
|
|
-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
|
|
- if cfg!(procmacro2_semver_exempt) {
|
|
+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
|
|
+ #[cfg(span_locations)]
|
|
+ {
|
|
+ if span.lo == 0 && span.hi == 0 {
|
|
+ return;
|
|
+ }
|
|
+ }
|
|
+
|
|
+ if cfg!(span_locations) {
|
|
debug.field("span", &span);
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub struct Group {
|
|
+pub(crate) struct Group {
|
|
delimiter: Delimiter,
|
|
stream: TokenStream,
|
|
span: Span,
|
|
}
|
|
|
|
impl Group {
|
|
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
|
|
Group {
|
|
@@ -469,58 +555,67 @@ impl Group {
|
|
self.stream.clone()
|
|
}
|
|
|
|
pub fn span(&self) -> Span {
|
|
self.span
|
|
}
|
|
|
|
pub fn span_open(&self) -> Span {
|
|
- self.span
|
|
+ self.span.first_byte()
|
|
}
|
|
|
|
pub fn span_close(&self) -> Span {
|
|
- self.span
|
|
+ self.span.last_byte()
|
|
}
|
|
|
|
pub fn set_span(&mut self, span: Span) {
|
|
self.span = span;
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Group {
|
|
+impl Display for Group {
|
|
+ // We attempt to match libproc_macro's formatting.
|
|
+ // Empty parens: ()
|
|
+ // Nonempty parens: (...)
|
|
+ // Empty brackets: []
|
|
+ // Nonempty brackets: [...]
|
|
+ // Empty braces: { }
|
|
+ // Nonempty braces: { ... }
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- let (left, right) = match self.delimiter {
|
|
+ let (open, close) = match self.delimiter {
|
|
Delimiter::Parenthesis => ("(", ")"),
|
|
- Delimiter::Brace => ("{", "}"),
|
|
+ Delimiter::Brace => ("{ ", "}"),
|
|
Delimiter::Bracket => ("[", "]"),
|
|
Delimiter::None => ("", ""),
|
|
};
|
|
|
|
- f.write_str(left)?;
|
|
- self.stream.fmt(f)?;
|
|
- f.write_str(right)?;
|
|
+ f.write_str(open)?;
|
|
+ Display::fmt(&self.stream, f)?;
|
|
+ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
|
|
+ f.write_str(" ")?;
|
|
+ }
|
|
+ f.write_str(close)?;
|
|
|
|
Ok(())
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Group {
|
|
+impl Debug for Group {
|
|
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
|
let mut debug = fmt.debug_struct("Group");
|
|
debug.field("delimiter", &self.delimiter);
|
|
debug.field("stream", &self.stream);
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
- debug.field("span", &self.span);
|
|
+ debug_span_field_if_nontrivial(&mut debug, self.span);
|
|
debug.finish()
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub struct Ident {
|
|
+pub(crate) struct Ident {
|
|
sym: String,
|
|
span: Span,
|
|
raw: bool,
|
|
}
|
|
|
|
impl Ident {
|
|
fn _new(string: &str, raw: bool, span: Span) -> Ident {
|
|
validate_ident(string);
|
|
@@ -544,26 +639,24 @@ impl Ident {
|
|
self.span
|
|
}
|
|
|
|
pub fn set_span(&mut self, span: Span) {
|
|
self.span = span;
|
|
}
|
|
}
|
|
|
|
-#[inline]
|
|
-fn is_ident_start(c: char) -> bool {
|
|
+pub(crate) fn is_ident_start(c: char) -> bool {
|
|
('a' <= c && c <= 'z')
|
|
|| ('A' <= c && c <= 'Z')
|
|
|| c == '_'
|
|
|| (c > '\x7f' && UnicodeXID::is_xid_start(c))
|
|
}
|
|
|
|
-#[inline]
|
|
-fn is_ident_continue(c: char) -> bool {
|
|
+pub(crate) fn is_ident_continue(c: char) -> bool {
|
|
('a' <= c && c <= 'z')
|
|
|| ('A' <= c && c <= 'Z')
|
|
|| c == '_'
|
|
|| ('0' <= c && c <= '9')
|
|
|| (c > '\x7f' && UnicodeXID::is_xid_continue(c))
|
|
}
|
|
|
|
fn validate_ident(string: &str) {
|
|
@@ -610,49 +703,49 @@ where
|
|
if self.raw {
|
|
other.starts_with("r#") && self.sym == other[2..]
|
|
} else {
|
|
self.sym == other
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Ident {
|
|
+impl Display for Ident {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
if self.raw {
|
|
- "r#".fmt(f)?;
|
|
+ f.write_str("r#")?;
|
|
}
|
|
- self.sym.fmt(f)
|
|
+ Display::fmt(&self.sym, f)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Ident {
|
|
+impl Debug for Ident {
|
|
// Ident(proc_macro), Ident(r#union)
|
|
- #[cfg(not(procmacro2_semver_exempt))]
|
|
+ #[cfg(not(span_locations))]
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
let mut debug = f.debug_tuple("Ident");
|
|
debug.field(&format_args!("{}", self));
|
|
debug.finish()
|
|
}
|
|
|
|
// Ident {
|
|
// sym: proc_macro,
|
|
// span: bytes(128..138)
|
|
// }
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
+ #[cfg(span_locations)]
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
let mut debug = f.debug_struct("Ident");
|
|
debug.field("sym", &format_args!("{}", self));
|
|
- debug.field("span", &self.span);
|
|
+ debug_span_field_if_nontrivial(&mut debug, self.span);
|
|
debug.finish()
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub struct Literal {
|
|
+pub(crate) struct Literal {
|
|
text: String,
|
|
span: Span,
|
|
}
|
|
|
|
macro_rules! suffixed_numbers {
|
|
($($name:ident => $kind:ident,)*) => ($(
|
|
pub fn $name(n: $kind) -> Literal {
|
|
Literal::_new(format!(concat!("{}", stringify!($kind)), n))
|
|
@@ -664,17 +757,17 @@ macro_rules! unsuffixed_numbers {
|
|
($($name:ident => $kind:ident,)*) => ($(
|
|
pub fn $name(n: $kind) -> Literal {
|
|
Literal::_new(n.to_string())
|
|
}
|
|
)*)
|
|
}
|
|
|
|
impl Literal {
|
|
- fn _new(text: String) -> Literal {
|
|
+ pub(crate) fn _new(text: String) -> Literal {
|
|
Literal {
|
|
text,
|
|
span: Span::call_site(),
|
|
}
|
|
}
|
|
|
|
suffixed_numbers! {
|
|
u8_suffixed => u8,
|
|
@@ -706,61 +799,62 @@ impl Literal {
|
|
i32_unsuffixed => i32,
|
|
i64_unsuffixed => i64,
|
|
i128_unsuffixed => i128,
|
|
isize_unsuffixed => isize,
|
|
}
|
|
|
|
pub fn f32_unsuffixed(f: f32) -> Literal {
|
|
let mut s = f.to_string();
|
|
- if !s.contains(".") {
|
|
+ if !s.contains('.') {
|
|
s.push_str(".0");
|
|
}
|
|
Literal::_new(s)
|
|
}
|
|
|
|
pub fn f64_unsuffixed(f: f64) -> Literal {
|
|
let mut s = f.to_string();
|
|
- if !s.contains(".") {
|
|
+ if !s.contains('.') {
|
|
s.push_str(".0");
|
|
}
|
|
Literal::_new(s)
|
|
}
|
|
|
|
pub fn string(t: &str) -> Literal {
|
|
let mut text = String::with_capacity(t.len() + 2);
|
|
text.push('"');
|
|
for c in t.chars() {
|
|
if c == '\'' {
|
|
- // escape_default turns this into "\'" which is unnecessary.
|
|
+ // escape_debug turns this into "\'" which is unnecessary.
|
|
text.push(c);
|
|
} else {
|
|
- text.extend(c.escape_default());
|
|
+ text.extend(c.escape_debug());
|
|
}
|
|
}
|
|
text.push('"');
|
|
Literal::_new(text)
|
|
}
|
|
|
|
pub fn character(t: char) -> Literal {
|
|
let mut text = String::new();
|
|
text.push('\'');
|
|
if t == '"' {
|
|
- // escape_default turns this into '\"' which is unnecessary.
|
|
+ // escape_debug turns this into '\"' which is unnecessary.
|
|
text.push(t);
|
|
} else {
|
|
- text.extend(t.escape_default());
|
|
+ text.extend(t.escape_debug());
|
|
}
|
|
text.push('\'');
|
|
Literal::_new(text)
|
|
}
|
|
|
|
pub fn byte_string(bytes: &[u8]) -> Literal {
|
|
let mut escaped = "b\"".to_string();
|
|
for b in bytes {
|
|
+ #[allow(clippy::match_overlapping_arm)]
|
|
match *b {
|
|
b'\0' => escaped.push_str(r"\0"),
|
|
b'\t' => escaped.push_str(r"\t"),
|
|
b'\n' => escaped.push_str(r"\n"),
|
|
b'\r' => escaped.push_str(r"\r"),
|
|
b'"' => escaped.push_str("\\\""),
|
|
b'\\' => escaped.push_str("\\\\"),
|
|
b'\x20'..=b'\x7E' => escaped.push(*b as char),
|
|
@@ -779,656 +873,22 @@ impl Literal {
|
|
self.span = span;
|
|
}
|
|
|
|
pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
|
|
None
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Literal {
|
|
+impl Display for Literal {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.text.fmt(f)
|
|
- }
|
|
-}
|
|
-
|
|
-impl fmt::Debug for Literal {
|
|
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
|
- let mut debug = fmt.debug_struct("Literal");
|
|
- debug.field("lit", &format_args!("{}", self.text));
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
- debug.field("span", &self.span);
|
|
- debug.finish()
|
|
- }
|
|
-}
|
|
-
|
|
-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
|
|
- let mut trees = Vec::new();
|
|
- loop {
|
|
- let input_no_ws = skip_whitespace(input);
|
|
- if input_no_ws.rest.len() == 0 {
|
|
- break;
|
|
- }
|
|
- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
|
|
- input = a;
|
|
- trees.extend(tokens);
|
|
- continue;
|
|
- }
|
|
-
|
|
- let (a, tt) = match token_tree(input_no_ws) {
|
|
- Ok(p) => p,
|
|
- Err(_) => break,
|
|
- };
|
|
- trees.push(tt);
|
|
- input = a;
|
|
- }
|
|
- Ok((input, TokenStream { inner: trees }))
|
|
-}
|
|
-
|
|
-#[cfg(not(span_locations))]
|
|
-fn spanned<'a, T>(
|
|
- input: Cursor<'a>,
|
|
- f: fn(Cursor<'a>) -> PResult<'a, T>,
|
|
-) -> PResult<'a, (T, crate::Span)> {
|
|
- let (a, b) = f(skip_whitespace(input))?;
|
|
- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
|
|
-}
|
|
-
|
|
-#[cfg(span_locations)]
|
|
-fn spanned<'a, T>(
|
|
- input: Cursor<'a>,
|
|
- f: fn(Cursor<'a>) -> PResult<'a, T>,
|
|
-) -> PResult<'a, (T, crate::Span)> {
|
|
- let input = skip_whitespace(input);
|
|
- let lo = input.off;
|
|
- let (a, b) = f(input)?;
|
|
- let hi = a.off;
|
|
- let span = crate::Span::_new_stable(Span { lo, hi });
|
|
- Ok((a, (b, span)))
|
|
-}
|
|
-
|
|
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
|
|
- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
|
|
- tt.set_span(span);
|
|
- Ok((rest, tt))
|
|
-}
|
|
-
|
|
-named!(token_kind -> TokenTree, alt!(
|
|
- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
|
|
- |
|
|
- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
|
|
- |
|
|
- map!(op, TokenTree::Punct)
|
|
- |
|
|
- symbol_leading_ws
|
|
-));
|
|
-
|
|
-named!(group -> Group, alt!(
|
|
- delimited!(
|
|
- punct!("("),
|
|
- token_stream,
|
|
- punct!(")")
|
|
- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
|
|
- |
|
|
- delimited!(
|
|
- punct!("["),
|
|
- token_stream,
|
|
- punct!("]")
|
|
- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
|
|
- |
|
|
- delimited!(
|
|
- punct!("{"),
|
|
- token_stream,
|
|
- punct!("}")
|
|
- ) => { |ts| Group::new(Delimiter::Brace, ts) }
|
|
-));
|
|
-
|
|
-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
|
|
- symbol(skip_whitespace(input))
|
|
-}
|
|
-
|
|
-fn symbol(input: Cursor) -> PResult<TokenTree> {
|
|
- let raw = input.starts_with("r#");
|
|
- let rest = input.advance((raw as usize) << 1);
|
|
-
|
|
- let (rest, sym) = symbol_not_raw(rest)?;
|
|
-
|
|
- if !raw {
|
|
- let ident = crate::Ident::new(sym, crate::Span::call_site());
|
|
- return Ok((rest, ident.into()));
|
|
- }
|
|
-
|
|
- if sym == "_" {
|
|
- return Err(LexError);
|
|
- }
|
|
-
|
|
- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
|
|
- Ok((rest, ident.into()))
|
|
-}
|
|
-
|
|
-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
|
|
- let mut chars = input.char_indices();
|
|
-
|
|
- match chars.next() {
|
|
- Some((_, ch)) if is_ident_start(ch) => {}
|
|
- _ => return Err(LexError),
|
|
- }
|
|
-
|
|
- let mut end = input.len();
|
|
- for (i, ch) in chars {
|
|
- if !is_ident_continue(ch) {
|
|
- end = i;
|
|
- break;
|
|
- }
|
|
- }
|
|
-
|
|
- Ok((input.advance(end), &input.rest[..end]))
|
|
-}
|
|
-
|
|
-fn literal(input: Cursor) -> PResult<Literal> {
|
|
- let input_no_ws = skip_whitespace(input);
|
|
-
|
|
- match literal_nocapture(input_no_ws) {
|
|
- Ok((a, ())) => {
|
|
- let start = input.len() - input_no_ws.len();
|
|
- let len = input_no_ws.len() - a.len();
|
|
- let end = start + len;
|
|
- Ok((a, Literal::_new(input.rest[start..end].to_string())))
|
|
- }
|
|
- Err(LexError) => Err(LexError),
|
|
+ Display::fmt(&self.text, f)
|
|
}
|
|
}
|
|
|
|
-named!(literal_nocapture -> (), alt!(
|
|
- string
|
|
- |
|
|
- byte_string
|
|
- |
|
|
- byte
|
|
- |
|
|
- character
|
|
- |
|
|
- float
|
|
- |
|
|
- int
|
|
-));
|
|
-
|
|
-named!(string -> (), alt!(
|
|
- quoted_string
|
|
- |
|
|
- preceded!(
|
|
- punct!("r"),
|
|
- raw_string
|
|
- ) => { |_| () }
|
|
-));
|
|
-
|
|
-named!(quoted_string -> (), do_parse!(
|
|
- punct!("\"") >>
|
|
- cooked_string >>
|
|
- tag!("\"") >>
|
|
- option!(symbol_not_raw) >>
|
|
- (())
|
|
-));
|
|
-
|
|
-fn cooked_string(input: Cursor) -> PResult<()> {
|
|
- let mut chars = input.char_indices().peekable();
|
|
- while let Some((byte_offset, ch)) = chars.next() {
|
|
- match ch {
|
|
- '"' => {
|
|
- return Ok((input.advance(byte_offset), ()));
|
|
- }
|
|
- '\r' => {
|
|
- if let Some((_, '\n')) = chars.next() {
|
|
- // ...
|
|
- } else {
|
|
- break;
|
|
- }
|
|
- }
|
|
- '\\' => match chars.next() {
|
|
- Some((_, 'x')) => {
|
|
- if !backslash_x_char(&mut chars) {
|
|
- break;
|
|
- }
|
|
- }
|
|
- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
|
|
- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
|
|
- Some((_, 'u')) => {
|
|
- if !backslash_u(&mut chars) {
|
|
- break;
|
|
- }
|
|
- }
|
|
- Some((_, '\n')) | Some((_, '\r')) => {
|
|
- while let Some(&(_, ch)) = chars.peek() {
|
|
- if ch.is_whitespace() {
|
|
- chars.next();
|
|
- } else {
|
|
- break;
|
|
- }
|
|
- }
|
|
- }
|
|
- _ => break,
|
|
- },
|
|
- _ch => {}
|
|
- }
|
|
- }
|
|
- Err(LexError)
|
|
-}
|
|
-
|
|
-named!(byte_string -> (), alt!(
|
|
- delimited!(
|
|
- punct!("b\""),
|
|
- cooked_byte_string,
|
|
- tag!("\"")
|
|
- ) => { |_| () }
|
|
- |
|
|
- preceded!(
|
|
- punct!("br"),
|
|
- raw_string
|
|
- ) => { |_| () }
|
|
-));
|
|
-
|
|
-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
|
|
- let mut bytes = input.bytes().enumerate();
|
|
- 'outer: while let Some((offset, b)) = bytes.next() {
|
|
- match b {
|
|
- b'"' => {
|
|
- return Ok((input.advance(offset), ()));
|
|
- }
|
|
- b'\r' => {
|
|
- if let Some((_, b'\n')) = bytes.next() {
|
|
- // ...
|
|
- } else {
|
|
- break;
|
|
- }
|
|
- }
|
|
- b'\\' => match bytes.next() {
|
|
- Some((_, b'x')) => {
|
|
- if !backslash_x_byte(&mut bytes) {
|
|
- break;
|
|
- }
|
|
- }
|
|
- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
|
|
- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
|
|
- Some((newline, b'\n')) | Some((newline, b'\r')) => {
|
|
- let rest = input.advance(newline + 1);
|
|
- for (offset, ch) in rest.char_indices() {
|
|
- if !ch.is_whitespace() {
|
|
- input = rest.advance(offset);
|
|
- bytes = input.bytes().enumerate();
|
|
- continue 'outer;
|
|
- }
|
|
- }
|
|
- break;
|
|
- }
|
|
- _ => break,
|
|
- },
|
|
- b if b < 0x80 => {}
|
|
- _ => break,
|
|
- }
|
|
- }
|
|
- Err(LexError)
|
|
-}
|
|
-
|
|
-fn raw_string(input: Cursor) -> PResult<()> {
|
|
- let mut chars = input.char_indices();
|
|
- let mut n = 0;
|
|
- while let Some((byte_offset, ch)) = chars.next() {
|
|
- match ch {
|
|
- '"' => {
|
|
- n = byte_offset;
|
|
- break;
|
|
- }
|
|
- '#' => {}
|
|
- _ => return Err(LexError),
|
|
- }
|
|
- }
|
|
- for (byte_offset, ch) in chars {
|
|
- match ch {
|
|
- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
|
|
- let rest = input.advance(byte_offset + 1 + n);
|
|
- return Ok((rest, ()));
|
|
- }
|
|
- '\r' => {}
|
|
- _ => {}
|
|
- }
|
|
- }
|
|
- Err(LexError)
|
|
-}
|
|
-
|
|
-named!(byte -> (), do_parse!(
|
|
- punct!("b") >>
|
|
- tag!("'") >>
|
|
- cooked_byte >>
|
|
- tag!("'") >>
|
|
- (())
|
|
-));
|
|
-
|
|
-fn cooked_byte(input: Cursor) -> PResult<()> {
|
|
- let mut bytes = input.bytes().enumerate();
|
|
- let ok = match bytes.next().map(|(_, b)| b) {
|
|
- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
|
|
- Some(b'x') => backslash_x_byte(&mut bytes),
|
|
- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
|
|
- | Some(b'"') => true,
|
|
- _ => false,
|
|
- },
|
|
- b => b.is_some(),
|
|
- };
|
|
- if ok {
|
|
- match bytes.next() {
|
|
- Some((offset, _)) => {
|
|
- if input.chars().as_str().is_char_boundary(offset) {
|
|
- Ok((input.advance(offset), ()))
|
|
- } else {
|
|
- Err(LexError)
|
|
- }
|
|
- }
|
|
- None => Ok((input.advance(input.len()), ())),
|
|
- }
|
|
- } else {
|
|
- Err(LexError)
|
|
- }
|
|
-}
|
|
-
|
|
-named!(character -> (), do_parse!(
|
|
- punct!("'") >>
|
|
- cooked_char >>
|
|
- tag!("'") >>
|
|
- (())
|
|
-));
|
|
-
|
|
-fn cooked_char(input: Cursor) -> PResult<()> {
|
|
- let mut chars = input.char_indices();
|
|
- let ok = match chars.next().map(|(_, ch)| ch) {
|
|
- Some('\\') => match chars.next().map(|(_, ch)| ch) {
|
|
- Some('x') => backslash_x_char(&mut chars),
|
|
- Some('u') => backslash_u(&mut chars),
|
|
- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
|
|
- true
|
|
- }
|
|
- _ => false,
|
|
- },
|
|
- ch => ch.is_some(),
|
|
- };
|
|
- if ok {
|
|
- match chars.next() {
|
|
- Some((idx, _)) => Ok((input.advance(idx), ())),
|
|
- None => Ok((input.advance(input.len()), ())),
|
|
- }
|
|
- } else {
|
|
- Err(LexError)
|
|
+impl Debug for Literal {
|
|
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut debug = fmt.debug_struct("Literal");
|
|
+ debug.field("lit", &format_args!("{}", self.text));
|
|
+ debug_span_field_if_nontrivial(&mut debug, self.span);
|
|
+ debug.finish()
|
|
}
|
|
}
|
|
-
|
|
-macro_rules! next_ch {
|
|
- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
|
|
- match $chars.next() {
|
|
- Some((_, ch)) => match ch {
|
|
- $pat $(| $rest)* => ch,
|
|
- _ => return false,
|
|
- },
|
|
- None => return false
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-fn backslash_x_char<I>(chars: &mut I) -> bool
|
|
-where
|
|
- I: Iterator<Item = (usize, char)>,
|
|
-{
|
|
- next_ch!(chars @ '0'..='7');
|
|
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
|
- true
|
|
-}
|
|
-
|
|
-fn backslash_x_byte<I>(chars: &mut I) -> bool
|
|
-where
|
|
- I: Iterator<Item = (usize, u8)>,
|
|
-{
|
|
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
|
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
|
- true
|
|
-}
|
|
-
|
|
-fn backslash_u<I>(chars: &mut I) -> bool
|
|
-where
|
|
- I: Iterator<Item = (usize, char)>,
|
|
-{
|
|
- next_ch!(chars @ '{');
|
|
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
|
- loop {
|
|
- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
|
|
- if c == '}' {
|
|
- return true;
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-fn float(input: Cursor) -> PResult<()> {
|
|
- let (mut rest, ()) = float_digits(input)?;
|
|
- if let Some(ch) = rest.chars().next() {
|
|
- if is_ident_start(ch) {
|
|
- rest = symbol_not_raw(rest)?.0;
|
|
- }
|
|
- }
|
|
- word_break(rest)
|
|
-}
|
|
-
|
|
-fn float_digits(input: Cursor) -> PResult<()> {
|
|
- let mut chars = input.chars().peekable();
|
|
- match chars.next() {
|
|
- Some(ch) if ch >= '0' && ch <= '9' => {}
|
|
- _ => return Err(LexError),
|
|
- }
|
|
-
|
|
- let mut len = 1;
|
|
- let mut has_dot = false;
|
|
- let mut has_exp = false;
|
|
- while let Some(&ch) = chars.peek() {
|
|
- match ch {
|
|
- '0'..='9' | '_' => {
|
|
- chars.next();
|
|
- len += 1;
|
|
- }
|
|
- '.' => {
|
|
- if has_dot {
|
|
- break;
|
|
- }
|
|
- chars.next();
|
|
- if chars
|
|
- .peek()
|
|
- .map(|&ch| ch == '.' || is_ident_start(ch))
|
|
- .unwrap_or(false)
|
|
- {
|
|
- return Err(LexError);
|
|
- }
|
|
- len += 1;
|
|
- has_dot = true;
|
|
- }
|
|
- 'e' | 'E' => {
|
|
- chars.next();
|
|
- len += 1;
|
|
- has_exp = true;
|
|
- break;
|
|
- }
|
|
- _ => break,
|
|
- }
|
|
- }
|
|
-
|
|
- let rest = input.advance(len);
|
|
- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
|
|
- return Err(LexError);
|
|
- }
|
|
-
|
|
- if has_exp {
|
|
- let mut has_exp_value = false;
|
|
- while let Some(&ch) = chars.peek() {
|
|
- match ch {
|
|
- '+' | '-' => {
|
|
- if has_exp_value {
|
|
- break;
|
|
- }
|
|
- chars.next();
|
|
- len += 1;
|
|
- }
|
|
- '0'..='9' => {
|
|
- chars.next();
|
|
- len += 1;
|
|
- has_exp_value = true;
|
|
- }
|
|
- '_' => {
|
|
- chars.next();
|
|
- len += 1;
|
|
- }
|
|
- _ => break,
|
|
- }
|
|
- }
|
|
- if !has_exp_value {
|
|
- return Err(LexError);
|
|
- }
|
|
- }
|
|
-
|
|
- Ok((input.advance(len), ()))
|
|
-}
|
|
-
|
|
-fn int(input: Cursor) -> PResult<()> {
|
|
- let (mut rest, ()) = digits(input)?;
|
|
- if let Some(ch) = rest.chars().next() {
|
|
- if is_ident_start(ch) {
|
|
- rest = symbol_not_raw(rest)?.0;
|
|
- }
|
|
- }
|
|
- word_break(rest)
|
|
-}
|
|
-
|
|
-fn digits(mut input: Cursor) -> PResult<()> {
|
|
- let base = if input.starts_with("0x") {
|
|
- input = input.advance(2);
|
|
- 16
|
|
- } else if input.starts_with("0o") {
|
|
- input = input.advance(2);
|
|
- 8
|
|
- } else if input.starts_with("0b") {
|
|
- input = input.advance(2);
|
|
- 2
|
|
- } else {
|
|
- 10
|
|
- };
|
|
-
|
|
- let mut len = 0;
|
|
- let mut empty = true;
|
|
- for b in input.bytes() {
|
|
- let digit = match b {
|
|
- b'0'..=b'9' => (b - b'0') as u64,
|
|
- b'a'..=b'f' => 10 + (b - b'a') as u64,
|
|
- b'A'..=b'F' => 10 + (b - b'A') as u64,
|
|
- b'_' => {
|
|
- if empty && base == 10 {
|
|
- return Err(LexError);
|
|
- }
|
|
- len += 1;
|
|
- continue;
|
|
- }
|
|
- _ => break,
|
|
- };
|
|
- if digit >= base {
|
|
- return Err(LexError);
|
|
- }
|
|
- len += 1;
|
|
- empty = false;
|
|
- }
|
|
- if empty {
|
|
- Err(LexError)
|
|
- } else {
|
|
- Ok((input.advance(len), ()))
|
|
- }
|
|
-}
|
|
-
|
|
-fn op(input: Cursor) -> PResult<Punct> {
|
|
- let input = skip_whitespace(input);
|
|
- match op_char(input) {
|
|
- Ok((rest, '\'')) => {
|
|
- symbol(rest)?;
|
|
- Ok((rest, Punct::new('\'', Spacing::Joint)))
|
|
- }
|
|
- Ok((rest, ch)) => {
|
|
- let kind = match op_char(rest) {
|
|
- Ok(_) => Spacing::Joint,
|
|
- Err(LexError) => Spacing::Alone,
|
|
- };
|
|
- Ok((rest, Punct::new(ch, kind)))
|
|
- }
|
|
- Err(LexError) => Err(LexError),
|
|
- }
|
|
-}
|
|
-
|
|
-fn op_char(input: Cursor) -> PResult<char> {
|
|
- if input.starts_with("//") || input.starts_with("/*") {
|
|
- // Do not accept `/` of a comment as an op.
|
|
- return Err(LexError);
|
|
- }
|
|
-
|
|
- let mut chars = input.chars();
|
|
- let first = match chars.next() {
|
|
- Some(ch) => ch,
|
|
- None => {
|
|
- return Err(LexError);
|
|
- }
|
|
- };
|
|
- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
|
|
- if recognized.contains(first) {
|
|
- Ok((input.advance(first.len_utf8()), first))
|
|
- } else {
|
|
- Err(LexError)
|
|
- }
|
|
-}
|
|
-
|
|
-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
|
|
- let mut trees = Vec::new();
|
|
- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
|
|
- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
|
|
- if inner {
|
|
- trees.push(Punct::new('!', Spacing::Alone).into());
|
|
- }
|
|
- let mut stream = vec![
|
|
- TokenTree::Ident(crate::Ident::new("doc", span)),
|
|
- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
|
|
- TokenTree::Literal(crate::Literal::string(comment)),
|
|
- ];
|
|
- for tt in stream.iter_mut() {
|
|
- tt.set_span(span);
|
|
- }
|
|
- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
|
|
- trees.push(crate::Group::_new_stable(group).into());
|
|
- for tt in trees.iter_mut() {
|
|
- tt.set_span(span);
|
|
- }
|
|
- Ok((rest, trees))
|
|
-}
|
|
-
|
|
-named!(doc_comment_contents -> (&str, bool), alt!(
|
|
- do_parse!(
|
|
- punct!("//!") >>
|
|
- s: take_until_newline_or_eof!() >>
|
|
- ((s, true))
|
|
- )
|
|
- |
|
|
- do_parse!(
|
|
- option!(whitespace) >>
|
|
- peek!(tag!("/*!")) >>
|
|
- s: block_comment >>
|
|
- ((s, true))
|
|
- )
|
|
- |
|
|
- do_parse!(
|
|
- punct!("///") >>
|
|
- not!(tag!("/")) >>
|
|
- s: take_until_newline_or_eof!() >>
|
|
- ((s, false))
|
|
- )
|
|
- |
|
|
- do_parse!(
|
|
- option!(whitespace) >>
|
|
- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
|
|
- s: block_comment >>
|
|
- ((s, false))
|
|
- )
|
|
-));
|
|
diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
|
|
--- a/third_party/rust/proc-macro2/src/lib.rs
|
|
+++ b/third_party/rust/proc-macro2/src/lib.rs
|
|
@@ -73,37 +73,44 @@
|
|
//!
|
|
//! # Thread-Safety
|
|
//!
|
|
//! Most types in this crate are `!Sync` because the underlying compiler
|
|
//! types make use of thread-local memory, meaning they cannot be accessed from
|
|
//! a different thread.
|
|
|
|
// Proc-macro2 types in rustdoc of other crates get linked to here.
|
|
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
|
|
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
|
|
#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
|
|
#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
|
|
+#![allow(clippy::needless_doctest_main)]
|
|
|
|
#[cfg(use_proc_macro)]
|
|
extern crate proc_macro;
|
|
|
|
use std::cmp::Ordering;
|
|
-use std::fmt;
|
|
+use std::fmt::{self, Debug, Display};
|
|
use std::hash::{Hash, Hasher};
|
|
use std::iter::FromIterator;
|
|
use std::marker;
|
|
use std::ops::RangeBounds;
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
use std::path::PathBuf;
|
|
use std::rc::Rc;
|
|
use std::str::FromStr;
|
|
|
|
-#[macro_use]
|
|
-mod strnom;
|
|
-mod fallback;
|
|
+mod parse;
|
|
+
|
|
+#[cfg(wrap_proc_macro)]
|
|
+mod detection;
|
|
+
|
|
+// Public for proc_macro2::fallback::force() and unforce(), but those are quite
|
|
+// a niche use case so we omit it from rustdoc.
|
|
+#[doc(hidden)]
|
|
+pub mod fallback;
|
|
|
|
#[cfg(not(wrap_proc_macro))]
|
|
use crate::fallback as imp;
|
|
#[path = "wrapper.rs"]
|
|
#[cfg(wrap_proc_macro)]
|
|
mod imp;
|
|
|
|
/// An abstract stream of tokens, or more concretely a sequence of token trees.
|
|
@@ -223,32 +230,32 @@ impl FromIterator<TokenStream> for Token
|
|
TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
|
|
}
|
|
}
|
|
|
|
/// Prints the token stream as a string that is supposed to be losslessly
|
|
/// convertible back into the same token stream (modulo spans), except for
|
|
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
|
|
/// numeric literals.
|
|
-impl fmt::Display for TokenStream {
|
|
+impl Display for TokenStream {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Display::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
/// Prints token in a form convenient for debugging.
|
|
-impl fmt::Debug for TokenStream {
|
|
+impl Debug for TokenStream {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for LexError {
|
|
+impl Debug for LexError {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
/// The source file of a given `Span`.
|
|
///
|
|
/// This type is semver exempt and not exposed by default.
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
#[derive(Clone, PartialEq, Eq)]
|
|
@@ -286,19 +293,19 @@ impl SourceFile {
|
|
/// Returns `true` if this source file is a real source file, and not
|
|
/// generated by an external macro's expansion.
|
|
pub fn is_real(&self) -> bool {
|
|
self.inner.is_real()
|
|
}
|
|
}
|
|
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
-impl fmt::Debug for SourceFile {
|
|
+impl Debug for SourceFile {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
/// A line-column pair representing the start or end of a `Span`.
|
|
///
|
|
/// This type is semver exempt and not exposed by default.
|
|
#[cfg(span_locations)]
|
|
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
|
@@ -306,16 +313,32 @@ pub struct LineColumn {
|
|
/// The 1-indexed line in the source file on which the span starts or ends
|
|
/// (inclusive).
|
|
pub line: usize,
|
|
/// The 0-indexed column (in UTF-8 characters) in the source file on which
|
|
/// the span starts or ends (inclusive).
|
|
pub column: usize,
|
|
}
|
|
|
|
+#[cfg(span_locations)]
|
|
+impl Ord for LineColumn {
|
|
+ fn cmp(&self, other: &Self) -> Ordering {
|
|
+ self.line
|
|
+ .cmp(&other.line)
|
|
+ .then(self.column.cmp(&other.column))
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(span_locations)]
|
|
+impl PartialOrd for LineColumn {
|
|
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
+ Some(self.cmp(other))
|
|
+ }
|
|
+}
|
|
+
|
|
/// A region of source code, along with macro expansion information.
|
|
#[derive(Copy, Clone)]
|
|
pub struct Span {
|
|
inner: imp::Span,
|
|
_marker: marker::PhantomData<Rc<()>>,
|
|
}
|
|
|
|
impl Span {
|
|
@@ -337,38 +360,42 @@ impl Span {
|
|
///
|
|
/// Identifiers created with this span will be resolved as if they were
|
|
/// written directly at the macro call location (call-site hygiene) and
|
|
/// other code at the macro call site will be able to refer to them as well.
|
|
pub fn call_site() -> Span {
|
|
Span::_new(imp::Span::call_site())
|
|
}
|
|
|
|
+ /// The span located at the invocation of the procedural macro, but with
|
|
+ /// local variables, labels, and `$crate` resolved at the definition site
|
|
+ /// of the macro. This is the same hygiene behavior as `macro_rules`.
|
|
+ ///
|
|
+ /// This function requires Rust 1.45 or later.
|
|
+ #[cfg(hygiene)]
|
|
+ pub fn mixed_site() -> Span {
|
|
+ Span::_new(imp::Span::mixed_site())
|
|
+ }
|
|
+
|
|
/// A span that resolves at the macro definition site.
|
|
///
|
|
/// This method is semver exempt and not exposed by default.
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
pub fn def_site() -> Span {
|
|
Span::_new(imp::Span::def_site())
|
|
}
|
|
|
|
/// Creates a new span with the same line/column information as `self` but
|
|
/// that resolves symbols as though it were at `other`.
|
|
- ///
|
|
- /// This method is semver exempt and not exposed by default.
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
pub fn resolved_at(&self, other: Span) -> Span {
|
|
Span::_new(self.inner.resolved_at(other.inner))
|
|
}
|
|
|
|
/// Creates a new span with the same name resolution behavior as `self` but
|
|
/// with the line/column information of `other`.
|
|
- ///
|
|
- /// This method is semver exempt and not exposed by default.
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
pub fn located_at(&self, other: Span) -> Span {
|
|
Span::_new(self.inner.located_at(other.inner))
|
|
}
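With the `procmacro2_semver_exempt` gate dropped from `resolved_at` and `located_at`, both combinators are usable on any compiler. A small sketch of the common combination (keep the caller's location, resolve names at the call site); the helper name is illustrative:

```rust
use proc_macro2::Span;

// Keep the line/column information of `user_span` but resolve names as if at
// the macro call site.
fn call_site_located_at(user_span: Span) -> Span {
    Span::call_site().located_at(user_span)
}

fn main() {
    let span = call_site_located_at(Span::call_site());
    println!("{:?}", span);
}
```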
|
|
|
|
/// Convert `proc_macro2::Span` to `proc_macro::Span`.
|
|
///
|
|
/// This method is available when building with a nightly compiler, or when
|
|
/// building with rustc 1.29+ *without* semver exempt features.
|
|
@@ -434,19 +461,19 @@ impl Span {
|
|
/// This method is semver exempt and not exposed by default.
|
|
#[cfg(procmacro2_semver_exempt)]
|
|
pub fn eq(&self, other: &Span) -> bool {
|
|
self.inner.eq(&other.inner)
|
|
}
|
|
}
|
|
|
|
/// Prints a span in a form convenient for debugging.
|
|
-impl fmt::Debug for Span {
|
|
+impl Debug for Span {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
|
|
#[derive(Clone)]
|
|
pub enum TokenTree {
|
|
/// A token stream surrounded by bracket delimiters.
|
|
Group(Group),
|
|
@@ -457,35 +484,35 @@ pub enum TokenTree {
|
|
/// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
|
|
Literal(Literal),
|
|
}
|
|
|
|
impl TokenTree {
|
|
/// Returns the span of this tree, delegating to the `span` method of
|
|
/// the contained token or a delimited stream.
|
|
pub fn span(&self) -> Span {
|
|
- match *self {
|
|
- TokenTree::Group(ref t) => t.span(),
|
|
- TokenTree::Ident(ref t) => t.span(),
|
|
- TokenTree::Punct(ref t) => t.span(),
|
|
- TokenTree::Literal(ref t) => t.span(),
|
|
+ match self {
|
|
+ TokenTree::Group(t) => t.span(),
|
|
+ TokenTree::Ident(t) => t.span(),
|
|
+ TokenTree::Punct(t) => t.span(),
|
|
+ TokenTree::Literal(t) => t.span(),
|
|
}
|
|
}
|
|
|
|
/// Configures the span for *only this token*.
|
|
///
|
|
/// Note that if this token is a `Group` then this method will not configure
/// the span of each of the internal tokens; it simply delegates to the
/// `set_span` method of each variant.
|
|
pub fn set_span(&mut self, span: Span) {
|
|
- match *self {
|
|
- TokenTree::Group(ref mut t) => t.set_span(span),
|
|
- TokenTree::Ident(ref mut t) => t.set_span(span),
|
|
- TokenTree::Punct(ref mut t) => t.set_span(span),
|
|
- TokenTree::Literal(ref mut t) => t.set_span(span),
|
|
+ match self {
|
|
+ TokenTree::Group(t) => t.set_span(span),
|
|
+ TokenTree::Ident(t) => t.set_span(span),
|
|
+ TokenTree::Punct(t) => t.set_span(span),
|
|
+ TokenTree::Literal(t) => t.set_span(span),
|
|
}
|
|
}
|
|
}
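As the doc comment above notes, `TokenTree::set_span` is shallow. A minimal sketch that re-spans only the top-level tokens of a stream (the helper name is illustrative):

```rust
use proc_macro2::{Span, TokenStream, TokenTree};

// Stamp every top-level token with `span`; tokens nested inside groups keep
// their own spans, matching the shallow behavior described above.
fn respan_shallow(stream: TokenStream, span: Span) -> TokenStream {
    stream
        .into_iter()
        .map(|mut tt: TokenTree| {
            tt.set_span(span);
            tt
        })
        .collect()
}

fn main() {
    let ts: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
    let _ = respan_shallow(ts, Span::call_site());
}
```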
|
|
|
|
impl From<Group> for TokenTree {
|
|
fn from(g: Group) -> TokenTree {
|
|
TokenTree::Group(g)
|
|
}
|
|
@@ -508,42 +535,42 @@ impl From<Literal> for TokenTree {
|
|
TokenTree::Literal(g)
|
|
}
|
|
}
|
|
|
|
/// Prints the token tree as a string that is supposed to be losslessly
|
|
/// convertible back into the same token tree (modulo spans), except for
|
|
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
|
|
/// numeric literals.
|
|
-impl fmt::Display for TokenTree {
|
|
+impl Display for TokenTree {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- match *self {
|
|
- TokenTree::Group(ref t) => t.fmt(f),
|
|
- TokenTree::Ident(ref t) => t.fmt(f),
|
|
- TokenTree::Punct(ref t) => t.fmt(f),
|
|
- TokenTree::Literal(ref t) => t.fmt(f),
|
|
+ match self {
|
|
+ TokenTree::Group(t) => Display::fmt(t, f),
|
|
+ TokenTree::Ident(t) => Display::fmt(t, f),
|
|
+ TokenTree::Punct(t) => Display::fmt(t, f),
|
|
+ TokenTree::Literal(t) => Display::fmt(t, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Prints the token tree in a form convenient for debugging.
|
|
-impl fmt::Debug for TokenTree {
|
|
+impl Debug for TokenTree {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
// Each of these has the name in the struct type in the derived debug,
|
|
// so don't bother with an extra layer of indirection
|
|
- match *self {
|
|
- TokenTree::Group(ref t) => t.fmt(f),
|
|
- TokenTree::Ident(ref t) => {
|
|
+ match self {
|
|
+ TokenTree::Group(t) => Debug::fmt(t, f),
|
|
+ TokenTree::Ident(t) => {
|
|
let mut debug = f.debug_struct("Ident");
|
|
debug.field("sym", &format_args!("{}", t));
|
|
imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
|
|
debug.finish()
|
|
}
|
|
- TokenTree::Punct(ref t) => t.fmt(f),
|
|
- TokenTree::Literal(ref t) => t.fmt(f),
|
|
+ TokenTree::Punct(t) => Debug::fmt(t, f),
|
|
+ TokenTree::Literal(t) => Debug::fmt(t, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
/// A delimited token stream.
|
|
///
|
|
/// A `Group` internally contains a `TokenStream` which is surrounded by
|
|
/// `Delimiter`s.
|
|
@@ -646,25 +673,25 @@ impl Group {
|
|
pub fn set_span(&mut self, span: Span) {
|
|
self.inner.set_span(span.inner)
|
|
}
|
|
}
|
|
|
|
/// Prints the group as a string that should be losslessly convertible back
|
|
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
|
|
/// with `Delimiter::None` delimiters.
|
|
-impl fmt::Display for Group {
|
|
+impl Display for Group {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
- fmt::Display::fmt(&self.inner, formatter)
|
|
+ Display::fmt(&self.inner, formatter)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Group {
|
|
+impl Debug for Group {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
- fmt::Debug::fmt(&self.inner, formatter)
|
|
+ Debug::fmt(&self.inner, formatter)
|
|
}
|
|
}
|
|
|
|
/// A `Punct` is a single punctuation character like `+`, `-`, or `#`.
|
|
///
|
|
/// Multicharacter operators like `+=` are represented as two instances of
|
|
/// `Punct` with different forms of `Spacing` returned.
|
|
#[derive(Clone)]
|
|
@@ -725,23 +752,23 @@ impl Punct {
|
|
/// Configure the span for this punctuation character.
|
|
pub fn set_span(&mut self, span: Span) {
|
|
self.span = span;
|
|
}
|
|
}
|
|
|
|
/// Prints the punctuation character as a string that should be losslessly
|
|
/// convertible back into the same character.
|
|
-impl fmt::Display for Punct {
|
|
+impl Display for Punct {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.op.fmt(f)
|
|
+ Display::fmt(&self.op, f)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Punct {
|
|
+impl Debug for Punct {
|
|
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
|
let mut debug = fmt.debug_struct("Punct");
|
|
debug.field("op", &self.op);
|
|
debug.field("spacing", &self.spacing);
|
|
imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
|
|
debug.finish()
|
|
}
|
|
}
|
|
@@ -915,25 +942,25 @@ impl Ord for Ident {
|
|
impl Hash for Ident {
|
|
fn hash<H: Hasher>(&self, hasher: &mut H) {
|
|
self.to_string().hash(hasher)
|
|
}
|
|
}
|
|
|
|
/// Prints the identifier as a string that should be losslessly convertible back
|
|
/// into the same identifier.
|
|
-impl fmt::Display for Ident {
|
|
+impl Display for Ident {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Display::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Ident {
|
|
+impl Debug for Ident {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
|
|
/// byte character (`b'a'`), an integer or floating point number with or without
|
|
/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
|
|
///
|
|
/// Boolean literals like `true` and `false` do not belong here, they are
|
|
@@ -1135,36 +1162,36 @@ impl Literal {
|
|
/// nightly compiler, this method will always return `None`.
|
|
///
|
|
/// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
|
|
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
|
self.inner.subspan(range).map(Span::_new)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Literal {
|
|
+impl Debug for Literal {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Literal {
|
|
+impl Display for Literal {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Display::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
/// Public implementation details for the `TokenStream` type, such as iterators.
|
|
pub mod token_stream {
|
|
- use std::fmt;
|
|
+ use crate::{imp, TokenTree};
|
|
+ use std::fmt::{self, Debug};
|
|
use std::marker;
|
|
use std::rc::Rc;
|
|
|
|
pub use crate::TokenStream;
|
|
- use crate::{imp, TokenTree};
|
|
|
|
/// An iterator over `TokenStream`'s `TokenTree`s.
|
|
///
|
|
/// The iteration is "shallow", e.g. the iterator doesn't recurse into
|
|
/// delimited groups, and returns whole groups as token trees.
|
|
#[derive(Clone)]
|
|
pub struct IntoIter {
|
|
inner: imp::TokenTreeIter,
|
|
@@ -1174,19 +1201,19 @@ pub mod token_stream {
|
|
impl Iterator for IntoIter {
|
|
type Item = TokenTree;
|
|
|
|
fn next(&mut self) -> Option<TokenTree> {
|
|
self.inner.next()
|
|
}
|
|
}
|
|
|
|
- impl fmt::Debug for IntoIter {
|
|
+ impl Debug for IntoIter {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
- self.inner.fmt(f)
|
|
+ Debug::fmt(&self.inner, f)
|
|
}
|
|
}
|
|
|
|
impl IntoIterator for TokenStream {
|
|
type Item = TokenTree;
|
|
type IntoIter = IntoIter;
|
|
|
|
fn into_iter(self) -> IntoIter {
|
|
diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/proc-macro2/src/parse.rs
|
|
@@ -0,0 +1,791 @@
|
|
+use crate::fallback::{
|
|
+ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
|
|
+};
|
|
+use crate::{Delimiter, Punct, Spacing, TokenTree};
|
|
+use std::str::{Bytes, CharIndices, Chars};
|
|
+use unicode_xid::UnicodeXID;
|
|
+
|
|
+#[derive(Copy, Clone, Eq, PartialEq)]
|
|
+pub(crate) struct Cursor<'a> {
|
|
+ pub rest: &'a str,
|
|
+ #[cfg(span_locations)]
|
|
+ pub off: u32,
|
|
+}
|
|
+
|
|
+impl<'a> Cursor<'a> {
|
|
+ fn advance(&self, bytes: usize) -> Cursor<'a> {
|
|
+ let (_front, rest) = self.rest.split_at(bytes);
|
|
+ Cursor {
|
|
+ rest,
|
|
+ #[cfg(span_locations)]
|
|
+ off: self.off + _front.chars().count() as u32,
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn starts_with(&self, s: &str) -> bool {
|
|
+ self.rest.starts_with(s)
|
|
+ }
|
|
+
|
|
+ pub(crate) fn is_empty(&self) -> bool {
|
|
+ self.rest.is_empty()
|
|
+ }
|
|
+
|
|
+ fn len(&self) -> usize {
|
|
+ self.rest.len()
|
|
+ }
|
|
+
|
|
+ fn as_bytes(&self) -> &'a [u8] {
|
|
+ self.rest.as_bytes()
|
|
+ }
|
|
+
|
|
+ fn bytes(&self) -> Bytes<'a> {
|
|
+ self.rest.bytes()
|
|
+ }
|
|
+
|
|
+ fn chars(&self) -> Chars<'a> {
|
|
+ self.rest.chars()
|
|
+ }
|
|
+
|
|
+ fn char_indices(&self) -> CharIndices<'a> {
|
|
+ self.rest.char_indices()
|
|
+ }
|
|
+
|
|
+ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
|
|
+ if self.starts_with(tag) {
|
|
+ Ok(self.advance(tag.len()))
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
|
|
+
|
|
+fn skip_whitespace(input: Cursor) -> Cursor {
|
|
+ let mut s = input;
|
|
+
|
|
+ while !s.is_empty() {
|
|
+ let byte = s.as_bytes()[0];
|
|
+ if byte == b'/' {
|
|
+ if s.starts_with("//")
|
|
+ && (!s.starts_with("///") || s.starts_with("////"))
|
|
+ && !s.starts_with("//!")
|
|
+ {
|
|
+ let (cursor, _) = take_until_newline_or_eof(s);
|
|
+ s = cursor;
|
|
+ continue;
|
|
+ } else if s.starts_with("/**/") {
|
|
+ s = s.advance(4);
|
|
+ continue;
|
|
+ } else if s.starts_with("/*")
|
|
+ && (!s.starts_with("/**") || s.starts_with("/***"))
|
|
+ && !s.starts_with("/*!")
|
|
+ {
|
|
+ match block_comment(s) {
|
|
+ Ok((rest, _)) => {
|
|
+ s = rest;
|
|
+ continue;
|
|
+ }
|
|
+ Err(LexError) => return s,
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+ match byte {
|
|
+ b' ' | 0x09..=0x0d => {
|
|
+ s = s.advance(1);
|
|
+ continue;
|
|
+ }
|
|
+ b if b <= 0x7f => {}
|
|
+ _ => {
|
|
+ let ch = s.chars().next().unwrap();
|
|
+ if is_whitespace(ch) {
|
|
+ s = s.advance(ch.len_utf8());
|
|
+ continue;
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+ return s;
|
|
+ }
|
|
+ s
|
|
+}
|
|
+
|
|
+fn block_comment(input: Cursor) -> PResult<&str> {
|
|
+ if !input.starts_with("/*") {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+
|
|
+ let mut depth = 0;
|
|
+ let bytes = input.as_bytes();
|
|
+ let mut i = 0;
|
|
+ let upper = bytes.len() - 1;
|
|
+
|
|
+ while i < upper {
|
|
+ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
|
|
+ depth += 1;
|
|
+ i += 1; // eat '*'
|
|
+ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
|
|
+ depth -= 1;
|
|
+ if depth == 0 {
|
|
+ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
|
|
+ }
|
|
+ i += 1; // eat '/'
|
|
+ }
|
|
+ i += 1;
|
|
+ }
|
|
+
|
|
+ Err(LexError)
|
|
+}
|
|
+
|
|
+fn is_whitespace(ch: char) -> bool {
|
|
+ // Rust treats left-to-right mark and right-to-left mark as whitespace
|
|
+ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
|
|
+}
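A quick check of the behavior implemented by `skip_whitespace`/`block_comment` above: plain comments are treated as whitespace by the fallback lexer, which is what runs when this snippet is executed outside a procedural macro:

```rust
use proc_macro2::TokenStream;

fn main() {
    // Non-doc comments disappear; only `1`, `+`, `2` remain.
    let ts: TokenStream = "/* ignored */ 1 + 2 // also ignored".parse().unwrap();
    assert_eq!(ts.into_iter().count(), 3);
}
```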
|
|
+
|
|
+fn word_break(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ match input.chars().next() {
|
|
+ Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
|
|
+ Some(_) | None => Ok(input),
|
|
+ }
|
|
+}
|
|
+
|
|
+pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
|
|
+ let mut trees = Vec::new();
|
|
+ let mut stack = Vec::new();
|
|
+
|
|
+ loop {
|
|
+ input = skip_whitespace(input);
|
|
+
|
|
+ if let Ok((rest, tt)) = doc_comment(input) {
|
|
+ trees.extend(tt);
|
|
+ input = rest;
|
|
+ continue;
|
|
+ }
|
|
+
|
|
+ #[cfg(span_locations)]
|
|
+ let lo = input.off;
|
|
+
|
|
+ let first = match input.bytes().next() {
|
|
+ Some(first) => first,
|
|
+ None => break,
|
|
+ };
|
|
+
|
|
+ if let Some(open_delimiter) = match first {
|
|
+ b'(' => Some(Delimiter::Parenthesis),
|
|
+ b'[' => Some(Delimiter::Bracket),
|
|
+ b'{' => Some(Delimiter::Brace),
|
|
+ _ => None,
|
|
+ } {
|
|
+ input = input.advance(1);
|
|
+ let frame = (open_delimiter, trees);
|
|
+ #[cfg(span_locations)]
|
|
+ let frame = (lo, frame);
|
|
+ stack.push(frame);
|
|
+ trees = Vec::new();
|
|
+ } else if let Some(close_delimiter) = match first {
|
|
+ b')' => Some(Delimiter::Parenthesis),
|
|
+ b']' => Some(Delimiter::Bracket),
|
|
+ b'}' => Some(Delimiter::Brace),
|
|
+ _ => None,
|
|
+ } {
|
|
+ input = input.advance(1);
|
|
+ let frame = stack.pop().ok_or(LexError)?;
|
|
+ #[cfg(span_locations)]
|
|
+ let (lo, frame) = frame;
|
|
+ let (open_delimiter, outer) = frame;
|
|
+ if open_delimiter != close_delimiter {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
|
|
+ g.set_span(Span {
|
|
+ #[cfg(span_locations)]
|
|
+ lo,
|
|
+ #[cfg(span_locations)]
|
|
+ hi: input.off,
|
|
+ });
|
|
+ trees = outer;
|
|
+ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
|
|
+ } else {
|
|
+ let (rest, mut tt) = leaf_token(input)?;
|
|
+ tt.set_span(crate::Span::_new_stable(Span {
|
|
+ #[cfg(span_locations)]
|
|
+ lo,
|
|
+ #[cfg(span_locations)]
|
|
+ hi: rest.off,
|
|
+ }));
|
|
+ trees.push(tt);
|
|
+ input = rest;
|
|
+ }
|
|
+ }
|
|
+
|
|
+ if stack.is_empty() {
|
|
+ Ok((input, TokenStream { inner: trees }))
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
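`token_stream` above keeps a stack of open delimiters, so unbalanced or mismatched brackets surface as a `LexError` from `FromStr` rather than a panic:

```rust
use proc_macro2::TokenStream;

fn main() {
    assert!("(unclosed".parse::<TokenStream>().is_err());
    assert!("ok { nested [1, 2] }".parse::<TokenStream>().is_ok());
}
```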
|
|
+
|
|
+fn leaf_token(input: Cursor) -> PResult<TokenTree> {
|
|
+ if let Ok((input, l)) = literal(input) {
|
|
+ // must be parsed before ident
|
|
+ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
|
|
+ } else if let Ok((input, p)) = op(input) {
|
|
+ Ok((input, TokenTree::Punct(p)))
|
|
+ } else if let Ok((input, i)) = ident(input) {
|
|
+ Ok((input, TokenTree::Ident(i)))
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
|
|
+
|
|
+fn ident(input: Cursor) -> PResult<crate::Ident> {
|
|
+ let raw = input.starts_with("r#");
|
|
+ let rest = input.advance((raw as usize) << 1);
|
|
+
|
|
+ let (rest, sym) = ident_not_raw(rest)?;
|
|
+
|
|
+ if !raw {
|
|
+ let ident = crate::Ident::new(sym, crate::Span::call_site());
|
|
+ return Ok((rest, ident));
|
|
+ }
|
|
+
|
|
+ if sym == "_" {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+
|
|
+ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
|
|
+ Ok((rest, ident))
|
|
+}
|
|
+
|
|
+fn ident_not_raw(input: Cursor) -> PResult<&str> {
|
|
+ let mut chars = input.char_indices();
|
|
+
|
|
+ match chars.next() {
|
|
+ Some((_, ch)) if is_ident_start(ch) => {}
|
|
+ _ => return Err(LexError),
|
|
+ }
|
|
+
|
|
+ let mut end = input.len();
|
|
+ for (i, ch) in chars {
|
|
+ if !is_ident_continue(ch) {
|
|
+ end = i;
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+
|
|
+ Ok((input.advance(end), &input.rest[..end]))
|
|
+}
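The `ident`/`ident_not_raw` pair above accepts raw identifiers but rejects `r#_`, which can be observed through the public parser (again the fallback path, outside a proc macro):

```rust
use proc_macro2::TokenStream;

fn main() {
    assert!("r#type".parse::<TokenStream>().is_ok()); // raw identifier
    assert!("r#_".parse::<TokenStream>().is_err());   // `_` cannot be raw
}
```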
|
|
+
|
|
+fn literal(input: Cursor) -> PResult<Literal> {
|
|
+ match literal_nocapture(input) {
|
|
+ Ok(a) => {
|
|
+ let end = input.len() - a.len();
|
|
+ Ok((a, Literal::_new(input.rest[..end].to_string())))
|
|
+ }
|
|
+ Err(LexError) => Err(LexError),
|
|
+ }
|
|
+}
|
|
+
|
|
+fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ if let Ok(ok) = string(input) {
|
|
+ Ok(ok)
|
|
+ } else if let Ok(ok) = byte_string(input) {
|
|
+ Ok(ok)
|
|
+ } else if let Ok(ok) = byte(input) {
|
|
+ Ok(ok)
|
|
+ } else if let Ok(ok) = character(input) {
|
|
+ Ok(ok)
|
|
+ } else if let Ok(ok) = float(input) {
|
|
+ Ok(ok)
|
|
+ } else if let Ok(ok) = int(input) {
|
|
+ Ok(ok)
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
|
|
+
|
|
+fn literal_suffix(input: Cursor) -> Cursor {
|
|
+ match ident_not_raw(input) {
|
|
+ Ok((input, _)) => input,
|
|
+ Err(LexError) => input,
|
|
+ }
|
|
+}
|
|
+
|
|
+fn string(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ if let Ok(input) = input.parse("\"") {
|
|
+ cooked_string(input)
|
|
+ } else if let Ok(input) = input.parse("r") {
|
|
+ raw_string(input)
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
|
|
+
|
|
+fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let mut chars = input.char_indices().peekable();
|
|
+
|
|
+ while let Some((i, ch)) = chars.next() {
|
|
+ match ch {
|
|
+ '"' => {
|
|
+ let input = input.advance(i + 1);
|
|
+ return Ok(literal_suffix(input));
|
|
+ }
|
|
+ '\r' => {
|
|
+ if let Some((_, '\n')) = chars.next() {
|
|
+ // ...
|
|
+ } else {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ '\\' => match chars.next() {
|
|
+ Some((_, 'x')) => {
|
|
+ if !backslash_x_char(&mut chars) {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
|
|
+ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
|
|
+ Some((_, 'u')) => {
|
|
+ if !backslash_u(&mut chars) {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ Some((_, '\n')) | Some((_, '\r')) => {
|
|
+ while let Some(&(_, ch)) = chars.peek() {
|
|
+ if ch.is_whitespace() {
|
|
+ chars.next();
|
|
+ } else {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+ _ => break,
|
|
+ },
|
|
+ _ch => {}
|
|
+ }
|
|
+ }
|
|
+ Err(LexError)
|
|
+}
|
|
+
|
|
+fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ if let Ok(input) = input.parse("b\"") {
|
|
+ cooked_byte_string(input)
|
|
+ } else if let Ok(input) = input.parse("br") {
|
|
+ raw_string(input)
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
|
|
+
|
|
+fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let mut bytes = input.bytes().enumerate();
|
|
+ 'outer: while let Some((offset, b)) = bytes.next() {
|
|
+ match b {
|
|
+ b'"' => {
|
|
+ let input = input.advance(offset + 1);
|
|
+ return Ok(literal_suffix(input));
|
|
+ }
|
|
+ b'\r' => {
|
|
+ if let Some((_, b'\n')) = bytes.next() {
|
|
+ // ...
|
|
+ } else {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ b'\\' => match bytes.next() {
|
|
+ Some((_, b'x')) => {
|
|
+ if !backslash_x_byte(&mut bytes) {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
|
|
+ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
|
|
+ Some((newline, b'\n')) | Some((newline, b'\r')) => {
|
|
+ let rest = input.advance(newline + 1);
|
|
+ for (offset, ch) in rest.char_indices() {
|
|
+ if !ch.is_whitespace() {
|
|
+ input = rest.advance(offset);
|
|
+ bytes = input.bytes().enumerate();
|
|
+ continue 'outer;
|
|
+ }
|
|
+ }
|
|
+ break;
|
|
+ }
|
|
+ _ => break,
|
|
+ },
|
|
+ b if b < 0x80 => {}
|
|
+ _ => break,
|
|
+ }
|
|
+ }
|
|
+ Err(LexError)
|
|
+}
|
|
+
|
|
+fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let mut chars = input.char_indices();
|
|
+ let mut n = 0;
|
|
+ while let Some((i, ch)) = chars.next() {
|
|
+ match ch {
|
|
+ '"' => {
|
|
+ n = i;
|
|
+ break;
|
|
+ }
|
|
+ '#' => {}
|
|
+ _ => return Err(LexError),
|
|
+ }
|
|
+ }
|
|
+ for (i, ch) in chars {
|
|
+ match ch {
|
|
+ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
|
|
+ let rest = input.advance(i + 1 + n);
|
|
+ return Ok(literal_suffix(rest));
|
|
+ }
|
|
+ '\r' => {}
|
|
+ _ => {}
|
|
+ }
|
|
+ }
|
|
+ Err(LexError)
|
|
+}
|
|
+
|
|
+fn byte(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let input = input.parse("b'")?;
|
|
+ let mut bytes = input.bytes().enumerate();
|
|
+ let ok = match bytes.next().map(|(_, b)| b) {
|
|
+ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
|
|
+ Some(b'x') => backslash_x_byte(&mut bytes),
|
|
+ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
|
|
+ | Some(b'"') => true,
|
|
+ _ => false,
|
|
+ },
|
|
+ b => b.is_some(),
|
|
+ };
|
|
+ if !ok {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ let (offset, _) = bytes.next().ok_or(LexError)?;
|
|
+ if !input.chars().as_str().is_char_boundary(offset) {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ let input = input.advance(offset).parse("'")?;
|
|
+ Ok(literal_suffix(input))
|
|
+}
|
|
+
|
|
+fn character(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let input = input.parse("'")?;
|
|
+ let mut chars = input.char_indices();
|
|
+ let ok = match chars.next().map(|(_, ch)| ch) {
|
|
+ Some('\\') => match chars.next().map(|(_, ch)| ch) {
|
|
+ Some('x') => backslash_x_char(&mut chars),
|
|
+ Some('u') => backslash_u(&mut chars),
|
|
+ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
|
|
+ true
|
|
+ }
|
|
+ _ => false,
|
|
+ },
|
|
+ ch => ch.is_some(),
|
|
+ };
|
|
+ if !ok {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ let (idx, _) = chars.next().ok_or(LexError)?;
|
|
+ let input = input.advance(idx).parse("'")?;
|
|
+ Ok(literal_suffix(input))
|
|
+}
|
|
+
|
|
+macro_rules! next_ch {
|
|
+ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
|
|
+ match $chars.next() {
|
|
+ Some((_, ch)) => match ch {
|
|
+ $pat $(| $rest)* => ch,
|
|
+ _ => return false,
|
|
+ },
|
|
+ None => return false,
|
|
+ }
|
|
+ };
|
|
+}
|
|
+
|
|
+fn backslash_x_char<I>(chars: &mut I) -> bool
|
|
+where
|
|
+ I: Iterator<Item = (usize, char)>,
|
|
+{
|
|
+ next_ch!(chars @ '0'..='7');
|
|
+ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
|
+ true
|
|
+}
|
|
+
|
|
+fn backslash_x_byte<I>(chars: &mut I) -> bool
|
|
+where
|
|
+ I: Iterator<Item = (usize, u8)>,
|
|
+{
|
|
+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
|
+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
|
+ true
|
|
+}
|
|
+
|
|
+fn backslash_u<I>(chars: &mut I) -> bool
|
|
+where
|
|
+ I: Iterator<Item = (usize, char)>,
|
|
+{
|
|
+ next_ch!(chars @ '{');
|
|
+ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
|
+ loop {
|
|
+ let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
|
|
+ if c == '}' {
|
|
+ return true;
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+fn float(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let mut rest = float_digits(input)?;
|
|
+ if let Some(ch) = rest.chars().next() {
|
|
+ if is_ident_start(ch) {
|
|
+ rest = ident_not_raw(rest)?.0;
|
|
+ }
|
|
+ }
|
|
+ word_break(rest)
|
|
+}
|
|
+
|
|
+fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let mut chars = input.chars().peekable();
|
|
+ match chars.next() {
|
|
+ Some(ch) if ch >= '0' && ch <= '9' => {}
|
|
+ _ => return Err(LexError),
|
|
+ }
|
|
+
|
|
+ let mut len = 1;
|
|
+ let mut has_dot = false;
|
|
+ let mut has_exp = false;
|
|
+ while let Some(&ch) = chars.peek() {
|
|
+ match ch {
|
|
+ '0'..='9' | '_' => {
|
|
+ chars.next();
|
|
+ len += 1;
|
|
+ }
|
|
+ '.' => {
|
|
+ if has_dot {
|
|
+ break;
|
|
+ }
|
|
+ chars.next();
|
|
+ if chars
|
|
+ .peek()
|
|
+ .map(|&ch| ch == '.' || is_ident_start(ch))
|
|
+ .unwrap_or(false)
|
|
+ {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ len += 1;
|
|
+ has_dot = true;
|
|
+ }
|
|
+ 'e' | 'E' => {
|
|
+ chars.next();
|
|
+ len += 1;
|
|
+ has_exp = true;
|
|
+ break;
|
|
+ }
|
|
+ _ => break,
|
|
+ }
|
|
+ }
|
|
+
|
|
+ let rest = input.advance(len);
|
|
+ if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+
|
|
+ if has_exp {
|
|
+ let mut has_exp_value = false;
|
|
+ while let Some(&ch) = chars.peek() {
|
|
+ match ch {
|
|
+ '+' | '-' => {
|
|
+ if has_exp_value {
|
|
+ break;
|
|
+ }
|
|
+ chars.next();
|
|
+ len += 1;
|
|
+ }
|
|
+ '0'..='9' => {
|
|
+ chars.next();
|
|
+ len += 1;
|
|
+ has_exp_value = true;
|
|
+ }
|
|
+ '_' => {
|
|
+ chars.next();
|
|
+ len += 1;
|
|
+ }
|
|
+ _ => break,
|
|
+ }
|
|
+ }
|
|
+ if !has_exp_value {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ }
|
|
+
|
|
+ Ok(input.advance(len))
|
|
+}
|
|
+
|
|
+fn int(input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let mut rest = digits(input)?;
|
|
+ if let Some(ch) = rest.chars().next() {
|
|
+ if is_ident_start(ch) {
|
|
+ rest = ident_not_raw(rest)?.0;
|
|
+ }
|
|
+ }
|
|
+ word_break(rest)
|
|
+}
|
|
+
|
|
+fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
|
|
+ let base = if input.starts_with("0x") {
|
|
+ input = input.advance(2);
|
|
+ 16
|
|
+ } else if input.starts_with("0o") {
|
|
+ input = input.advance(2);
|
|
+ 8
|
|
+ } else if input.starts_with("0b") {
|
|
+ input = input.advance(2);
|
|
+ 2
|
|
+ } else {
|
|
+ 10
|
|
+ };
|
|
+
|
|
+ let mut len = 0;
|
|
+ let mut empty = true;
|
|
+ for b in input.bytes() {
|
|
+ let digit = match b {
|
|
+ b'0'..=b'9' => (b - b'0') as u64,
|
|
+ b'a'..=b'f' => 10 + (b - b'a') as u64,
|
|
+ b'A'..=b'F' => 10 + (b - b'A') as u64,
|
|
+ b'_' => {
|
|
+ if empty && base == 10 {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ len += 1;
|
|
+ continue;
|
|
+ }
|
|
+ _ => break,
|
|
+ };
|
|
+ if digit >= base {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ len += 1;
|
|
+ empty = false;
|
|
+ }
|
|
+ if empty {
|
|
+ Err(LexError)
|
|
+ } else {
|
|
+ Ok(input.advance(len))
|
|
+ }
|
|
+}
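`digits` above handles the `0x`/`0o`/`0b` prefixes, `_` separators, and trailing suffixes; each of the strings below lexes to a single `Literal` token:

```rust
use proc_macro2::{TokenStream, TokenTree};

fn main() {
    for src in &["0xFF", "0o77", "0b1010_1010", "1_000_000u64"] {
        let ts: TokenStream = src.parse().unwrap();
        match ts.into_iter().next().unwrap() {
            TokenTree::Literal(lit) => println!("{} -> {}", src, lit),
            other => panic!("expected a literal, got {:?}", other),
        }
    }
}
```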
|
|
+
|
|
+fn op(input: Cursor) -> PResult<Punct> {
|
|
+ match op_char(input) {
|
|
+ Ok((rest, '\'')) => {
|
|
+ ident(rest)?;
|
|
+ Ok((rest, Punct::new('\'', Spacing::Joint)))
|
|
+ }
|
|
+ Ok((rest, ch)) => {
|
|
+ let kind = match op_char(rest) {
|
|
+ Ok(_) => Spacing::Joint,
|
|
+ Err(LexError) => Spacing::Alone,
|
|
+ };
|
|
+ Ok((rest, Punct::new(ch, kind)))
|
|
+ }
|
|
+ Err(LexError) => Err(LexError),
|
|
+ }
|
|
+}
|
|
+
|
|
+fn op_char(input: Cursor) -> PResult<char> {
|
|
+ if input.starts_with("//") || input.starts_with("/*") {
|
|
+ // Do not accept `/` of a comment as an op.
|
|
+ return Err(LexError);
|
|
+ }
|
|
+
|
|
+ let mut chars = input.chars();
|
|
+ let first = match chars.next() {
|
|
+ Some(ch) => ch,
|
|
+ None => {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ };
|
|
+ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
|
|
+ if recognized.contains(first) {
|
|
+ Ok((input.advance(first.len_utf8()), first))
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
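`op` above decides `Spacing` by peeking at the next character, so a two-character operator such as `+=` comes out as a `Joint` punct followed by an `Alone` one:

```rust
use proc_macro2::{Spacing, TokenStream, TokenTree};

fn main() {
    let spacing: Vec<Spacing> = "+="
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Punct(p) => p.spacing(),
            other => panic!("unexpected token {:?}", other),
        })
        .collect();
    assert_eq!(spacing, vec![Spacing::Joint, Spacing::Alone]);
}
```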
|
|
+
|
|
+fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
|
|
+ #[cfg(span_locations)]
|
|
+ let lo = input.off;
|
|
+ let (rest, (comment, inner)) = doc_comment_contents(input)?;
|
|
+ let span = crate::Span::_new_stable(Span {
|
|
+ #[cfg(span_locations)]
|
|
+ lo,
|
|
+ #[cfg(span_locations)]
|
|
+ hi: rest.off,
|
|
+ });
|
|
+
|
|
+ let mut scan_for_bare_cr = comment;
|
|
+ while let Some(cr) = scan_for_bare_cr.find('\r') {
|
|
+ let rest = &scan_for_bare_cr[cr + 1..];
|
|
+ if !rest.starts_with('\n') {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ scan_for_bare_cr = rest;
|
|
+ }
|
|
+
|
|
+ let mut trees = Vec::new();
|
|
+ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
|
|
+ if inner {
|
|
+ trees.push(Punct::new('!', Spacing::Alone).into());
|
|
+ }
|
|
+ let mut stream = vec![
|
|
+ TokenTree::Ident(crate::Ident::new("doc", span)),
|
|
+ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
|
|
+ TokenTree::Literal(crate::Literal::string(comment)),
|
|
+ ];
|
|
+ for tt in stream.iter_mut() {
|
|
+ tt.set_span(span);
|
|
+ }
|
|
+ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
|
|
+ trees.push(crate::Group::_new_stable(group).into());
|
|
+ for tt in trees.iter_mut() {
|
|
+ tt.set_span(span);
|
|
+ }
|
|
+ Ok((rest, trees))
|
|
+}
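`doc_comment` above rewrites doc comments into the equivalent `#[doc = "..."]` attribute tokens, which is visible through the public API:

```rust
use proc_macro2::TokenStream;

fn main() {
    let ts: TokenStream = "/// hello\nstruct S;".parse().unwrap();
    let rendered = ts.to_string();
    assert!(rendered.contains("doc"));
    assert!(rendered.contains("hello"));
    println!("{}", rendered); // roughly: # [doc = " hello"] struct S ;
}
```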
|
|
+
|
|
+fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
|
|
+ if input.starts_with("//!") {
|
|
+ let input = input.advance(3);
|
|
+ let (input, s) = take_until_newline_or_eof(input);
|
|
+ Ok((input, (s, true)))
|
|
+ } else if input.starts_with("/*!") {
|
|
+ let (input, s) = block_comment(input)?;
|
|
+ Ok((input, (&s[3..s.len() - 2], true)))
|
|
+ } else if input.starts_with("///") {
|
|
+ let input = input.advance(3);
|
|
+ if input.starts_with("/") {
|
|
+ return Err(LexError);
|
|
+ }
|
|
+ let (input, s) = take_until_newline_or_eof(input);
|
|
+ Ok((input, (s, false)))
|
|
+ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
|
|
+ let (input, s) = block_comment(input)?;
|
|
+ Ok((input, (&s[3..s.len() - 2], false)))
|
|
+ } else {
|
|
+ Err(LexError)
|
|
+ }
|
|
+}
|
|
+
|
|
+fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
|
|
+ let chars = input.char_indices();
|
|
+
|
|
+ for (i, ch) in chars {
|
|
+ if ch == '\n' {
|
|
+ return (input.advance(i), &input.rest[..i]);
|
|
+ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
|
|
+ return (input.advance(i + 1), &input.rest[..i]);
|
|
+ }
|
|
+ }
|
|
+
|
|
+ (input.advance(input.len()), input.rest)
|
|
+}
|
|
diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
|
|
deleted file mode 100644
|
|
--- a/third_party/rust/proc-macro2/src/strnom.rs
|
|
+++ /dev/null
|
|
@@ -1,391 +0,0 @@
|
|
-//! Adapted from [`nom`](https://github.com/Geal/nom).
|
|
-
|
|
-use crate::fallback::LexError;
|
|
-use std::str::{Bytes, CharIndices, Chars};
|
|
-use unicode_xid::UnicodeXID;
|
|
-
|
|
-#[derive(Copy, Clone, Eq, PartialEq)]
|
|
-pub struct Cursor<'a> {
|
|
- pub rest: &'a str,
|
|
- #[cfg(span_locations)]
|
|
- pub off: u32,
|
|
-}
|
|
-
|
|
-impl<'a> Cursor<'a> {
|
|
- #[cfg(not(span_locations))]
|
|
- pub fn advance(&self, amt: usize) -> Cursor<'a> {
|
|
- Cursor {
|
|
- rest: &self.rest[amt..],
|
|
- }
|
|
- }
|
|
- #[cfg(span_locations)]
|
|
- pub fn advance(&self, amt: usize) -> Cursor<'a> {
|
|
- Cursor {
|
|
- rest: &self.rest[amt..],
|
|
- off: self.off + (amt as u32),
|
|
- }
|
|
- }
|
|
-
|
|
- pub fn find(&self, p: char) -> Option<usize> {
|
|
- self.rest.find(p)
|
|
- }
|
|
-
|
|
- pub fn starts_with(&self, s: &str) -> bool {
|
|
- self.rest.starts_with(s)
|
|
- }
|
|
-
|
|
- pub fn is_empty(&self) -> bool {
|
|
- self.rest.is_empty()
|
|
- }
|
|
-
|
|
- pub fn len(&self) -> usize {
|
|
- self.rest.len()
|
|
- }
|
|
-
|
|
- pub fn as_bytes(&self) -> &'a [u8] {
|
|
- self.rest.as_bytes()
|
|
- }
|
|
-
|
|
- pub fn bytes(&self) -> Bytes<'a> {
|
|
- self.rest.bytes()
|
|
- }
|
|
-
|
|
- pub fn chars(&self) -> Chars<'a> {
|
|
- self.rest.chars()
|
|
- }
|
|
-
|
|
- pub fn char_indices(&self) -> CharIndices<'a> {
|
|
- self.rest.char_indices()
|
|
- }
|
|
-}
|
|
-
|
|
-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
|
|
-
|
|
-pub fn whitespace(input: Cursor) -> PResult<()> {
|
|
- if input.is_empty() {
|
|
- return Err(LexError);
|
|
- }
|
|
-
|
|
- let bytes = input.as_bytes();
|
|
- let mut i = 0;
|
|
- while i < bytes.len() {
|
|
- let s = input.advance(i);
|
|
- if bytes[i] == b'/' {
|
|
- if s.starts_with("//")
|
|
- && (!s.starts_with("///") || s.starts_with("////"))
|
|
- && !s.starts_with("//!")
|
|
- {
|
|
- if let Some(len) = s.find('\n') {
|
|
- i += len + 1;
|
|
- continue;
|
|
- }
|
|
- break;
|
|
- } else if s.starts_with("/**/") {
|
|
- i += 4;
|
|
- continue;
|
|
- } else if s.starts_with("/*")
|
|
- && (!s.starts_with("/**") || s.starts_with("/***"))
|
|
- && !s.starts_with("/*!")
|
|
- {
|
|
- let (_, com) = block_comment(s)?;
|
|
- i += com.len();
|
|
- continue;
|
|
- }
|
|
- }
|
|
- match bytes[i] {
|
|
- b' ' | 0x09..=0x0d => {
|
|
- i += 1;
|
|
- continue;
|
|
- }
|
|
- b if b <= 0x7f => {}
|
|
- _ => {
|
|
- let ch = s.chars().next().unwrap();
|
|
- if is_whitespace(ch) {
|
|
- i += ch.len_utf8();
|
|
- continue;
|
|
- }
|
|
- }
|
|
- }
|
|
- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
|
|
- }
|
|
- Ok((input.advance(input.len()), ()))
|
|
-}
|
|
-
|
|
-pub fn block_comment(input: Cursor) -> PResult<&str> {
|
|
- if !input.starts_with("/*") {
|
|
- return Err(LexError);
|
|
- }
|
|
-
|
|
- let mut depth = 0;
|
|
- let bytes = input.as_bytes();
|
|
- let mut i = 0;
|
|
- let upper = bytes.len() - 1;
|
|
- while i < upper {
|
|
- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
|
|
- depth += 1;
|
|
- i += 1; // eat '*'
|
|
- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
|
|
- depth -= 1;
|
|
- if depth == 0 {
|
|
- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
|
|
- }
|
|
- i += 1; // eat '/'
|
|
- }
|
|
- i += 1;
|
|
- }
|
|
- Err(LexError)
|
|
-}
|
|
-
|
|
-pub fn skip_whitespace(input: Cursor) -> Cursor {
|
|
- match whitespace(input) {
|
|
- Ok((rest, _)) => rest,
|
|
- Err(LexError) => input,
|
|
- }
|
|
-}
|
|
-
|
|
-fn is_whitespace(ch: char) -> bool {
|
|
- // Rust treats left-to-right mark and right-to-left mark as whitespace
|
|
- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
|
|
-}
|
|
-
|
|
-pub fn word_break(input: Cursor) -> PResult<()> {
|
|
- match input.chars().next() {
|
|
- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
|
|
- Some(_) | None => Ok((input, ())),
|
|
- }
|
|
-}
|
|
-
|
|
-macro_rules! named {
|
|
- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
|
|
- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
|
|
- $submac!(i, $($args)*)
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! alt {
|
|
- ($i:expr, $e:ident | $($rest:tt)*) => {
|
|
- alt!($i, call!($e) | $($rest)*)
|
|
- };
|
|
-
|
|
- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
|
|
- match $subrule!($i, $($args)*) {
|
|
- res @ Ok(_) => res,
|
|
- _ => alt!($i, $($rest)*)
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
|
|
- match $subrule!($i, $($args)*) {
|
|
- Ok((i, o)) => Ok((i, $gen(o))),
|
|
- Err(LexError) => alt!($i, $($rest)*)
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
|
|
- alt!($i, call!($e) => { $gen } | $($rest)*)
|
|
- };
|
|
-
|
|
- ($i:expr, $e:ident => { $gen:expr }) => {
|
|
- alt!($i, call!($e) => { $gen })
|
|
- };
|
|
-
|
|
- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
|
|
- match $subrule!($i, $($args)*) {
|
|
- Ok((i, o)) => Ok((i, $gen(o))),
|
|
- Err(LexError) => Err(LexError),
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, $e:ident) => {
|
|
- alt!($i, call!($e))
|
|
- };
|
|
-
|
|
- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
|
|
- $subrule!($i, $($args)*)
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! do_parse {
|
|
- ($i:expr, ( $($rest:expr),* )) => {
|
|
- Ok(($i, ( $($rest),* )))
|
|
- };
|
|
-
|
|
- ($i:expr, $e:ident >> $($rest:tt)*) => {
|
|
- do_parse!($i, call!($e) >> $($rest)*)
|
|
- };
|
|
-
|
|
- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i, _)) => do_parse!(i, $($rest)*),
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
|
|
- do_parse!($i, $field: call!($e) >> $($rest)*)
|
|
- };
|
|
-
|
|
- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i, o)) => {
|
|
- let $field = o;
|
|
- do_parse!(i, $($rest)*)
|
|
- },
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! peek {
|
|
- ($i:expr, $submac:ident!( $($args:tt)* )) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Ok((_, o)) => Ok(($i, o)),
|
|
- Err(LexError) => Err(LexError),
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! call {
|
|
- ($i:expr, $fun:expr $(, $args:expr)*) => {
|
|
- $fun($i $(, $args)*)
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! option {
|
|
- ($i:expr, $f:expr) => {
|
|
- match $f($i) {
|
|
- Ok((i, o)) => Ok((i, Some(o))),
|
|
- Err(LexError) => Ok(($i, None)),
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! take_until_newline_or_eof {
|
|
- ($i:expr,) => {{
|
|
- if $i.len() == 0 {
|
|
- Ok(($i, ""))
|
|
- } else {
|
|
- match $i.find('\n') {
|
|
- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
|
|
- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
|
|
- }
|
|
- }
|
|
- }};
|
|
-}
|
|
-
|
|
-macro_rules! tuple {
|
|
- ($i:expr, $($rest:tt)*) => {
|
|
- tuple_parser!($i, (), $($rest)*)
|
|
- };
|
|
-}
|
|
-
|
|
-/// Do not use directly. Use `tuple!`.
|
|
-macro_rules! tuple_parser {
|
|
- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
|
|
- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
|
|
- };
|
|
-
|
|
- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, ($($parsed:tt),*), $e:ident) => {
|
|
- tuple_parser!($i, ($($parsed),*), call!($e))
|
|
- };
|
|
-
|
|
- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
|
|
- $submac!($i, $($args)*)
|
|
- };
|
|
-
|
|
- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, ($($parsed:expr),*)) => {
|
|
- Ok(($i, ($($parsed),*)))
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! not {
|
|
- ($i:expr, $submac:ident!( $($args:tt)* )) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Ok((_, _)) => Err(LexError),
|
|
- Err(LexError) => Ok(($i, ())),
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! tag {
|
|
- ($i:expr, $tag:expr) => {
|
|
- if $i.starts_with($tag) {
|
|
- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
|
|
- } else {
|
|
- Err(LexError)
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! punct {
|
|
- ($i:expr, $punct:expr) => {
|
|
- $crate::strnom::punct($i, $punct)
|
|
- };
|
|
-}
|
|
-
|
|
-/// Do not use directly. Use `punct!`.
|
|
-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
|
|
- let input = skip_whitespace(input);
|
|
- if input.starts_with(token) {
|
|
- Ok((input.advance(token.len()), token))
|
|
- } else {
|
|
- Err(LexError)
|
|
- }
|
|
-}
|
|
-
|
|
-macro_rules! preceded {
|
|
- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
|
|
- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
|
|
- Ok((remaining, (_, o))) => Ok((remaining, o)),
|
|
- Err(LexError) => Err(LexError),
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
|
|
- preceded!($i, $submac!($($args)*), call!($g))
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! delimited {
|
|
- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
|
|
- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i1, (_, o, _))) => Ok((i1, o))
|
|
- }
|
|
- };
|
|
-}
|
|
-
|
|
-macro_rules! map {
|
|
- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
|
|
- match $submac!($i, $($args)*) {
|
|
- Err(LexError) => Err(LexError),
|
|
- Ok((i, o)) => Ok((i, call!(o, $g)))
|
|
- }
|
|
- };
|
|
-
|
|
- ($i:expr, $f:expr, $g:expr) => {
|
|
- map!($i, call!($f), $g)
|
|
- };
|
|
-}
|
|
diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
|
|
--- a/third_party/rust/proc-macro2/src/wrapper.rs
|
|
+++ b/third_party/rust/proc-macro2/src/wrapper.rs
|
|
@@ -1,96 +1,39 @@
|
|
-use std::fmt;
|
|
-use std::iter;
|
|
+use crate::detection::inside_proc_macro;
|
|
+use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
|
|
+use std::fmt::{self, Debug, Display};
|
|
+use std::iter::FromIterator;
|
|
use std::ops::RangeBounds;
|
|
-use std::panic::{self, PanicInfo};
|
|
+use std::panic;
|
|
#[cfg(super_unstable)]
|
|
use std::path::PathBuf;
|
|
use std::str::FromStr;
|
|
|
|
-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
|
|
-
|
|
#[derive(Clone)]
|
|
-pub enum TokenStream {
|
|
+pub(crate) enum TokenStream {
|
|
Compiler(DeferredTokenStream),
|
|
Fallback(fallback::TokenStream),
|
|
}
|
|
|
|
// Work around https://github.com/rust-lang/rust/issues/65080.
|
|
// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
|
|
// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
|
|
// late as possible to batch together consecutive uses of the Extend impl.
|
|
#[derive(Clone)]
|
|
-pub struct DeferredTokenStream {
|
|
+pub(crate) struct DeferredTokenStream {
|
|
stream: proc_macro::TokenStream,
|
|
extra: Vec<proc_macro::TokenTree>,
|
|
}
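The comment above explains why `DeferredTokenStream` buffers tokens in `extra`. The access pattern it optimizes looks roughly like this from the caller's side (many small `Extend` calls in a row, as produced by `quote!` expansion):

```rust
use proc_macro2::{Ident, Span, TokenStream, TokenTree};

fn main() {
    let mut out = TokenStream::new();
    for &name in &["a", "b", "c"] {
        let ident = TokenTree::Ident(Ident::new(name, Span::call_site()));
        // Inside a proc macro each call appends to the cheap `extra` buffer;
        // the real proc_macro::TokenStream is only touched when needed.
        out.extend(std::iter::once(ident));
    }
    println!("{}", out);
}
```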
|
|
|
|
-pub enum LexError {
|
|
+pub(crate) enum LexError {
|
|
Compiler(proc_macro::LexError),
|
|
Fallback(fallback::LexError),
|
|
}
|
|
|
|
-fn nightly_works() -> bool {
|
|
- use std::sync::atomic::*;
|
|
- use std::sync::Once;
|
|
-
|
|
- static WORKS: AtomicUsize = AtomicUsize::new(0);
|
|
- static INIT: Once = Once::new();
|
|
-
|
|
- match WORKS.load(Ordering::SeqCst) {
|
|
- 1 => return false,
|
|
- 2 => return true,
|
|
- _ => {}
|
|
- }
|
|
-
|
|
- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
|
|
- // then use catch_unwind to determine whether the compiler's proc_macro is
|
|
- // working. When proc-macro2 is used from outside of a procedural macro all
|
|
- // of the proc_macro crate's APIs currently panic.
|
|
- //
|
|
- // The Once is to prevent the possibility of this ordering:
|
|
- //
|
|
- // thread 1 calls take_hook, gets the user's original hook
|
|
- // thread 1 calls set_hook with the null hook
|
|
- // thread 2 calls take_hook, thinks null hook is the original hook
|
|
- // thread 2 calls set_hook with the null hook
|
|
- // thread 1 calls set_hook with the actual original hook
|
|
- // thread 2 calls set_hook with what it thinks is the original hook
|
|
- //
|
|
- // in which the user's hook has been lost.
|
|
- //
|
|
- // There is still a race condition where a panic in a different thread can
|
|
- // happen during the interval that the user's original panic hook is
|
|
- // unregistered such that their hook is incorrectly not called. This is
|
|
- // sufficiently unlikely and less bad than printing panic messages to stderr
|
|
- // on correct use of this crate. Maybe there is a libstd feature request
|
|
- // here. For now, if a user needs to guarantee that this failure mode does
|
|
- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
|
|
- // the main thread before launching any other threads.
|
|
- INIT.call_once(|| {
|
|
- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
|
|
-
|
|
- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
|
|
- let sanity_check = &*null_hook as *const PanicHook;
|
|
- let original_hook = panic::take_hook();
|
|
- panic::set_hook(null_hook);
|
|
-
|
|
- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
|
|
- WORKS.store(works as usize + 1, Ordering::SeqCst);
|
|
-
|
|
- let hopefully_null_hook = panic::take_hook();
|
|
- panic::set_hook(original_hook);
|
|
- if sanity_check != &*hopefully_null_hook {
|
|
- panic!("observed race condition in proc_macro2::nightly_works");
|
|
- }
|
|
- });
|
|
- nightly_works()
|
|
-}
|
|
-
|
|
fn mismatch() -> ! {
|
|
panic!("stable/nightly mismatch")
|
|
}
|
|
|
|
impl DeferredTokenStream {
|
|
fn new(stream: proc_macro::TokenStream) -> Self {
|
|
DeferredTokenStream {
|
|
stream,
|
|
@@ -98,28 +41,33 @@ impl DeferredTokenStream {
|
|
}
|
|
}
|
|
|
|
fn is_empty(&self) -> bool {
|
|
self.stream.is_empty() && self.extra.is_empty()
|
|
}
|
|
|
|
fn evaluate_now(&mut self) {
|
|
- self.stream.extend(self.extra.drain(..));
|
|
+ // If-check provides a fast short circuit for the common case of `extra`
|
|
+ // being empty, which saves a round trip over the proc macro bridge.
|
|
+ // Improves macro expansion time in winrt by 6% in debug mode.
|
|
+ if !self.extra.is_empty() {
|
|
+ self.stream.extend(self.extra.drain(..));
|
|
+ }
|
|
}
|
|
|
|
fn into_token_stream(mut self) -> proc_macro::TokenStream {
|
|
self.evaluate_now();
|
|
self.stream
|
|
}
|
|
}
|
|
|
|
impl TokenStream {
|
|
pub fn new() -> TokenStream {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
|
|
} else {
|
|
TokenStream::Fallback(fallback::TokenStream::new())
|
|
}
|
|
}
|
|
|
|
pub fn is_empty(&self) -> bool {
|
|
match self {
|
|
@@ -142,31 +90,37 @@ impl TokenStream {
|
|
}
|
|
}
|
|
}
|
|
|
|
impl FromStr for TokenStream {
|
|
type Err = LexError;
|
|
|
|
fn from_str(src: &str) -> Result<TokenStream, LexError> {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Ok(TokenStream::Compiler(DeferredTokenStream::new(
|
|
- src.parse()?,
|
|
+ proc_macro_parse(src)?,
|
|
)))
|
|
} else {
|
|
Ok(TokenStream::Fallback(src.parse()?))
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for TokenStream {
|
|
+// Work around https://github.com/rust-lang/rust/issues/58736.
|
|
+fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
|
|
+ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
|
|
+ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
|
|
+}
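`proc_macro_parse` above converts a potentially panicking `proc_macro` parse into a `Result`. The general shape of that workaround, with purely illustrative names and a stand-in for the panicky call:

```rust
use std::panic;

// Stand-in for a library call that may panic; catching the unwind turns the
// panic into an error value instead of aborting macro expansion.
fn parse_or_error(src: &str) -> Result<usize, String> {
    panic::catch_unwind(|| src.len())
        .map_err(|_| String::from("parser panicked"))
}

fn main() {
    assert_eq!(parse_or_error("fn main() {}"), Ok(12));
}
```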
|
|
+
|
|
+impl Display for TokenStream {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
|
|
- TokenStream::Fallback(tts) => tts.fmt(f),
|
|
+ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
|
|
+ TokenStream::Fallback(tts) => Display::fmt(tts, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl From<proc_macro::TokenStream> for TokenStream {
|
|
fn from(inner: proc_macro::TokenStream) -> TokenStream {
|
|
TokenStream::Compiler(DeferredTokenStream::new(inner))
|
|
}
|
|
@@ -182,17 +136,17 @@ impl From<TokenStream> for proc_macro::T
|
|
}
|
|
|
|
impl From<fallback::TokenStream> for TokenStream {
|
|
fn from(inner: fallback::TokenStream) -> TokenStream {
|
|
TokenStream::Fallback(inner)
|
|
}
|
|
}
|
|
|
|
-// Assumes nightly_works().
|
|
+// Assumes inside_proc_macro().
|
|
fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
|
|
match token {
|
|
TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
|
|
TokenTree::Punct(tt) => {
|
|
let spacing = match tt.spacing() {
|
|
Spacing::Joint => proc_macro::Spacing::Joint,
|
|
Spacing::Alone => proc_macro::Spacing::Alone,
|
|
};
|
|
@@ -202,37 +156,37 @@ fn into_compiler_token(token: TokenTree)
|
|
}
|
|
TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
|
|
TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
|
|
}
|
|
}
|
|
|
|
impl From<TokenTree> for TokenStream {
|
|
fn from(token: TokenTree) -> TokenStream {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
|
|
} else {
|
|
TokenStream::Fallback(token.into())
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl iter::FromIterator<TokenTree> for TokenStream {
|
|
+impl FromIterator<TokenTree> for TokenStream {
|
|
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
TokenStream::Compiler(DeferredTokenStream::new(
|
|
trees.into_iter().map(into_compiler_token).collect(),
|
|
))
|
|
} else {
|
|
TokenStream::Fallback(trees.into_iter().collect())
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl iter::FromIterator<TokenStream> for TokenStream {
|
|
+impl FromIterator<TokenStream> for TokenStream {
|
|
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
|
|
let mut streams = streams.into_iter();
|
|
match streams.next() {
|
|
Some(TokenStream::Compiler(mut first)) => {
|
|
first.evaluate_now();
|
|
first.stream.extend(streams.map(|s| match s {
|
|
TokenStream::Compiler(s) => s.into_token_stream(),
|
|
TokenStream::Fallback(_) => mismatch(),
|
|
@@ -247,75 +201,76 @@ impl iter::FromIterator<TokenStream> for
|
|
TokenStream::Fallback(first)
|
|
}
|
|
None => TokenStream::new(),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Extend<TokenTree> for TokenStream {
|
|
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
|
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
|
|
match self {
|
|
TokenStream::Compiler(tts) => {
|
|
// Here is the reason for DeferredTokenStream.
|
|
- tts.extra
|
|
- .extend(streams.into_iter().map(into_compiler_token));
|
|
+ for token in stream {
|
|
+ tts.extra.push(into_compiler_token(token));
|
|
+ }
|
|
}
|
|
- TokenStream::Fallback(tts) => tts.extend(streams),
|
|
+ TokenStream::Fallback(tts) => tts.extend(stream),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Extend<TokenStream> for TokenStream {
|
|
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
|
|
match self {
|
|
TokenStream::Compiler(tts) => {
|
|
tts.evaluate_now();
|
|
tts.stream
|
|
- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
|
|
+ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
|
|
}
|
|
TokenStream::Fallback(tts) => {
|
|
- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
|
|
+ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for TokenStream {
|
|
+impl Debug for TokenStream {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
|
|
- TokenStream::Fallback(tts) => tts.fmt(f),
|
|
+ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
|
|
+ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl From<proc_macro::LexError> for LexError {
|
|
fn from(e: proc_macro::LexError) -> LexError {
|
|
LexError::Compiler(e)
|
|
}
|
|
}
|
|
|
|
impl From<fallback::LexError> for LexError {
|
|
fn from(e: fallback::LexError) -> LexError {
|
|
LexError::Fallback(e)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for LexError {
|
|
+impl Debug for LexError {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- LexError::Compiler(e) => e.fmt(f),
|
|
- LexError::Fallback(e) => e.fmt(f),
|
|
+ LexError::Compiler(e) => Debug::fmt(e, f),
|
|
+ LexError::Fallback(e) => Debug::fmt(e, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub enum TokenTreeIter {
|
|
+pub(crate) enum TokenTreeIter {
|
|
Compiler(proc_macro::token_stream::IntoIter),
|
|
Fallback(fallback::TokenTreeIter),
|
|
}
|
|
|
|
impl IntoIterator for TokenStream {
|
|
type Item = TokenTree;
|
|
type IntoIter = TokenTreeIter;
|
|
|
|
@@ -356,25 +311,25 @@ impl Iterator for TokenTreeIter {
|
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
|
match self {
|
|
TokenTreeIter::Compiler(tts) => tts.size_hint(),
|
|
TokenTreeIter::Fallback(tts) => tts.size_hint(),
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for TokenTreeIter {
|
|
+impl Debug for TokenTreeIter {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
f.debug_struct("TokenTreeIter").finish()
|
|
}
|
|
}
|
|
|
|
#[derive(Clone, PartialEq, Eq)]
|
|
#[cfg(super_unstable)]
|
|
-pub enum SourceFile {
|
|
+pub(crate) enum SourceFile {
|
|
Compiler(proc_macro::SourceFile),
|
|
Fallback(fallback::SourceFile),
|
|
}
|
|
|
|
#[cfg(super_unstable)]
|
|
impl SourceFile {
|
|
fn nightly(sf: proc_macro::SourceFile) -> Self {
|
|
SourceFile::Compiler(sf)
|
|
@@ -392,68 +347,87 @@ impl SourceFile {
|
|
match self {
|
|
SourceFile::Compiler(a) => a.is_real(),
|
|
SourceFile::Fallback(a) => a.is_real(),
|
|
}
|
|
}
|
|
}
|
|
|
|
#[cfg(super_unstable)]
|
|
-impl fmt::Debug for SourceFile {
|
|
+impl Debug for SourceFile {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- SourceFile::Compiler(a) => a.fmt(f),
|
|
- SourceFile::Fallback(a) => a.fmt(f),
|
|
+ SourceFile::Compiler(a) => Debug::fmt(a, f),
|
|
+ SourceFile::Fallback(a) => Debug::fmt(a, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
#[cfg(any(super_unstable, feature = "span-locations"))]
|
|
-pub struct LineColumn {
|
|
+pub(crate) struct LineColumn {
|
|
pub line: usize,
|
|
pub column: usize,
|
|
}
|
|
|
|
#[derive(Copy, Clone)]
|
|
-pub enum Span {
|
|
+pub(crate) enum Span {
|
|
Compiler(proc_macro::Span),
|
|
Fallback(fallback::Span),
|
|
}
|
|
|
|
impl Span {
|
|
pub fn call_site() -> Span {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Span::Compiler(proc_macro::Span::call_site())
|
|
} else {
|
|
Span::Fallback(fallback::Span::call_site())
|
|
}
|
|
}
|
|
|
|
+ #[cfg(hygiene)]
|
|
+ pub fn mixed_site() -> Span {
|
|
+ if inside_proc_macro() {
|
|
+ Span::Compiler(proc_macro::Span::mixed_site())
|
|
+ } else {
|
|
+ Span::Fallback(fallback::Span::mixed_site())
|
|
+ }
|
|
+ }
|
|
+
|
|
#[cfg(super_unstable)]
|
|
pub fn def_site() -> Span {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Span::Compiler(proc_macro::Span::def_site())
|
|
} else {
|
|
Span::Fallback(fallback::Span::def_site())
|
|
}
|
|
}
|
|
|
|
- #[cfg(super_unstable)]
|
|
pub fn resolved_at(&self, other: Span) -> Span {
|
|
match (self, other) {
|
|
+ #[cfg(hygiene)]
|
|
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
|
|
+
|
|
+ // Name resolution affects semantics, but location is only cosmetic
|
|
+ #[cfg(not(hygiene))]
|
|
+ (Span::Compiler(_), Span::Compiler(_)) => other,
|
|
+
|
|
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
|
|
_ => mismatch(),
|
|
}
|
|
}
|
|
|
|
- #[cfg(super_unstable)]
|
|
pub fn located_at(&self, other: Span) -> Span {
|
|
match (self, other) {
|
|
+ #[cfg(hygiene)]
|
|
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
|
|
+
|
|
+ // Name resolution affects semantics, but location is only cosmetic
|
|
+ #[cfg(not(hygiene))]
|
|
+ (Span::Compiler(_), Span::Compiler(_)) => *self,
|
|
+
|
|
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
|
|
_ => mismatch(),
|
|
}
|
|
}
|
|
|
|
pub fn unwrap(self) -> proc_macro::Span {
|
|
match self {
|
|
Span::Compiler(s) => s,
|
|
@@ -537,36 +511,36 @@ impl From<proc_macro::Span> for crate::S
|
|
}
|
|
|
|
impl From<fallback::Span> for Span {
|
|
fn from(inner: fallback::Span) -> Span {
|
|
Span::Fallback(inner)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Span {
|
|
+impl Debug for Span {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Span::Compiler(s) => s.fmt(f),
|
|
- Span::Fallback(s) => s.fmt(f),
|
|
+ Span::Compiler(s) => Debug::fmt(s, f),
|
|
+ Span::Fallback(s) => Debug::fmt(s, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
|
|
+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
|
|
match span {
|
|
Span::Compiler(s) => {
|
|
debug.field("span", &s);
|
|
}
|
|
Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub enum Group {
|
|
+pub(crate) enum Group {
|
|
Compiler(proc_macro::Group),
|
|
Fallback(fallback::Group),
|
|
}
|
|
|
|
impl Group {
|
|
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
|
|
match stream {
|
|
TokenStream::Compiler(tts) => {
|
|
@@ -647,36 +621,36 @@ impl Group {
|
|
}
|
|
|
|
impl From<fallback::Group> for Group {
|
|
fn from(g: fallback::Group) -> Self {
|
|
Group::Fallback(g)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Group {
|
|
+impl Display for Group {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Group::Compiler(group) => group.fmt(formatter),
|
|
- Group::Fallback(group) => group.fmt(formatter),
|
|
+ Group::Compiler(group) => Display::fmt(group, formatter),
|
|
+ Group::Fallback(group) => Display::fmt(group, formatter),
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Group {
|
|
+impl Debug for Group {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Group::Compiler(group) => group.fmt(formatter),
|
|
- Group::Fallback(group) => group.fmt(formatter),
|
|
+ Group::Compiler(group) => Debug::fmt(group, formatter),
|
|
+ Group::Fallback(group) => Debug::fmt(group, formatter),
|
|
}
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub enum Ident {
|
|
+pub(crate) enum Ident {
|
|
Compiler(proc_macro::Ident),
|
|
Fallback(fallback::Ident),
|
|
}
|
|
|
|
impl Ident {
|
|
pub fn new(string: &str, span: Span) -> Ident {
|
|
match span {
|
|
Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
|
|
@@ -742,56 +716,56 @@ where
|
|
let other = other.as_ref();
|
|
match self {
|
|
Ident::Compiler(t) => t.to_string() == other,
|
|
Ident::Fallback(t) => t == other,
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Ident {
|
|
+impl Display for Ident {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Ident::Compiler(t) => t.fmt(f),
|
|
- Ident::Fallback(t) => t.fmt(f),
|
|
+ Ident::Compiler(t) => Display::fmt(t, f),
|
|
+ Ident::Fallback(t) => Display::fmt(t, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Ident {
|
|
+impl Debug for Ident {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Ident::Compiler(t) => t.fmt(f),
|
|
- Ident::Fallback(t) => t.fmt(f),
|
|
+ Ident::Compiler(t) => Debug::fmt(t, f),
|
|
+ Ident::Fallback(t) => Debug::fmt(t, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
#[derive(Clone)]
|
|
-pub enum Literal {
|
|
+pub(crate) enum Literal {
|
|
Compiler(proc_macro::Literal),
|
|
Fallback(fallback::Literal),
|
|
}
|
|
|
|
macro_rules! suffixed_numbers {
|
|
($($name:ident => $kind:ident,)*) => ($(
|
|
pub fn $name(n: $kind) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::$name(n))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::$name(n))
|
|
}
|
|
}
|
|
)*)
|
|
}
|
|
|
|
macro_rules! unsuffixed_integers {
|
|
($($name:ident => $kind:ident,)*) => ($(
|
|
pub fn $name(n: $kind) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::$name(n))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::$name(n))
|
|
}
|
|
}
|
|
)*)
|
|
}
|
|
|
|
@@ -825,49 +799,49 @@ impl Literal {
|
|
i16_unsuffixed => i16,
|
|
i32_unsuffixed => i32,
|
|
i64_unsuffixed => i64,
|
|
i128_unsuffixed => i128,
|
|
isize_unsuffixed => isize,
|
|
}
|
|
|
|
pub fn f32_unsuffixed(f: f32) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
|
|
}
|
|
}
|
|
|
|
pub fn f64_unsuffixed(f: f64) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
|
|
}
|
|
}
|
|
|
|
pub fn string(t: &str) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::string(t))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::string(t))
|
|
}
|
|
}
|
|
|
|
pub fn character(t: char) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::character(t))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::character(t))
|
|
}
|
|
}
|
|
|
|
pub fn byte_string(bytes: &[u8]) -> Literal {
|
|
- if nightly_works() {
|
|
+ if inside_proc_macro() {
|
|
Literal::Compiler(proc_macro::Literal::byte_string(bytes))
|
|
} else {
|
|
Literal::Fallback(fallback::Literal::byte_string(bytes))
|
|
}
|
|
}
|
|
|
|
pub fn span(&self) -> Span {
|
|
match self {
|
|
@@ -903,25 +877,25 @@ impl Literal {
|
|
}
|
|
|
|
impl From<fallback::Literal> for Literal {
|
|
fn from(s: fallback::Literal) -> Literal {
|
|
Literal::Fallback(s)
|
|
}
|
|
}
|
|
|
|
-impl fmt::Display for Literal {
|
|
+impl Display for Literal {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Literal::Compiler(t) => t.fmt(f),
|
|
- Literal::Fallback(t) => t.fmt(f),
|
|
+ Literal::Compiler(t) => Display::fmt(t, f),
|
|
+ Literal::Fallback(t) => Display::fmt(t, f),
|
|
}
|
|
}
|
|
}
|
|
|
|
-impl fmt::Debug for Literal {
|
|
+impl Debug for Literal {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
match self {
|
|
- Literal::Compiler(t) => t.fmt(f),
|
|
- Literal::Fallback(t) => t.fmt(f),
|
|
+ Literal::Compiler(t) => Debug::fmt(t, f),
|
|
+ Literal::Fallback(t) => Debug::fmt(t, f),
|
|
}
|
|
}
|
|
}
|
|
diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/proc-macro2/tests/comments.rs
|
|
@@ -0,0 +1,103 @@
|
|
+use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
|
|
+
|
|
+// #[doc = "..."] -> "..."
|
|
+fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
|
|
+ lit_of_doc_comment(tokens, false)
|
|
+}
|
|
+
|
|
+// #![doc = "..."] -> "..."
|
|
+fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
|
|
+ lit_of_doc_comment(tokens, true)
|
|
+}
|
|
+
|
|
+fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
|
|
+ let mut iter = tokens.clone().into_iter();
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Punct(punct) => {
|
|
+ assert_eq!(punct.as_char(), '#');
|
|
+ assert_eq!(punct.spacing(), Spacing::Alone);
|
|
+ }
|
|
+ _ => panic!("wrong token {:?}", tokens),
|
|
+ }
|
|
+ if inner {
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Punct(punct) => {
|
|
+ assert_eq!(punct.as_char(), '!');
|
|
+ assert_eq!(punct.spacing(), Spacing::Alone);
|
|
+ }
|
|
+ _ => panic!("wrong token {:?}", tokens),
|
|
+ }
|
|
+ }
|
|
+ iter = match iter.next().unwrap() {
|
|
+ TokenTree::Group(group) => {
|
|
+ assert_eq!(group.delimiter(), Delimiter::Bracket);
|
|
+ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
|
|
+ group.stream().into_iter()
|
|
+ }
|
|
+ _ => panic!("wrong token {:?}", tokens),
|
|
+ };
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
|
|
+ _ => panic!("wrong token {:?}", tokens),
|
|
+ }
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Punct(punct) => {
|
|
+ assert_eq!(punct.as_char(), '=');
|
|
+ assert_eq!(punct.spacing(), Spacing::Alone);
|
|
+ }
|
|
+ _ => panic!("wrong token {:?}", tokens),
|
|
+ }
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Literal(literal) => {
|
|
+ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
|
|
+ literal
|
|
+ }
|
|
+ _ => panic!("wrong token {:?}", tokens),
|
|
+ }
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn closed_immediately() {
|
|
+ let stream = "/**/".parse::<TokenStream>().unwrap();
|
|
+ let tokens = stream.into_iter().collect::<Vec<_>>();
|
|
+ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn incomplete() {
|
|
+ assert!("/*/".parse::<TokenStream>().is_err());
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn lit() {
|
|
+ let stream = "/// doc".parse::<TokenStream>().unwrap();
|
|
+ let lit = lit_of_outer_doc_comment(stream);
|
|
+ assert_eq!(lit.to_string(), "\" doc\"");
|
|
+
|
|
+ let stream = "//! doc".parse::<TokenStream>().unwrap();
|
|
+ let lit = lit_of_inner_doc_comment(stream);
|
|
+ assert_eq!(lit.to_string(), "\" doc\"");
|
|
+
|
|
+ let stream = "/** doc */".parse::<TokenStream>().unwrap();
|
|
+ let lit = lit_of_outer_doc_comment(stream);
|
|
+ assert_eq!(lit.to_string(), "\" doc \"");
|
|
+
|
|
+ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
|
|
+ let lit = lit_of_inner_doc_comment(stream);
|
|
+ assert_eq!(lit.to_string(), "\" doc \"");
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn carriage_return() {
|
|
+ let stream = "///\r\n".parse::<TokenStream>().unwrap();
|
|
+ let lit = lit_of_outer_doc_comment(stream);
|
|
+ assert_eq!(lit.to_string(), "\"\"");
|
|
+
|
|
+ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
|
|
+ let lit = lit_of_outer_doc_comment(stream);
|
|
+ assert_eq!(lit.to_string(), "\"\\r\\n\"");
|
|
+
|
|
+ "///\r".parse::<TokenStream>().unwrap_err();
|
|
+ "///\r \n".parse::<TokenStream>().unwrap_err();
|
|
+ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
|
|
+}
|
|
diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
|
|
--- a/third_party/rust/proc-macro2/tests/test.rs
|
|
+++ b/third_party/rust/proc-macro2/tests/test.rs
|
|
@@ -1,12 +1,11 @@
|
|
+use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
|
|
use std::str::{self, FromStr};
|
|
|
|
-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
|
|
-
|
|
#[test]
|
|
fn idents() {
|
|
assert_eq!(
|
|
Ident::new("String", Span::call_site()).to_string(),
|
|
"String"
|
|
);
|
|
assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
|
|
assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
|
|
@@ -105,16 +104,43 @@ fn literal_suffix() {
|
|
assert_eq!(token_count("999u256"), 1);
|
|
assert_eq!(token_count("999r#u256"), 3);
|
|
assert_eq!(token_count("1."), 1);
|
|
assert_eq!(token_count("1.f32"), 3);
|
|
assert_eq!(token_count("1.0_0"), 1);
|
|
assert_eq!(token_count("1._0"), 3);
|
|
assert_eq!(token_count("1._m"), 3);
|
|
assert_eq!(token_count("\"\"s"), 1);
|
|
+ assert_eq!(token_count("r\"\"r"), 1);
|
|
+ assert_eq!(token_count("b\"\"b"), 1);
|
|
+ assert_eq!(token_count("br\"\"br"), 1);
|
|
+ assert_eq!(token_count("r#\"\"#r"), 1);
|
|
+ assert_eq!(token_count("'c'c"), 1);
|
|
+ assert_eq!(token_count("b'b'b"), 1);
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn literal_iter_negative() {
|
|
+ let negative_literal = Literal::i32_suffixed(-3);
|
|
+ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
|
|
+ let mut iter = tokens.into_iter();
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Punct(punct) => {
|
|
+ assert_eq!(punct.as_char(), '-');
|
|
+ assert_eq!(punct.spacing(), Spacing::Alone);
|
|
+ }
|
|
+ unexpected => panic!("unexpected token {:?}", unexpected),
|
|
+ }
|
|
+ match iter.next().unwrap() {
|
|
+ TokenTree::Literal(literal) => {
|
|
+ assert_eq!(literal.to_string(), "3i32");
|
|
+ }
|
|
+ unexpected => panic!("unexpected token {:?}", unexpected),
|
|
+ }
|
|
+ assert!(iter.next().is_none());
|
|
}
|
|
|
|
#[test]
|
|
fn roundtrip() {
|
|
fn roundtrip(p: &str) {
|
|
println!("parse: {}", p);
|
|
let s = p.parse::<TokenStream>().unwrap().to_string();
|
|
println!("first: {}", s);
|
|
@@ -161,46 +187,16 @@ fn fail() {
|
|
fail("' static");
|
|
fail("r#1");
|
|
fail("r#_");
|
|
}
|
|
|
|
#[cfg(span_locations)]
|
|
#[test]
|
|
fn span_test() {
|
|
- use proc_macro2::TokenTree;
|
|
-
|
|
- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
|
|
- let ts = p.parse::<TokenStream>().unwrap();
|
|
- check_spans_internal(ts, &mut lines);
|
|
- }
|
|
-
|
|
- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
|
|
- for i in ts {
|
|
- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
|
|
- *lines = rest;
|
|
-
|
|
- let start = i.span().start();
|
|
- assert_eq!(start.line, sline, "sline did not match for {}", i);
|
|
- assert_eq!(start.column, scol, "scol did not match for {}", i);
|
|
-
|
|
- let end = i.span().end();
|
|
- assert_eq!(end.line, eline, "eline did not match for {}", i);
|
|
- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
|
|
-
|
|
- match i {
|
|
- TokenTree::Group(ref g) => {
|
|
- check_spans_internal(g.stream().clone(), lines);
|
|
- }
|
|
- _ => {}
|
|
- }
|
|
- }
|
|
- }
|
|
- }
|
|
-
|
|
check_spans(
|
|
"\
|
|
/// This is a document comment
|
|
testing 123
|
|
{
|
|
testing 234
|
|
}",
|
|
&[
|
|
@@ -269,59 +265,17 @@ fn span_join() {
|
|
joined1.unwrap().source_file(),
|
|
source1[0].span().source_file()
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn no_panic() {
|
|
let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
|
|
- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
|
|
-}
|
|
-
|
|
-#[test]
|
|
-fn tricky_doc_comment() {
|
|
- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
|
|
- let tokens = stream.into_iter().collect::<Vec<_>>();
|
|
- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
|
|
-
|
|
- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
|
|
- let tokens = stream.into_iter().collect::<Vec<_>>();
|
|
- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
|
|
- match tokens[0] {
|
|
- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
|
|
- _ => panic!("wrong token {:?}", tokens[0]),
|
|
- }
|
|
- let mut tokens = match tokens[1] {
|
|
- proc_macro2::TokenTree::Group(ref tt) => {
|
|
- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
|
|
- tt.stream().into_iter()
|
|
- }
|
|
- _ => panic!("wrong token {:?}", tokens[0]),
|
|
- };
|
|
-
|
|
- match tokens.next().unwrap() {
|
|
- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
|
|
- t => panic!("wrong token {:?}", t),
|
|
- }
|
|
- match tokens.next().unwrap() {
|
|
- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
|
|
- t => panic!("wrong token {:?}", t),
|
|
- }
|
|
- match tokens.next().unwrap() {
|
|
- proc_macro2::TokenTree::Literal(ref tt) => {
|
|
- assert_eq!(tt.to_string(), "\" doc\"");
|
|
- }
|
|
- t => panic!("wrong token {:?}", t),
|
|
- }
|
|
- assert!(tokens.next().is_none());
|
|
-
|
|
- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
|
|
- let tokens = stream.into_iter().collect::<Vec<_>>();
|
|
- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
|
|
+ assert!(s.parse::<TokenStream>().is_err());
|
|
}
|
|
|
|
#[test]
|
|
fn op_before_comment() {
|
|
let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
|
|
match tts.next().unwrap() {
|
|
TokenTree::Punct(tt) => {
|
|
assert_eq!(tt.as_char(), '~');
|
|
@@ -340,30 +294,30 @@ fn raw_identifier() {
|
|
}
|
|
assert!(tts.next().is_none());
|
|
}
|
|
|
|
#[test]
|
|
fn test_debug_ident() {
|
|
let ident = Ident::new("proc_macro", Span::call_site());
|
|
|
|
- #[cfg(not(procmacro2_semver_exempt))]
|
|
+ #[cfg(not(span_locations))]
|
|
let expected = "Ident(proc_macro)";
|
|
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
|
|
+ #[cfg(span_locations)]
|
|
+ let expected = "Ident { sym: proc_macro }";
|
|
|
|
assert_eq!(expected, format!("{:?}", ident));
|
|
}
|
|
|
|
#[test]
|
|
fn test_debug_tokenstream() {
|
|
let tts = TokenStream::from_str("[a + 1]").unwrap();
|
|
|
|
- #[cfg(not(procmacro2_semver_exempt))]
|
|
+ #[cfg(not(span_locations))]
|
|
let expected = "\
|
|
TokenStream [
|
|
Group {
|
|
delimiter: Bracket,
|
|
stream: TokenStream [
|
|
Ident {
|
|
sym: a,
|
|
},
|
|
@@ -374,17 +328,17 @@ TokenStream [
|
|
Literal {
|
|
lit: 1,
|
|
},
|
|
],
|
|
},
|
|
]\
|
|
";
|
|
|
|
- #[cfg(not(procmacro2_semver_exempt))]
|
|
+ #[cfg(not(span_locations))]
|
|
let expected_before_trailing_commas = "\
|
|
TokenStream [
|
|
Group {
|
|
delimiter: Bracket,
|
|
stream: TokenStream [
|
|
Ident {
|
|
sym: a
|
|
},
|
|
@@ -395,17 +349,17 @@ TokenStream [
|
|
Literal {
|
|
lit: 1
|
|
}
|
|
]
|
|
}
|
|
]\
|
|
";
|
|
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
+ #[cfg(span_locations)]
|
|
let expected = "\
|
|
TokenStream [
|
|
Group {
|
|
delimiter: Bracket,
|
|
stream: TokenStream [
|
|
Ident {
|
|
sym: a,
|
|
span: bytes(2..3),
|
|
@@ -420,17 +374,17 @@ TokenStream [
|
|
span: bytes(6..7),
|
|
},
|
|
],
|
|
span: bytes(1..8),
|
|
},
|
|
]\
|
|
";
|
|
|
|
- #[cfg(procmacro2_semver_exempt)]
|
|
+ #[cfg(span_locations)]
|
|
let expected_before_trailing_commas = "\
|
|
TokenStream [
|
|
Group {
|
|
delimiter: Bracket,
|
|
stream: TokenStream [
|
|
Ident {
|
|
sym: a,
|
|
span: bytes(2..3)
|
|
@@ -459,8 +413,85 @@ TokenStream [
|
|
}
|
|
|
|
#[test]
|
|
fn default_tokenstream_is_empty() {
|
|
let default_token_stream: TokenStream = Default::default();
|
|
|
|
assert!(default_token_stream.is_empty());
|
|
}
|
|
+
|
|
+#[test]
|
|
+fn tuple_indexing() {
|
|
+ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
|
|
+ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
|
|
+ assert_eq!("tuple", tokens.next().unwrap().to_string());
|
|
+ assert_eq!(".", tokens.next().unwrap().to_string());
|
|
+ assert_eq!("0.0", tokens.next().unwrap().to_string());
|
|
+ assert!(tokens.next().is_none());
|
|
+}
|
|
+
|
|
+#[cfg(span_locations)]
|
|
+#[test]
|
|
+fn non_ascii_tokens() {
|
|
+ check_spans("// abc", &[]);
|
|
+ check_spans("// ábc", &[]);
|
|
+ check_spans("// abc x", &[]);
|
|
+ check_spans("// ábc x", &[]);
|
|
+ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
|
|
+ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
|
|
+ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
|
|
+ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
|
|
+ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
|
|
+ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
|
|
+ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
|
|
+ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
|
|
+ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
|
|
+ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
|
|
+ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
|
|
+ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
|
|
+ check_spans("'a'", &[(1, 0, 1, 3)]);
|
|
+ check_spans("'á'", &[(1, 0, 1, 3)]);
|
|
+ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
|
|
+ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
|
|
+ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
|
|
+ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
|
|
+ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
|
|
+ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
|
|
+ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
|
|
+ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
|
|
+ check_spans("abc", &[(1, 0, 1, 3)]);
|
|
+ check_spans("ábc", &[(1, 0, 1, 3)]);
|
|
+ check_spans("ábć", &[(1, 0, 1, 3)]);
|
|
+ check_spans("abc// foo", &[(1, 0, 1, 3)]);
|
|
+ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
|
|
+ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
|
|
+ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
|
|
+ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
|
|
+}
|
|
+
|
|
+#[cfg(span_locations)]
|
|
+fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
|
|
+ let ts = p.parse::<TokenStream>().unwrap();
|
|
+ check_spans_internal(ts, &mut lines);
|
|
+ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
|
|
+}
|
|
+
|
|
+#[cfg(span_locations)]
|
|
+fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
|
|
+ for i in ts {
|
|
+ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
|
|
+ *lines = rest;
|
|
+
|
|
+ let start = i.span().start();
|
|
+ assert_eq!(start.line, sline, "sline did not match for {}", i);
|
|
+ assert_eq!(start.column, scol, "scol did not match for {}", i);
|
|
+
|
|
+ let end = i.span().end();
|
|
+ assert_eq!(end.line, eline, "eline did not match for {}", i);
|
|
+ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
|
|
+
|
|
+ if let TokenTree::Group(g) = i {
|
|
+ check_spans_internal(g.stream().clone(), lines);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/proc-macro2/tests/test_fmt.rs
|
|
@@ -0,0 +1,26 @@
|
|
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
|
|
+use std::iter::{self, FromIterator};
|
|
+
|
|
+#[test]
|
|
+fn test_fmt_group() {
|
|
+ let ident = Ident::new("x", Span::call_site());
|
|
+ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
|
|
+ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
|
|
+ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
|
|
+ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
|
|
+ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
|
|
+ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
|
|
+ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
|
|
+ let none_empty = Group::new(Delimiter::None, TokenStream::new());
|
|
+ let none_nonempty = Group::new(Delimiter::None, inner.clone());
|
|
+
|
|
+ // Matches libproc_macro.
|
|
+ assert_eq!("()", parens_empty.to_string());
|
|
+ assert_eq!("(x)", parens_nonempty.to_string());
|
|
+ assert_eq!("[]", brackets_empty.to_string());
|
|
+ assert_eq!("[x]", brackets_nonempty.to_string());
|
|
+ assert_eq!("{ }", braces_empty.to_string());
|
|
+ assert_eq!("{ x }", braces_nonempty.to_string());
|
|
+ assert_eq!("", none_empty.to_string());
|
|
+ assert_eq!("x", none_nonempty.to_string());
|
|
+}
|
|
diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
|
|
--- a/third_party/rust/syn/.cargo-checksum.json
|
|
+++ b/third_party/rust/syn/.cargo-checksum.json
|
|
@@ -1,1 +1,1 @@
|
|
-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835f
66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
|
|
\ No newline at end of file
|
|
+{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5f
a54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/.gitignore":"22e782449a3c216db3f7215d5fb8882e316768e40beeec3833aae419ad8941db","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","
tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
|
|
\ No newline at end of file
|
|
diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
|
|
--- a/third_party/rust/syn/Cargo.toml
|
|
+++ b/third_party/rust/syn/Cargo.toml
|
|
@@ -8,79 +8,90 @@
|
|
# If you believe there's an error in this file please file an
|
|
# issue against the rust-lang/cargo repository. If you're
|
|
# editing this file be aware that the upstream Cargo.toml
|
|
# will likely look very different (and much more reasonable)
|
|
|
|
[package]
|
|
edition = "2018"
|
|
name = "syn"
|
|
-version = "1.0.5"
|
|
+version = "1.0.40"
|
|
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
|
include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
|
|
description = "Parser for Rust source code"
|
|
documentation = "https://docs.rs/syn"
|
|
readme = "README.md"
|
|
categories = ["development-tools::procedural-macro-helpers"]
|
|
license = "MIT OR Apache-2.0"
|
|
repository = "https://github.com/dtolnay/syn"
|
|
[package.metadata.docs.rs]
|
|
all-features = true
|
|
+targets = ["x86_64-unknown-linux-gnu"]
|
|
|
|
[package.metadata.playground]
|
|
-all-features = true
|
|
-
|
|
-[lib]
|
|
-name = "syn"
|
|
+features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
|
|
|
|
[[bench]]
|
|
name = "rust"
|
|
harness = false
|
|
required-features = ["full", "parsing"]
|
|
-edition = "2018"
|
|
|
|
[[bench]]
|
|
name = "file"
|
|
required-features = ["full", "parsing"]
|
|
-edition = "2018"
|
|
[dependencies.proc-macro2]
|
|
-version = "1.0"
|
|
+version = "1.0.13"
|
|
default-features = false
|
|
|
|
[dependencies.quote]
|
|
version = "1.0"
|
|
optional = true
|
|
default-features = false
|
|
|
|
[dependencies.unicode-xid]
|
|
version = "0.2"
|
|
+[dev-dependencies.anyhow]
|
|
+version = "1.0"
|
|
+
|
|
+[dev-dependencies.flate2]
|
|
+version = "1.0"
|
|
+
|
|
[dev-dependencies.insta]
|
|
-version = "0.9"
|
|
+version = "0.16"
|
|
|
|
[dev-dependencies.rayon]
|
|
version = "1.0"
|
|
|
|
[dev-dependencies.ref-cast]
|
|
-version = "0.2"
|
|
+version = "1.0"
|
|
|
|
[dev-dependencies.regex]
|
|
version = "1.0"
|
|
|
|
+[dev-dependencies.reqwest]
|
|
+version = "0.10"
|
|
+features = ["blocking"]
|
|
+
|
|
+[dev-dependencies.syn-test-suite]
|
|
+version = "0"
|
|
+
|
|
+[dev-dependencies.tar]
|
|
+version = "0.4"
|
|
+
|
|
[dev-dependencies.termcolor]
|
|
version = "1.0"
|
|
|
|
[dev-dependencies.walkdir]
|
|
version = "2.1"
|
|
|
|
[features]
|
|
clone-impls = []
|
|
default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
|
|
derive = []
|
|
extra-traits = []
|
|
fold = []
|
|
full = []
|
|
parsing = []
|
|
printing = ["quote"]
|
|
proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
|
|
+test = ["syn-test-suite/all-features"]
|
|
visit = []
|
|
visit-mut = []
|
|
-[badges.travis-ci]
|
|
-repository = "dtolnay/syn"
|
|
diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
|
|
--- a/third_party/rust/syn/README.md
|
|
+++ b/third_party/rust/syn/README.md
|
|
@@ -1,15 +1,15 @@
|
|
Parser for Rust source code
|
|
===========================
|
|
|
|
-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
|
|
-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
|
|
-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
|
|
-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
|
|
+[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
|
|
+[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
|
|
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
|
|
+[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
|
|
|
|
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
|
|
of Rust source code.
|
|
|
|
Currently this library is geared toward use in Rust procedural macros, but
|
|
contains some APIs that may be useful more generally.
|
|
|
|
- **Data structures** — Syn provides a complete syntax tree that can represent
|
|
@@ -41,20 +41,16 @@ contains some APIs that may be useful mo
|
|
|
|
[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
|
|
[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
|
|
[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
|
|
[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
|
|
[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
|
|
[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
|
|
|
|
-If you get stuck with anything involving procedural macros in Rust I am happy to
|
|
-provide help even if the issue is not related to Syn. Please file a ticket in
|
|
-this repo.
|
|
-
|
|
*Version requirement: Syn supports rustc 1.31 and up.*
|
|
|
|
[*Release notes*](https://github.com/dtolnay/syn/releases)
|
|
|
|
<br>
|
|
|
|
## Resources
|
|
|
|
@@ -83,18 +79,16 @@ tokens back to the compiler to compile i
|
|
syn = "1.0"
|
|
quote = "1.0"
|
|
|
|
[lib]
|
|
proc-macro = true
|
|
```
|
|
|
|
```rust
|
|
-extern crate proc_macro;
|
|
-
|
|
use proc_macro::TokenStream;
|
|
use quote::quote;
|
|
use syn::{parse_macro_input, DeriveInput};
|
|
|
|
#[proc_macro_derive(MyMacro)]
|
|
pub fn my_macro(input: TokenStream) -> TokenStream {
|
|
// Parse the input tokens into a syntax tree
|
|
let input = parse_macro_input!(input as DeriveInput);
|
|
@@ -266,17 +260,17 @@ incompatible ecosystems for proc macros
|
|
|
|
In general all of your code should be written against proc-macro2 rather than
|
|
proc-macro. The one exception is in the signatures of procedural macro entry
|
|
points, which are required by the language to use `proc_macro::TokenStream`.
|
|
|
|
The proc-macro2 crate will automatically detect and use the compiler's data
|
|
structures when a procedural macro is active.
|
|
|
|
-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
|
|
+[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
|
|
|
|
<br>
|
|
|
|
#### License
|
|
|
|
<sup>
|
|
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
|
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
|
diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
|
|
--- a/third_party/rust/syn/benches/file.rs
|
|
+++ b/third_party/rust/syn/benches/file.rs
|
|
@@ -1,14 +1,21 @@
|
|
// $ cargo bench --features full --bench file
|
|
|
|
#![feature(rustc_private, test)]
|
|
+#![recursion_limit = "1024"]
|
|
|
|
extern crate test;
|
|
|
|
+#[macro_use]
|
|
+#[path = "../tests/macros/mod.rs"]
|
|
+mod macros;
|
|
+
|
|
+#[path = "../tests/common/mod.rs"]
|
|
+mod common;
|
|
#[path = "../tests/repo/mod.rs"]
|
|
pub mod repo;
|
|
|
|
use proc_macro2::TokenStream;
|
|
use std::fs;
|
|
use std::str::FromStr;
|
|
use test::Bencher;
|
|
|
|
diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
|
|
--- a/third_party/rust/syn/benches/rust.rs
|
|
+++ b/third_party/rust/syn/benches/rust.rs
|
|
@@ -1,15 +1,22 @@
|
|
// $ cargo bench --features full --bench rust
|
|
//
|
|
// Syn only, useful for profiling:
|
|
// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
|
|
|
|
#![cfg_attr(not(syn_only), feature(rustc_private))]
|
|
+#![recursion_limit = "1024"]
|
|
|
|
+#[macro_use]
|
|
+#[path = "../tests/macros/mod.rs"]
|
|
+mod macros;
|
|
+
|
|
+#[path = "../tests/common/mod.rs"]
|
|
+mod common;
|
|
#[path = "../tests/repo/mod.rs"]
|
|
mod repo;
|
|
|
|
use std::fs;
|
|
use std::time::{Duration, Instant};
|
|
|
|
#[cfg(not(syn_only))]
|
|
mod tokenstream_parse {
|
|
@@ -23,41 +30,45 @@ mod tokenstream_parse {
|
|
|
|
mod syn_parse {
|
|
pub fn bench(content: &str) -> Result<(), ()> {
|
|
syn::parse_file(content).map(drop).map_err(drop)
|
|
}
|
|
}
|
|
|
|
#[cfg(not(syn_only))]
|
|
-mod libsyntax_parse {
|
|
+mod librustc_parse {
|
|
extern crate rustc_data_structures;
|
|
- extern crate syntax;
|
|
- extern crate syntax_pos;
|
|
+ extern crate rustc_errors;
|
|
+ extern crate rustc_parse;
|
|
+ extern crate rustc_session;
|
|
+ extern crate rustc_span;
|
|
|
|
use rustc_data_structures::sync::Lrc;
|
|
- use syntax::edition::Edition;
|
|
- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
|
|
- use syntax::parse::ParseSess;
|
|
- use syntax::source_map::{FilePathMapping, SourceMap};
|
|
- use syntax_pos::FileName;
|
|
+ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
|
|
+ use rustc_session::parse::ParseSess;
|
|
+ use rustc_span::source_map::{FilePathMapping, SourceMap};
|
|
+ use rustc_span::{edition::Edition, FileName};
|
|
|
|
pub fn bench(content: &str) -> Result<(), ()> {
|
|
struct SilentEmitter;
|
|
|
|
impl Emitter for SilentEmitter {
|
|
- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
|
|
+ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
|
|
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
|
|
+ None
|
|
+ }
|
|
}
|
|
|
|
- syntax::with_globals(Edition::Edition2018, || {
|
|
+ rustc_span::with_session_globals(Edition::Edition2018, || {
|
|
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
|
let emitter = Box::new(SilentEmitter);
|
|
let handler = Handler::with_emitter(false, None, emitter);
|
|
let sess = ParseSess::with_span_handler(handler, cm);
|
|
- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
|
|
+ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
|
|
FileName::Custom("bench".to_owned()),
|
|
content.to_owned(),
|
|
&sess,
|
|
) {
|
|
diagnostic.cancel();
|
|
return Err(());
|
|
};
|
|
Ok(())
|
|
@@ -99,21 +110,21 @@ fn exec(mut codepath: impl FnMut(&str) -
|
|
assert_eq!(success, total);
|
|
begin.elapsed()
|
|
}
|
|
|
|
fn main() {
|
|
repo::clone_rust();
|
|
|
|
macro_rules! testcases {
|
|
- ($($(#[$cfg:meta])* $name:path,)*) => {
|
|
+ ($($(#[$cfg:meta])* $name:ident,)*) => {
|
|
vec![
|
|
$(
|
|
$(#[$cfg])*
|
|
- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
|
|
+ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
|
|
)*
|
|
]
|
|
};
|
|
}
|
|
|
|
#[cfg(not(syn_only))]
|
|
{
|
|
let mut lines = 0;
|
|
@@ -123,22 +134,22 @@ fn main() {
|
|
files += 1;
|
|
Ok(())
|
|
});
|
|
eprintln!("\n{} lines in {} files", lines, files);
|
|
}
|
|
|
|
for (name, f) in testcases!(
|
|
#[cfg(not(syn_only))]
|
|
- read_from_disk::bench,
|
|
+ read_from_disk,
|
|
#[cfg(not(syn_only))]
|
|
- tokenstream_parse::bench,
|
|
- syn_parse::bench,
|
|
+ tokenstream_parse,
|
|
+ syn_parse,
|
|
#[cfg(not(syn_only))]
|
|
- libsyntax_parse::bench,
|
|
+ librustc_parse,
|
|
) {
|
|
eprint!("{:20}", format!("{}:", name));
|
|
let elapsed = exec(f);
|
|
eprintln!(
|
|
"elapsed={}.{:03}s",
|
|
elapsed.as_secs(),
|
|
elapsed.subsec_millis(),
|
|
);
|
|
diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
|
|
--- a/third_party/rust/syn/build.rs
|
|
+++ b/third_party/rust/syn/build.rs
|
|
@@ -1,11 +1,11 @@
|
|
use std::env;
|
|
use std::process::Command;
|
|
-use std::str::{self, FromStr};
|
|
+use std::str;
|
|
|
|
// The rustc-cfg strings below are *not* public API. Please let us know by
|
|
// opening a GitHub issue if your build environment requires some way to enable
|
|
// these cfgs other than by executing our build script.
|
|
fn main() {
|
|
let compiler = match rustc_version() {
|
|
Some(compiler) => compiler,
|
|
None => return,
|
|
@@ -21,43 +21,19 @@ fn main() {
|
|
}
|
|
|
|
struct Compiler {
|
|
minor: u32,
|
|
nightly: bool,
|
|
}
|
|
|
|
fn rustc_version() -> Option<Compiler> {
|
|
- let rustc = match env::var_os("RUSTC") {
|
|
- Some(rustc) => rustc,
|
|
- None => return None,
|
|
- };
|
|
-
|
|
- let output = match Command::new(rustc).arg("--version").output() {
|
|
- Ok(output) => output,
|
|
- Err(_) => return None,
|
|
- };
|
|
-
|
|
- let version = match str::from_utf8(&output.stdout) {
|
|
- Ok(version) => version,
|
|
- Err(_) => return None,
|
|
- };
|
|
-
|
|
+ let rustc = env::var_os("RUSTC")?;
|
|
+ let output = Command::new(rustc).arg("--version").output().ok()?;
|
|
+ let version = str::from_utf8(&output.stdout).ok()?;
|
|
let mut pieces = version.split('.');
|
|
if pieces.next() != Some("rustc 1") {
|
|
return None;
|
|
}
|
|
-
|
|
- let next = match pieces.next() {
|
|
- Some(next) => next,
|
|
- None => return None,
|
|
- };
|
|
-
|
|
- let minor = match u32::from_str(next) {
|
|
- Ok(minor) => minor,
|
|
- Err(_) => return None,
|
|
- };
|
|
-
|
|
- Some(Compiler {
|
|
- minor: minor,
|
|
- nightly: version.contains("nightly"),
|
|
- })
|
|
+ let minor = pieces.next()?.parse().ok()?;
|
|
+ let nightly = version.contains("nightly");
|
|
+ Some(Compiler { minor, nightly })
|
|
}
|
|
diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
|
|
--- a/third_party/rust/syn/src/attr.rs
|
|
+++ b/third_party/rust/syn/src/attr.rs
|
|
@@ -4,25 +4,21 @@ use crate::punctuated::Punctuated;
|
|
use std::iter;
|
|
|
|
use proc_macro2::TokenStream;
|
|
|
|
#[cfg(feature = "parsing")]
|
|
use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
|
|
#[cfg(feature = "parsing")]
|
|
use crate::punctuated::Pair;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use crate::tt::TokenStreamHelper;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use std::hash::{Hash, Hasher};
|
|
|
|
ast_struct! {
|
|
/// An attribute like `#[repr(transparent)]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// <br>
|
|
///
|
|
/// # Syntax
|
|
///
|
|
/// Rust has six types of attributes.
|
|
///
|
|
@@ -106,58 +102,69 @@ ast_struct! {
|
|
/// If the attribute you are parsing is expected to conform to the
|
|
/// conventional structured form of attribute, use [`parse_meta()`] to
|
|
/// obtain that structured representation. If the attribute follows some
|
|
/// other grammar of its own, use [`parse_args()`] to parse that into the
|
|
/// expected data structure.
|
|
///
|
|
/// [`parse_meta()`]: Attribute::parse_meta
|
|
/// [`parse_args()`]: Attribute::parse_args
|
|
- pub struct Attribute #manual_extra_traits {
|
|
+ ///
|
|
+ /// <p><br></p>
|
|
+ ///
|
|
+ /// # Doc comments
|
|
+ ///
|
|
+ /// The compiler transforms doc comments, such as `/// comment` and `/*!
|
|
+ /// comment */`, into attributes before macros are expanded. Each comment is
|
|
+ /// expanded into an attribute of the form `#[doc = r"comment"]`.
|
|
+ ///
|
|
+ /// As an example, the following `mod` items are expanded identically:
|
|
+ ///
|
|
+ /// ```
|
|
+ /// # use syn::{ItemMod, parse_quote};
|
|
+ /// let doc: ItemMod = parse_quote! {
|
|
+ /// /// Single line doc comments
|
|
+ /// /// We write so many!
|
|
+ /// /**
|
|
+ /// * Multi-line comments...
|
|
+ /// * May span many lines
|
|
+ /// */
|
|
+ /// mod example {
|
|
+ /// //! Of course, they can be inner too
|
|
+ /// /*! And fit in a single line */
|
|
+ /// }
|
|
+ /// };
|
|
+ /// let attr: ItemMod = parse_quote! {
|
|
+ /// #[doc = r" Single line doc comments"]
|
|
+ /// #[doc = r" We write so many!"]
|
|
+ /// #[doc = r"
|
|
+ /// * Multi-line comments...
|
|
+ /// * May span many lines
|
|
+ /// "]
|
|
+ /// mod example {
|
|
+ /// #![doc = r" Of course, they can be inner too"]
|
|
+ /// #![doc = r" And fit in a single line "]
|
|
+ /// }
|
|
+ /// };
|
|
+ /// assert_eq!(doc, attr);
|
|
+ /// ```
|
|
+ pub struct Attribute {
|
|
pub pound_token: Token![#],
|
|
pub style: AttrStyle,
|
|
pub bracket_token: token::Bracket,
|
|
pub path: Path,
|
|
pub tokens: TokenStream,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Attribute {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Attribute {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- self.style == other.style
|
|
- && self.pound_token == other.pound_token
|
|
- && self.bracket_token == other.bracket_token
|
|
- && self.path == other.path
|
|
- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Attribute {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- self.style.hash(state);
|
|
- self.pound_token.hash(state);
|
|
- self.bracket_token.hash(state);
|
|
- self.path.hash(state);
|
|
- TokenStreamHelper(&self.tokens).hash(state);
|
|
- }
|
|
-}
|
|
-
|
|
impl Attribute {
|
|
/// Parses the content of the attribute, consisting of the path and tokens,
|
|
/// as a [`Meta`] if possible.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_meta(&self) -> Result<Meta> {
|
|
fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
|
|
PathSegment {
|
|
ident: segment.ident.clone(),
|
|
arguments: PathArguments::None,
|
|
}
|
|
@@ -194,91 +201,95 @@ impl Attribute {
|
|
/// parser; and
|
|
/// - the error message has a more useful span when `tokens` is empty.
|
|
///
|
|
/// ```text
|
|
/// #[my_attr(value < 5)]
|
|
/// ^^^^^^^^^ what gets parsed
|
|
/// ```
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_args<T: Parse>(&self) -> Result<T> {
|
|
self.parse_args_with(T::parse)
|
|
}
|
|
|
|
/// Parse the arguments to the attribute using the given parser.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
|
|
let parser = |input: ParseStream| {
|
|
let args = enter_args(self, input)?;
|
|
parse::parse_stream(parser, &args)
|
|
};
|
|
parser.parse2(self.tokens.clone())
|
|
}
|
|
|
|
/// Parses zero or more outer attributes from the stream.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
|
|
let mut attrs = Vec::new();
|
|
while input.peek(Token![#]) {
|
|
attrs.push(input.call(parsing::single_parse_outer)?);
|
|
}
|
|
Ok(attrs)
|
|
}
|
|
|
|
/// Parses zero or more inner attributes from the stream.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
|
|
let mut attrs = Vec::new();
|
|
while input.peek(Token![#]) && input.peek2(Token![!]) {
|
|
attrs.push(input.call(parsing::single_parse_inner)?);
|
|
}
|
|
Ok(attrs)
|
|
}
|
|
}
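// Illustrative sketch, not part of the vendored patch: using the parsing
// helpers above. `#[my_attr(...)]` is a hypothetical attribute; assumes syn
// 1.0 with its default features so `parse_quote!` and `parse_args` exist.
use syn::{parse_quote, Attribute, Path, Result};

fn demo_parse_args() -> Result<()> {
    // `parse_args` parses only the tokens inside the delimiters, matching the
    // span diagram in the documentation above.
    let attr: Attribute = parse_quote!(#[my_attr(std::mem::replace)]);
    let inner: Path = attr.parse_args()?;
    assert_eq!(inner.segments.len(), 3);
    Ok(())
}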
|
|
|
|
#[cfg(feature = "parsing")]
|
|
-fn error_expected_args(attr: &Attribute) -> Error {
|
|
+fn expected_parentheses(attr: &Attribute) -> String {
|
|
let style = match attr.style {
|
|
AttrStyle::Outer => "#",
|
|
AttrStyle::Inner(_) => "#!",
|
|
};
|
|
|
|
let mut path = String::new();
|
|
for segment in &attr.path.segments {
|
|
if !path.is_empty() || attr.path.leading_colon.is_some() {
|
|
path += "::";
|
|
}
|
|
path += &segment.ident.to_string();
|
|
}
|
|
|
|
- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
|
|
-
|
|
- #[cfg(feature = "printing")]
|
|
- return Error::new_spanned(attr, msg);
|
|
-
|
|
- #[cfg(not(feature = "printing"))]
|
|
- return Error::new(attr.bracket_token.span, msg);
|
|
+ format!("{}[{}(...)]", style, path)
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
|
|
if input.is_empty() {
|
|
- return Err(error_expected_args(attr));
|
|
+ let expected = expected_parentheses(attr);
|
|
+ let msg = format!("expected attribute arguments in parentheses: {}", expected);
|
|
+ return Err(crate::error::new2(
|
|
+ attr.pound_token.span,
|
|
+ attr.bracket_token.span,
|
|
+ msg,
|
|
+ ));
|
|
+ } else if input.peek(Token![=]) {
|
|
+ let expected = expected_parentheses(attr);
|
|
+ let msg = format!("expected parentheses: {}", expected);
|
|
+ return Err(input.error(msg));
|
|
};
|
|
|
|
let content;
|
|
if input.peek(token::Paren) {
|
|
parenthesized!(content in input);
|
|
} else if input.peek(token::Bracket) {
|
|
bracketed!(content in input);
|
|
} else if input.peek(token::Brace) {
|
|
@@ -293,41 +304,40 @@ fn enter_args<'a>(attr: &Attribute, inpu
|
|
Err(input.error("unexpected token in attribute arguments"))
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// Distinguishes between attributes that decorate an item and attributes
|
|
/// that are contained within an item.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// # Outer attributes
|
|
///
|
|
/// - `#[repr(transparent)]`
|
|
/// - `/// # Example`
|
|
/// - `/** Please file an issue */`
|
|
///
|
|
/// # Inner attributes
|
|
///
|
|
/// - `#![feature(proc_macro)]`
|
|
/// - `//! # Example`
|
|
/// - `/*! Please file an issue */`
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy))]
|
|
pub enum AttrStyle {
|
|
Outer,
|
|
Inner(Token![!]),
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// Content of a compile-time structured attribute.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// ## Path
|
|
///
|
|
/// A meta path is like the `test` in `#[test]`.
|
|
///
|
|
/// ## List
|
|
///
|
|
@@ -355,29 +365,29 @@ ast_enum_of_structs! {
|
|
/// A name-value pair within an attribute, like `feature = "nightly"`.
|
|
NameValue(MetaNameValue),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A structured list within an attribute, like `derive(Copy, Clone)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct MetaList {
|
|
pub path: Path,
|
|
pub paren_token: token::Paren,
|
|
pub nested: Punctuated<NestedMeta, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A name-value pair within an attribute, like `feature = "nightly"`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct MetaNameValue {
|
|
pub path: Path,
|
|
pub eq_token: Token![=],
|
|
pub lit: Lit,
|
|
}
|
|
}
|
|
|
|
@@ -393,17 +403,17 @@ impl Meta {
|
|
Meta::NameValue(meta) => &meta.path,
|
|
}
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// Element of a compile-time attribute list.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub enum NestedMeta {
|
|
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
|
|
/// would be a nested `Meta::Path`.
|
|
Meta(Meta),
|
|
|
|
/// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
|
|
Lit(Lit),
|
|
@@ -424,18 +434,18 @@ ast_enum_of_structs! {
|
|
/// /* ... */
|
|
/// }
|
|
/// ```
|
|
///
|
|
/// The implementation of this macro would want to parse its attribute arguments
|
|
/// as type `AttributeArgs`.
|
|
///
|
|
/// ```
|
|
-/// extern crate proc_macro;
|
|
-///
|
|
+/// # extern crate proc_macro;
|
|
+/// #
|
|
/// use proc_macro::TokenStream;
|
|
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
|
|
///
|
|
/// # const IGNORE: &str = stringify! {
|
|
/// #[proc_macro_attribute]
|
|
/// # };
|
|
/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
|
|
/// let args = parse_macro_input!(args as AttributeArgs);
|
|
@@ -459,27 +469,27 @@ where
|
|
T: IntoIterator<Item = &'a Attribute>,
|
|
{
|
|
type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
|
|
|
|
fn outer(self) -> Self::Ret {
|
|
fn is_outer(attr: &&Attribute) -> bool {
|
|
match attr.style {
|
|
AttrStyle::Outer => true,
|
|
- _ => false,
|
|
+ AttrStyle::Inner(_) => false,
|
|
}
|
|
}
|
|
self.into_iter().filter(is_outer)
|
|
}
|
|
|
|
fn inner(self) -> Self::Ret {
|
|
fn is_inner(attr: &&Attribute) -> bool {
|
|
match attr.style {
|
|
AttrStyle::Inner(_) => true,
|
|
- _ => false,
|
|
+ AttrStyle::Outer => false,
|
|
}
|
|
}
|
|
self.into_iter().filter(is_inner)
|
|
}
|
|
}
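// Illustrative sketch, not part of the vendored patch: the same outer/inner
// split as the crate-internal filter above, written against the public API.
// Assumes syn 1.0 with its default features.
use syn::{AttrStyle, Attribute};

fn split_attrs(attrs: &[Attribute]) -> (Vec<&Attribute>, Vec<&Attribute>) {
    let outer = attrs
        .iter()
        .filter(|attr| matches!(attr.style, AttrStyle::Outer))
        .collect();
    let inner = attrs
        .iter()
        .filter(|attr| matches!(attr.style, AttrStyle::Inner(_)))
        .collect();
    (outer, inner)
}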
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parsing {
|
|
diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
|
|
--- a/third_party/rust/syn/src/buffer.rs
|
|
+++ b/third_party/rust/syn/src/buffer.rs
|
|
@@ -1,12 +1,12 @@
|
|
//! A stably addressed token buffer supporting efficient traversal based on a
|
|
//! cheaply copyable cursor.
|
|
//!
|
|
-//! *This module is available if Syn is built with the `"parsing"` feature.*
|
|
+//! *This module is available only if Syn is built with the `"parsing"` feature.*
|
|
|
|
// This module is heavily commented as it contains most of the unsafe code in
|
|
// Syn, and caution should be used when editing it. The public-facing interface
|
|
// is 100% safe but the implementation is fragile internally.
|
|
|
|
#[cfg(all(
|
|
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
|
feature = "proc-macro"
|
|
@@ -31,17 +31,17 @@ enum Entry {
|
|
// token tree, or null if this is the outermost level.
|
|
End(*const Entry),
|
|
}
|
|
|
|
/// A buffer that can be efficiently traversed multiple times, unlike
|
|
/// `TokenStream` which requires a deep copy in order to traverse more than
|
|
/// once.
|
|
///
|
|
-/// *This type is available if Syn is built with the `"parsing"` feature.*
|
|
+/// *This type is available only if Syn is built with the `"parsing"` feature.*
|
|
pub struct TokenBuffer {
|
|
// NOTE: Do not derive clone on this - there are raw pointers inside which
|
|
// will be messed up. Moving the `TokenBuffer` itself is safe as the actual
|
|
// backing slices won't be moved.
|
|
data: Box<[Entry]>,
|
|
}
|
|
|
|
impl TokenBuffer {
|
|
@@ -93,17 +93,17 @@ impl TokenBuffer {
|
|
}
|
|
|
|
TokenBuffer { data: entries }
|
|
}
|
|
|
|
/// Creates a `TokenBuffer` containing all the tokens from the input
|
|
/// `TokenStream`.
|
|
///
|
|
- /// *This method is available if Syn is built with both the `"parsing"` and
|
|
+ /// *This method is available only if Syn is built with both the `"parsing"` and
|
|
/// `"proc-macro"` features.*
|
|
#[cfg(all(
|
|
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
|
feature = "proc-macro"
|
|
))]
|
|
pub fn new(stream: pm::TokenStream) -> TokenBuffer {
|
|
Self::new2(stream.into())
|
|
}
|
|
@@ -128,18 +128,17 @@ impl TokenBuffer {
|
|
/// and copied around.
|
|
///
|
|
/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
|
|
/// object and get a cursor to its first token with `begin()`.
|
|
///
|
|
/// Two cursors are equal if they have the same location in the same input
|
|
/// stream, and have the same scope.
|
|
///
|
|
-/// *This type is available if Syn is built with the `"parsing"` feature.*
|
|
-#[derive(Copy, Clone, Eq, PartialEq)]
|
|
+/// *This type is available only if Syn is built with the `"parsing"` feature.*
|
|
pub struct Cursor<'a> {
|
|
// The current entry which the `Cursor` is pointing at.
|
|
ptr: *const Entry,
|
|
// This is the only `Entry::End(..)` object which this cursor is allowed to
|
|
// point at. All other `End` objects are skipped over in `Cursor::create`.
|
|
scope: *const Entry,
|
|
// Cursor is covariant in 'a. This field ensures that our pointers are still
|
|
// valid.
|
|
@@ -196,37 +195,38 @@ impl<'a> Cursor<'a> {
|
|
|
|
/// Bump the cursor to point at the next token after the current one. This
|
|
/// is undefined behavior if the cursor is currently looking at an
|
|
/// `Entry::End`.
|
|
unsafe fn bump(self) -> Cursor<'a> {
|
|
Cursor::create(self.ptr.offset(1), self.scope)
|
|
}
|
|
|
|
- /// If the cursor is looking at a `None`-delimited group, move it to look at
|
|
- /// the first token inside instead. If the group is empty, this will move
|
|
+ /// While the cursor is looking at a `None`-delimited group, move it to look
|
|
+ /// at the first token inside instead. If the group is empty, this will move
|
|
/// the cursor past the `None`-delimited group.
|
|
///
|
|
/// WARNING: This mutates its argument.
|
|
fn ignore_none(&mut self) {
|
|
- if let Entry::Group(group, buf) = self.entry() {
|
|
+ while let Entry::Group(group, buf) = self.entry() {
|
|
if group.delimiter() == Delimiter::None {
|
|
// NOTE: We call `Cursor::create` here to make sure that
|
|
// situations where we should immediately exit the span after
|
|
// entering it are handled correctly.
|
|
unsafe {
|
|
*self = Cursor::create(&buf.data[0], self.scope);
|
|
}
|
|
+ } else {
|
|
+ break;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Checks whether the cursor is currently pointing at the end of its valid
|
|
/// scope.
|
|
- #[inline]
|
|
pub fn eof(self) -> bool {
|
|
// We're at eof if we're at the end of our scope.
|
|
self.ptr == self.scope
|
|
}
|
|
|
|
/// If the cursor is pointing at a `Group` with the given delimiter, returns
|
|
/// a cursor into that group and one pointing to the next `TokenTree`.
|
|
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
|
|
@@ -337,16 +337,54 @@ impl<'a> Cursor<'a> {
|
|
match self.entry() {
|
|
Entry::Group(group, _) => group.span(),
|
|
Entry::Literal(l) => l.span(),
|
|
Entry::Ident(t) => t.span(),
|
|
Entry::Punct(o) => o.span(),
|
|
Entry::End(..) => Span::call_site(),
|
|
}
|
|
}
|
|
+
|
|
+ /// Skip over the next token without cloning it. Returns `None` if this
|
|
+ /// cursor points to eof.
|
|
+ ///
|
|
+ /// This method treats `'lifetimes` as a single token.
|
|
+ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
|
|
+ match self.entry() {
|
|
+ Entry::End(..) => None,
|
|
+
|
|
+ // Treat lifetimes as a single tt for the purposes of 'skip'.
|
|
+ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
|
|
+ let next = unsafe { self.bump() };
|
|
+ match next.entry() {
|
|
+ Entry::Ident(_) => Some(unsafe { next.bump() }),
|
|
+ _ => Some(next),
|
|
+ }
|
|
+ }
|
|
+ _ => Some(unsafe { self.bump() }),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+impl<'a> Copy for Cursor<'a> {}
|
|
+
|
|
+impl<'a> Clone for Cursor<'a> {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+
|
|
+impl<'a> Eq for Cursor<'a> {}
|
|
+
|
|
+impl<'a> PartialEq for Cursor<'a> {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ let Cursor { ptr, scope, marker } = self;
|
|
+ let _ = marker;
|
|
+ *ptr == other.ptr && *scope == other.scope
|
|
+ }
|
|
}
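// Illustrative sketch, not part of the vendored patch: traversing a
// `TokenBuffer` with the `Cursor` defined above. Assumes syn 1.0 with the
// "parsing" feature and proc-macro2 available.
use proc_macro2::TokenStream;
use syn::buffer::TokenBuffer;

fn count_top_level_tokens(tokens: TokenStream) -> usize {
    let buffer = TokenBuffer::new2(tokens);
    let mut cursor = buffer.begin();
    let mut count = 0;
    // `token_tree` yields the current token plus a cursor just past it, so
    // the loop ends exactly when `cursor.eof()` becomes true.
    while let Some((_tt, next)) = cursor.token_tree() {
        count += 1;
        cursor = next;
    }
    count
}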
|
|
|
|
pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
|
|
a.scope == b.scope
|
|
}
|
|
|
|
pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
|
|
match cursor.entry() {
|
|
diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
|
|
--- a/third_party/rust/syn/src/custom_keyword.rs
|
|
+++ b/third_party/rust/syn/src/custom_keyword.rs
|
|
@@ -81,46 +81,46 @@
|
|
/// value: input.parse()?,
|
|
/// })
|
|
/// } else {
|
|
/// Err(lookahead.error())
|
|
/// }
|
|
/// }
|
|
/// }
|
|
/// ```
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
macro_rules! custom_keyword {
|
|
($ident:ident) => {
|
|
#[allow(non_camel_case_types)]
|
|
pub struct $ident {
|
|
pub span: $crate::export::Span,
|
|
}
|
|
|
|
#[doc(hidden)]
|
|
- #[allow(non_snake_case)]
|
|
+ #[allow(dead_code, non_snake_case)]
|
|
pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
|
|
span: __S,
|
|
) -> $ident {
|
|
$ident {
|
|
span: $crate::export::IntoSpans::into_spans(span)[0],
|
|
}
|
|
}
|
|
|
|
impl $crate::export::Default for $ident {
|
|
fn default() -> Self {
|
|
$ident {
|
|
span: $crate::export::Span::call_site(),
|
|
}
|
|
}
|
|
}
|
|
|
|
- impl_parse_for_custom_keyword!($ident);
|
|
- impl_to_tokens_for_custom_keyword!($ident);
|
|
- impl_clone_for_custom_keyword!($ident);
|
|
- impl_extra_traits_for_custom_keyword!($ident);
|
|
+ $crate::impl_parse_for_custom_keyword!($ident);
|
|
+ $crate::impl_to_tokens_for_custom_keyword!($ident);
|
|
+ $crate::impl_clone_for_custom_keyword!($ident);
|
|
+ $crate::impl_extra_traits_for_custom_keyword!($ident);
|
|
};
|
|
}
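// Illustrative sketch, not part of the vendored patch: what an invocation of
// the macro above provides. `whatever` is a hypothetical keyword; assumes
// syn 1.0 with the "parsing" feature.
mod kw {
    syn::custom_keyword!(whatever);
}

fn eat_whatever(input: syn::parse::ParseStream) -> syn::Result<bool> {
    // The generated type peeks and parses like a built-in keyword token.
    if input.peek(kw::whatever) {
        input.parse::<kw::whatever>()?;
        return Ok(true);
    }
    Ok(false)
}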
|
|
|
|
// Not public API.
|
|
#[cfg(feature = "parsing")]
|
|
#[doc(hidden)]
|
|
#[macro_export]
|
|
macro_rules! impl_parse_for_custom_keyword {
|
|
diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
|
|
--- a/third_party/rust/syn/src/custom_punctuation.rs
|
|
+++ b/third_party/rust/syn/src/custom_punctuation.rs
|
|
@@ -69,67 +69,67 @@
|
|
/// Ok(tokens)
|
|
/// }
|
|
///
|
|
/// fn main() {
|
|
/// let input = r#" a::b </> c::d::e "#;
|
|
/// let _: PathSegments = syn::parse_str(input).unwrap();
|
|
/// }
|
|
/// ```
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
macro_rules! custom_punctuation {
|
|
($ident:ident, $($tt:tt)+) => {
|
|
pub struct $ident {
|
|
- pub spans: custom_punctuation_repr!($($tt)+),
|
|
+ pub spans: $crate::custom_punctuation_repr!($($tt)+),
|
|
}
|
|
|
|
#[doc(hidden)]
|
|
- #[allow(non_snake_case)]
|
|
- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
|
|
+ #[allow(dead_code, non_snake_case)]
|
|
+ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
|
|
spans: __S,
|
|
) -> $ident {
|
|
- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
|
|
+ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
|
|
$ident {
|
|
spans: $crate::export::IntoSpans::into_spans(spans)
|
|
}
|
|
}
|
|
|
|
impl $crate::export::Default for $ident {
|
|
fn default() -> Self {
|
|
$ident($crate::export::Span::call_site())
|
|
}
|
|
}
|
|
|
|
- impl_parse_for_custom_punctuation!($ident, $($tt)+);
|
|
- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
|
|
- impl_clone_for_custom_punctuation!($ident, $($tt)+);
|
|
- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
|
|
+ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
|
|
+ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
|
|
+ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
|
|
+ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
|
|
};
|
|
}
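// Illustrative sketch, not part of the vendored patch: a use of the macro
// defined above. `LeftRightArrow` / `<=>` are hypothetical; assumes syn 1.0
// with the "parsing" feature.
syn::custom_punctuation!(LeftRightArrow, <=>);

fn parse_arrow(input: syn::parse::ParseStream) -> syn::Result<LeftRightArrow> {
    // The generated type parses and peeks like the built-in Token![...] types.
    input.parse::<LeftRightArrow>()
}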
|
|
|
|
// Not public API.
|
|
#[cfg(feature = "parsing")]
|
|
#[doc(hidden)]
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
macro_rules! impl_parse_for_custom_punctuation {
|
|
($ident:ident, $($tt:tt)+) => {
|
|
impl $crate::token::CustomToken for $ident {
|
|
fn peek(cursor: $crate::buffer::Cursor) -> bool {
|
|
- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
|
|
+ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
|
|
}
|
|
|
|
fn display() -> &'static $crate::export::str {
|
|
- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
|
|
+ concat!("`", $crate::stringify_punct!($($tt)+), "`")
|
|
}
|
|
}
|
|
|
|
impl $crate::parse::Parse for $ident {
|
|
fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
|
|
- let spans: custom_punctuation_repr!($($tt)+) =
|
|
- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
|
|
+ let spans: $crate::custom_punctuation_repr!($($tt)+) =
|
|
+ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
|
|
Ok($ident(spans))
|
|
}
|
|
}
|
|
};
|
|
}
|
|
|
|
// Not public API.
|
|
#[cfg(not(feature = "parsing"))]
|
|
@@ -137,22 +137,22 @@ macro_rules! impl_parse_for_custom_punct
|
|
#[macro_export]
|
|
macro_rules! impl_parse_for_custom_punctuation {
|
|
($ident:ident, $($tt:tt)+) => {};
|
|
}
|
|
|
|
// Not public API.
|
|
#[cfg(feature = "printing")]
|
|
#[doc(hidden)]
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
macro_rules! impl_to_tokens_for_custom_punctuation {
|
|
($ident:ident, $($tt:tt)+) => {
|
|
impl $crate::export::ToTokens for $ident {
|
|
fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
|
|
- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
|
|
+ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
|
|
}
|
|
}
|
|
};
|
|
}
|
|
|
|
// Not public API.
|
|
#[cfg(not(feature = "printing"))]
|
|
#[doc(hidden)]
|
|
@@ -216,26 +216,26 @@ macro_rules! impl_extra_traits_for_custo
|
|
#[doc(hidden)]
|
|
#[macro_export]
|
|
macro_rules! impl_extra_traits_for_custom_punctuation {
|
|
($ident:ident, $($tt:tt)+) => {};
|
|
}
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
macro_rules! custom_punctuation_repr {
|
|
($($tt:tt)+) => {
|
|
- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
|
|
+ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
|
|
};
|
|
}
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
#[rustfmt::skip]
|
|
macro_rules! custom_punctuation_len {
|
|
($mode:ident, +) => { 1 };
|
|
($mode:ident, +=) => { 2 };
|
|
($mode:ident, &) => { 1 };
|
|
($mode:ident, &&) => { 2 };
|
|
($mode:ident, &=) => { 2 };
|
|
($mode:ident, @) => { 1 };
|
|
@@ -274,17 +274,17 @@ macro_rules! custom_punctuation_len {
|
|
($mode:ident, <<=) => { 3 };
|
|
($mode:ident, >>) => { 2 };
|
|
($mode:ident, >>=) => { 3 };
|
|
($mode:ident, *) => { 1 };
|
|
($mode:ident, -) => { 1 };
|
|
($mode:ident, -=) => { 2 };
|
|
($mode:ident, ~) => { 1 };
|
|
(lenient, $tt:tt) => { 0 };
|
|
- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
|
|
+ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
|
|
}
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
#[macro_export]
|
|
macro_rules! custom_punctuation_unexpected {
|
|
() => {};
|
|
}
|
|
@@ -292,18 +292,8 @@ macro_rules! custom_punctuation_unexpect
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
#[macro_export]
|
|
macro_rules! stringify_punct {
|
|
($($tt:tt)+) => {
|
|
concat!($(stringify!($tt)),+)
|
|
};
|
|
}
|
|
-
|
|
-// Not public API.
|
|
-// Without this, local_inner_macros breaks when looking for concat!
|
|
-#[doc(hidden)]
|
|
-#[macro_export]
|
|
-macro_rules! custom_punctuation_concat {
|
|
- ($($tt:tt)*) => {
|
|
- concat!($($tt)*)
|
|
- };
|
|
-}
|
|
diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
|
|
--- a/third_party/rust/syn/src/data.rs
|
|
+++ b/third_party/rust/syn/src/data.rs
|
|
@@ -1,15 +1,15 @@
|
|
use super::*;
|
|
use crate::punctuated::Punctuated;
|
|
|
|
ast_struct! {
|
|
/// An enum variant.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Variant {
|
|
/// Attributes tagged on the variant.
|
|
pub attrs: Vec<Attribute>,
|
|
|
|
/// Name of the variant.
|
|
pub ident: Ident,
|
|
|
|
@@ -19,17 +19,17 @@ ast_struct! {
|
|
/// Explicit discriminant: `Variant = 1`
|
|
pub discriminant: Option<(Token![=], Expr)>,
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// Data stored within an enum variant or struct.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
@@ -47,28 +47,28 @@ ast_enum_of_structs! {
|
|
Unit,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Named fields of a struct or struct variant such as `Point { x: f64,
|
|
/// y: f64 }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct FieldsNamed {
|
|
pub brace_token: token::Brace,
|
|
pub named: Punctuated<Field, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct FieldsUnnamed {
|
|
pub paren_token: token::Paren,
|
|
pub unnamed: Punctuated<Field, Token![,]>,
|
|
}
|
|
}
|
|
|
|
impl Fields {
|
|
@@ -88,16 +88,34 @@ impl Fields {
|
|
/// struct or variant's fields uniformly.
|
|
pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
|
|
match self {
|
|
Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
|
|
Fields::Named(f) => f.named.iter_mut(),
|
|
Fields::Unnamed(f) => f.unnamed.iter_mut(),
|
|
}
|
|
}
|
|
+
|
|
+ /// Returns the number of fields.
|
|
+ pub fn len(&self) -> usize {
|
|
+ match self {
|
|
+ Fields::Unit => 0,
|
|
+ Fields::Named(f) => f.named.len(),
|
|
+ Fields::Unnamed(f) => f.unnamed.len(),
|
|
+ }
|
|
+ }
|
|
+
|
|
+ /// Returns `true` if there are zero fields.
|
|
+ pub fn is_empty(&self) -> bool {
|
|
+ match self {
|
|
+ Fields::Unit => true,
|
|
+ Fields::Named(f) => f.named.is_empty(),
|
|
+ Fields::Unnamed(f) => f.unnamed.is_empty(),
|
|
+ }
|
|
+ }
|
|
}
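// Illustrative sketch, not part of the vendored patch: the new `len` and
// `is_empty` helpers above as seen from a derive macro. Assumes syn 1.0 with
// its default features.
use syn::{parse_quote, Data, DeriveInput};

fn field_count(input: &DeriveInput) -> usize {
    match &input.data {
        Data::Struct(data) => data.fields.len(),
        // Enums and unions are handled elsewhere in this sketch.
        Data::Enum(_) | Data::Union(_) => 0,
    }
}

fn demo_field_count() {
    let input: DeriveInput = parse_quote! {
        struct Point { x: f64, y: f64 }
    };
    assert_eq!(field_count(&input), 2);
}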
|
|
|
|
impl IntoIterator for Fields {
|
|
type Item = Field;
|
|
type IntoIter = punctuated::IntoIter<Field>;
|
|
|
|
fn into_iter(self) -> Self::IntoIter {
|
|
match self {
|
|
@@ -124,17 +142,17 @@ impl<'a> IntoIterator for &'a mut Fields
|
|
fn into_iter(self) -> Self::IntoIter {
|
|
self.iter_mut()
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A field of a struct or enum variant.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Field {
|
|
/// Attributes tagged on the field.
|
|
pub attrs: Vec<Attribute>,
|
|
|
|
/// Visibility of the field.
|
|
pub vis: Visibility,
|
|
|
|
@@ -149,17 +167,17 @@ ast_struct! {
|
|
pub ty: Type,
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// The visibility level of an item: inherited or `pub` or
|
|
/// `pub(restricted)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
@@ -179,58 +197,61 @@ ast_enum_of_structs! {
|
|
/// An inherited visibility, which usually means private.
|
|
Inherited,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A public visibility level: `pub`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct VisPublic {
|
|
pub pub_token: Token![pub],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A crate-level visibility: `crate`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct VisCrate {
|
|
pub crate_token: Token![crate],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A visibility level restricted to some path: `pub(self)` or
|
|
/// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct VisRestricted {
|
|
pub pub_token: Token![pub],
|
|
pub paren_token: token::Paren,
|
|
pub in_token: Option<Token![in]>,
|
|
pub path: Box<Path>,
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parsing {
|
|
use super::*;
|
|
|
|
use crate::ext::IdentExt;
|
|
+ use crate::parse::discouraged::Speculative;
|
|
use crate::parse::{Parse, ParseStream, Result};
|
|
|
|
impl Parse for Variant {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let attrs = input.call(Attribute::parse_outer)?;
|
|
+ let _visibility: Visibility = input.parse()?;
|
|
Ok(Variant {
|
|
- attrs: input.call(Attribute::parse_outer)?,
|
|
+ attrs,
|
|
ident: input.parse()?,
|
|
fields: {
|
|
if input.peek(token::Brace) {
|
|
Fields::Named(input.parse()?)
|
|
} else if input.peek(token::Paren) {
|
|
Fields::Unnamed(input.parse()?)
|
|
} else {
|
|
Fields::Unit
|
|
@@ -290,68 +311,99 @@ pub mod parsing {
|
|
colon_token: None,
|
|
ty: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for Visibility {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ // Recognize an empty None-delimited group, as produced by a $:vis
|
|
+ // matcher that matched no tokens.
|
|
+ if input.peek(token::Group) {
|
|
+ let ahead = input.fork();
|
|
+ let group = crate::group::parse_group(&ahead)?;
|
|
+ if group.content.is_empty() {
|
|
+ input.advance_to(&ahead);
|
|
+ return Ok(Visibility::Inherited);
|
|
+ }
|
|
+ }
|
|
+
|
|
if input.peek(Token![pub]) {
|
|
Self::parse_pub(input)
|
|
} else if input.peek(Token![crate]) {
|
|
Self::parse_crate(input)
|
|
} else {
|
|
Ok(Visibility::Inherited)
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Visibility {
|
|
fn parse_pub(input: ParseStream) -> Result<Self> {
|
|
let pub_token = input.parse::<Token![pub]>()?;
|
|
|
|
if input.peek(token::Paren) {
|
|
- // TODO: optimize using advance_to
|
|
let ahead = input.fork();
|
|
- let mut content;
|
|
- parenthesized!(content in ahead);
|
|
|
|
+ let content;
|
|
+ let paren_token = parenthesized!(content in ahead);
|
|
if content.peek(Token![crate])
|
|
|| content.peek(Token![self])
|
|
|| content.peek(Token![super])
|
|
{
|
|
+ let path = content.call(Ident::parse_any)?;
|
|
+
|
|
+ // Ensure there are no additional tokens within `content`.
|
|
+ // Without explicitly checking, we may misinterpret a tuple
|
|
+ // field as a restricted visibility, causing a parse error.
|
|
+ // e.g. `pub (crate::A, crate::B)` (Issue #720).
|
|
+ if content.is_empty() {
|
|
+ input.advance_to(&ahead);
|
|
+ return Ok(Visibility::Restricted(VisRestricted {
|
|
+ pub_token,
|
|
+ paren_token,
|
|
+ in_token: None,
|
|
+ path: Box::new(Path::from(path)),
|
|
+ }));
|
|
+ }
|
|
+ } else if content.peek(Token![in]) {
|
|
+ let in_token: Token![in] = content.parse()?;
|
|
+ let path = content.call(Path::parse_mod_style)?;
|
|
+
|
|
+ input.advance_to(&ahead);
|
|
return Ok(Visibility::Restricted(VisRestricted {
|
|
pub_token,
|
|
- paren_token: parenthesized!(content in input),
|
|
- in_token: None,
|
|
- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
|
|
- }));
|
|
- } else if content.peek(Token![in]) {
|
|
- return Ok(Visibility::Restricted(VisRestricted {
|
|
- pub_token,
|
|
- paren_token: parenthesized!(content in input),
|
|
- in_token: Some(content.parse()?),
|
|
- path: Box::new(content.call(Path::parse_mod_style)?),
|
|
+ paren_token,
|
|
+ in_token: Some(in_token),
|
|
+ path: Box::new(path),
|
|
}));
|
|
}
|
|
}
|
|
|
|
Ok(Visibility::Public(VisPublic { pub_token }))
|
|
}
|
|
|
|
fn parse_crate(input: ParseStream) -> Result<Self> {
|
|
if input.peek2(Token![::]) {
|
|
Ok(Visibility::Inherited)
|
|
} else {
|
|
Ok(Visibility::Crate(VisCrate {
|
|
crate_token: input.parse()?,
|
|
}))
|
|
}
|
|
}
|
|
+
|
|
+ #[cfg(feature = "full")]
|
|
+ pub(crate) fn is_some(&self) -> bool {
|
|
+ match self {
|
|
+ Visibility::Inherited => false,
|
|
+ _ => true,
|
|
+ }
|
|
+ }
|
|
}
|
|
}
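// Illustrative sketch, not part of the vendored patch: the restricted
// visibilities handled by `parse_pub` above. Assumes syn 1.0 with its
// default features.
use syn::{parse_quote, Visibility};

fn demo_visibility() {
    let crate_vis: Visibility = parse_quote!(pub(crate));
    let in_vis: Visibility = parse_quote!(pub(in some::module));
    assert!(matches!(crate_vis, Visibility::Restricted(_)));
    assert!(matches!(in_vis, Visibility::Restricted(_)));
    // A tuple-struct field list such as `pub (crate::A, crate::B)` must not
    // be read as a restricted visibility; that is what the extra
    // `content.is_empty()` check above guards against (syn issue #720).
}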
|
|
|
|
#[cfg(feature = "printing")]
|
|
mod printing {
|
|
use super::*;
|
|
|
|
use proc_macro2::TokenStream;
|
|
diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
|
|
--- a/third_party/rust/syn/src/derive.rs
|
|
+++ b/third_party/rust/syn/src/derive.rs
|
|
@@ -1,15 +1,15 @@
|
|
use super::*;
|
|
use crate::punctuated::Punctuated;
|
|
|
|
ast_struct! {
|
|
/// Data structure sent to a `proc_macro_derive` macro.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"derive"` feature.*
|
|
pub struct DeriveInput {
|
|
/// Attributes tagged on the whole struct or enum.
|
|
pub attrs: Vec<Attribute>,
|
|
|
|
/// Visibility of the struct or enum.
|
|
pub vis: Visibility,
|
|
|
|
/// Name of the struct or enum.
|
|
@@ -21,17 +21,17 @@ ast_struct! {
|
|
/// Data within the struct or enum.
|
|
pub data: Data,
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// The storage of a struct, enum or union data structure.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"derive"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
@@ -48,41 +48,41 @@ ast_enum_of_structs! {
|
|
}
|
|
|
|
do_not_generate_to_tokens
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A struct input to a `proc_macro_derive` macro.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"`
|
|
/// feature.*
|
|
pub struct DataStruct {
|
|
pub struct_token: Token![struct],
|
|
pub fields: Fields,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An enum input to a `proc_macro_derive` macro.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"`
|
|
/// feature.*
|
|
pub struct DataEnum {
|
|
pub enum_token: Token![enum],
|
|
pub brace_token: token::Brace,
|
|
pub variants: Punctuated<Variant, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An untagged union input to a `proc_macro_derive` macro.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"`
|
|
/// feature.*
|
|
pub struct DataUnion {
|
|
pub union_token: Token![union],
|
|
pub fields: FieldsNamed,
|
|
}
|
|
}
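// Illustrative sketch, not part of the vendored patch: how the three data
// kinds above are reached from a `DeriveInput`. Assumes syn 1.0 with the
// "derive" feature (plus "parsing"/"printing" for `parse_quote!`).
use syn::{parse_quote, Data, DeriveInput};

fn describe(input: &DeriveInput) -> &'static str {
    match &input.data {
        Data::Struct(_) => "struct",
        Data::Enum(_) => "enum",
        Data::Union(_) => "union",
    }
}

fn demo_describe() {
    let input: DeriveInput = parse_quote!(enum Either { Left, Right });
    assert_eq!(describe(&input), "enum");
}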
|
|
|
|
#[cfg(feature = "parsing")]
|
|
diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
|
|
--- a/third_party/rust/syn/src/discouraged.rs
|
|
+++ b/third_party/rust/syn/src/discouraged.rs
|
|
@@ -11,17 +11,17 @@ pub trait Speculative {
|
|
/// stream to the fork to "commit" the parsing from the fork to the main
|
|
/// stream.
|
|
///
|
|
/// If you can avoid doing this, you should, as it limits the ability to
|
|
/// generate useful errors. That said, it is often the only way to parse
|
|
/// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
|
|
/// is that when the fork fails to parse an `A`, it's impossible to tell
|
|
/// whether that was because of a syntax error and the user meant to provide
|
|
- /// an `A`, or that the `A`s are finished and its time to start parsing
|
|
+ /// an `A`, or that the `A`s are finished and it's time to start parsing
|
|
/// `B`s. Use with care.
|
|
///
|
|
/// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
|
|
/// parsing `B*` and removing the leading members of `A` from the
|
|
/// repetition, bypassing the need to involve the downsides associated with
|
|
/// speculative parsing.
|
|
///
|
|
/// [`ParseStream::fork`]: ParseBuffer::fork
|
|
@@ -67,17 +67,16 @@ pub trait Speculative {
|
|
/// # }
|
|
///
|
|
/// impl Parse for PathSegment {
|
|
/// fn parse(input: ParseStream) -> Result<Self> {
|
|
/// if input.peek(Token![super])
|
|
/// || input.peek(Token![self])
|
|
/// || input.peek(Token![Self])
|
|
/// || input.peek(Token![crate])
|
|
- /// || input.peek(Token![extern])
|
|
/// {
|
|
/// let ident = input.call(Ident::parse_any)?;
|
|
/// return Ok(PathSegment::from(ident));
|
|
/// }
|
|
///
|
|
/// let ident = input.parse()?;
|
|
/// if input.peek(Token![::]) && input.peek3(Token![<]) {
|
|
/// return Ok(PathSegment {
|
|
@@ -159,13 +158,37 @@ pub trait Speculative {
|
|
}
|
|
|
|
impl<'a> Speculative for ParseBuffer<'a> {
|
|
fn advance_to(&self, fork: &Self) {
|
|
if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
|
|
panic!("Fork was not derived from the advancing parse stream");
|
|
}
|
|
|
|
+ let (self_unexp, self_sp) = inner_unexpected(self);
|
|
+ let (fork_unexp, fork_sp) = inner_unexpected(fork);
|
|
+ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
|
|
+ match (fork_sp, self_sp) {
|
|
+ // Unexpected set on the fork, but not on `self`, copy it over.
|
|
+ (Some(span), None) => {
|
|
+ self_unexp.set(Unexpected::Some(span));
|
|
+ }
|
|
+ // Unexpected unset. Use chain to propagate errors from fork.
|
|
+ (None, None) => {
|
|
+ fork_unexp.set(Unexpected::Chain(self_unexp));
|
|
+
|
|
+ // Ensure toplevel 'unexpected' tokens from the fork don't
|
|
+ // bubble up the chain by replacing the root `unexpected`
|
|
+ // pointer, only 'unexpected' tokens from existing group
|
|
+ // parsers should bubble.
|
|
+ fork.unexpected
|
|
+ .set(Some(Rc::new(Cell::new(Unexpected::None))));
|
|
+ }
|
|
+ // Unexpected has been set on `self`. No changes needed.
|
|
+ (_, Some(_)) => {}
|
|
+ }
|
|
+ }
|
|
+
|
|
// See comment on `cell` in the struct definition.
|
|
self.cell
|
|
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
|
|
}
|
|
}
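// Illustrative sketch, not part of the vendored patch: the fork/advance_to
// pattern that the `Speculative` impl above enables. `NumberOrName` is a
// hypothetical type; assumes syn 1.0 with the "parsing" feature. As the
// documentation above notes, prefer peeking where it is possible.
use syn::parse::discouraged::Speculative;
use syn::parse::{Parse, ParseStream};
use syn::{Ident, LitInt, Result};

enum NumberOrName {
    Number(LitInt),
    Name(Ident),
}

impl Parse for NumberOrName {
    fn parse(input: ParseStream) -> Result<Self> {
        // Speculate on a fork and commit it with `advance_to` only on
        // success, so a failed attempt leaves `input` untouched.
        let ahead = input.fork();
        if let Ok(lit) = ahead.parse::<LitInt>() {
            input.advance_to(&ahead);
            return Ok(NumberOrName::Number(lit));
        }
        input.parse().map(NumberOrName::Name)
    }
}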
|
|
diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
|
|
--- a/third_party/rust/syn/src/error.rs
|
|
+++ b/third_party/rust/syn/src/error.rs
|
|
@@ -1,9 +1,8 @@
|
|
-use std;
|
|
use std::fmt::{self, Debug, Display};
|
|
use std::iter::FromIterator;
|
|
use std::slice;
|
|
use std::vec;
|
|
|
|
use proc_macro2::{
|
|
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
|
|
};
|
|
@@ -27,18 +26,18 @@ pub type Result<T> = std::result::Result
|
|
/// message than simply panicking the macro.
|
|
///
|
|
/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
|
|
///
|
|
/// When parsing macro input, the [`parse_macro_input!`] macro handles the
|
|
/// conversion to `compile_error!` automatically.
|
|
///
|
|
/// ```
|
|
-/// extern crate proc_macro;
|
|
-///
|
|
+/// # extern crate proc_macro;
|
|
+/// #
|
|
/// use proc_macro::TokenStream;
|
|
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
|
|
///
|
|
/// # const IGNORE: &str = stringify! {
|
|
/// #[proc_macro_attribute]
|
|
/// # };
|
|
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
|
|
/// let args = parse_macro_input!(args as AttributeArgs);
|
|
@@ -77,17 +76,16 @@ pub type Result<T> = std::result::Result
|
|
/// # use proc_macro2::TokenStream;
|
|
/// # use syn::{DeriveInput, Result};
|
|
/// #
|
|
/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
|
|
/// # unimplemented!()
|
|
/// # }
|
|
/// # }
|
|
/// ```
|
|
-#[derive(Clone)]
|
|
pub struct Error {
|
|
messages: Vec<ErrorMessage>,
|
|
}
|
|
|
|
struct ErrorMessage {
|
|
// Span is implemented as an index into a thread-local interner to keep the
|
|
// size small. It is not safe to access from a different thread. We want
|
|
// errors to be Send and Sync to play nicely with the Failure crate, so pin
|
|
@@ -245,16 +243,27 @@ pub fn new_at<T: Display>(scope: Span, c
|
|
if cursor.eof() {
|
|
Error::new(scope, format!("unexpected end of input, {}", message))
|
|
} else {
|
|
let span = crate::buffer::open_span_of_group(cursor);
|
|
Error::new(span, message)
|
|
}
|
|
}
|
|
|
|
+#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
|
|
+pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
|
|
+ Error {
|
|
+ messages: vec![ErrorMessage {
|
|
+ start_span: ThreadBound::new(start),
|
|
+ end_span: ThreadBound::new(end),
|
|
+ message: message.to_string(),
|
|
+ }],
|
|
+ }
|
|
+}
|
|
+
|
|
impl Debug for Error {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
if self.messages.len() == 1 {
|
|
formatter
|
|
.debug_tuple("Error")
|
|
.field(&self.messages[0])
|
|
.finish()
|
|
} else {
|
|
@@ -273,16 +282,24 @@ impl Debug for ErrorMessage {
|
|
}
|
|
|
|
impl Display for Error {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
formatter.write_str(&self.messages[0].message)
|
|
}
|
|
}
|
|
|
|
+impl Clone for Error {
|
|
+ fn clone(&self) -> Self {
|
|
+ Error {
|
|
+ messages: self.messages.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
impl Clone for ErrorMessage {
|
|
fn clone(&self) -> Self {
|
|
let start = self
|
|
.start_span
|
|
.get()
|
|
.cloned()
|
|
.unwrap_or_else(Span::call_site);
|
|
let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
|
|
@@ -350,8 +367,16 @@ impl<'a> Iterator for Iter<'a> {
|
|
type Item = Error;
|
|
|
|
fn next(&mut self) -> Option<Self::Item> {
|
|
Some(Error {
|
|
messages: vec![self.messages.next()?.clone()],
|
|
})
|
|
}
|
|
}
|
|
+
|
|
+impl Extend<Error> for Error {
|
|
+ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
|
|
+ for err in iter {
|
|
+ self.combine(err);
|
|
+ }
|
|
+ }
|
|
+}
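// Illustrative sketch, not part of the vendored patch: accumulating several
// errors through the new `Extend` impl above and reading them back out via
// `IntoIterator`. Assumes syn 1.0 and proc-macro2.
use proc_macro2::Span;
use syn::Error;

fn demo_combine_errors() {
    let mut all = Error::new(Span::call_site(), "first problem");
    all.extend(vec![
        Error::new(Span::call_site(), "second problem"),
        Error::new(Span::call_site(), "third problem"),
    ]);
    // Every message stays addressable; `to_compile_error` would emit one
    // `compile_error!` invocation per message.
    assert_eq!(all.into_iter().count(), 3);
}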
|
|
diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
|
|
--- a/third_party/rust/syn/src/expr.rs
|
|
+++ b/third_party/rust/syn/src/expr.rs
|
|
@@ -1,23 +1,26 @@
|
|
use super::*;
|
|
use crate::punctuated::Punctuated;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use crate::tt::TokenStreamHelper;
|
|
+#[cfg(feature = "full")]
|
|
+use crate::reserved::Reserved;
|
|
use proc_macro2::{Span, TokenStream};
|
|
-#[cfg(feature = "extra-traits")]
|
|
+#[cfg(feature = "printing")]
|
|
+use quote::IdentFragment;
|
|
+#[cfg(feature = "printing")]
|
|
+use std::fmt::{self, Display};
|
|
use std::hash::{Hash, Hasher};
|
|
-#[cfg(all(feature = "parsing", feature = "full"))]
|
|
+#[cfg(feature = "parsing")]
|
|
use std::mem;
|
|
|
|
ast_enum_of_structs! {
|
|
/// A Rust expression.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
- /// feature.*
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
+ /// feature, but most of the variants are not available unless "full" is enabled.*
|
|
///
|
|
/// # Syntax tree enums
|
|
///
|
|
/// This type is a syntax tree enum. In Syn this and other syntax tree enums
|
|
/// are designed to be traversed using the following rebinding idiom.
|
|
///
|
|
/// ```
|
|
/// # use syn::Expr;
|
|
@@ -78,17 +81,17 @@ ast_enum_of_structs! {
|
|
/// if let Expr::Tuple(base) = *discriminant.base {
|
|
/// # }
|
|
/// # }
|
|
/// ```
|
|
///
|
|
/// A sign that you may not be choosing the right variable names is if you
|
|
/// see names getting repeated in your code, like accessing
|
|
/// `receiver.receiver` or `pat.pat` or `cond.cond`.
|
|
- pub enum Expr #manual_extra_traits {
|
|
+ pub enum Expr {
|
|
/// A slice literal expression: `[a, b, c, d]`.
|
|
Array(ExprArray),
|
|
|
|
/// An assignment expression: `a = compute()`.
|
|
Assign(ExprAssign),
|
|
|
|
/// A compound assignment expression: `counter += 1`.
|
|
AssignOp(ExprAssignOp),
|
|
@@ -223,191 +226,191 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A slice literal expression: `[a, b, c, d]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprArray #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub bracket_token: token::Bracket,
|
|
pub elems: Punctuated<Expr, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An assignment expression: `a = compute()`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprAssign #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub left: Box<Expr>,
|
|
pub eq_token: Token![=],
|
|
pub right: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A compound assignment expression: `counter += 1`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprAssignOp #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub left: Box<Expr>,
|
|
pub op: BinOp,
|
|
pub right: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An async block: `async { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprAsync #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub async_token: Token![async],
|
|
pub capture: Option<Token![move]>,
|
|
pub block: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An await expression: `fut.await`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprAwait #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub base: Box<Expr>,
|
|
pub dot_token: Token![.],
|
|
pub await_token: token::Await,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A binary operation: `a + b`, `a * b`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprBinary {
|
|
pub attrs: Vec<Attribute>,
|
|
pub left: Box<Expr>,
|
|
pub op: BinOp,
|
|
pub right: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A blocked scope: `{ ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprBlock #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub label: Option<Label>,
|
|
pub block: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A box expression: `box f`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprBox #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub box_token: Token![box],
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A `break`, with an optional label to break and an optional
|
|
/// expression.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprBreak #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub break_token: Token![break],
|
|
pub label: Option<Lifetime>,
|
|
pub expr: Option<Box<Expr>>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A function call expression: `invoke(a, b)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprCall {
|
|
pub attrs: Vec<Attribute>,
|
|
pub func: Box<Expr>,
|
|
pub paren_token: token::Paren,
|
|
pub args: Punctuated<Expr, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A cast expression: `foo as f64`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprCast {
|
|
pub attrs: Vec<Attribute>,
|
|
pub expr: Box<Expr>,
|
|
pub as_token: Token![as],
|
|
pub ty: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A closure expression: `|a, b| a + b`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprClosure #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub asyncness: Option<Token![async]>,
|
|
pub movability: Option<Token![static]>,
|
|
pub capture: Option<Token![move]>,
|
|
pub or1_token: Token![|],
|
|
pub inputs: Punctuated<Pat, Token![,]>,
|
|
pub or2_token: Token![|],
|
|
pub output: ReturnType,
|
|
pub body: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A `continue`, with an optional label.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprContinue #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub continue_token: Token![continue],
|
|
pub label: Option<Lifetime>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Access of a named struct field (`obj.k`) or unnamed tuple struct
|
|
/// field (`obj.0`).
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprField {
|
|
pub attrs: Vec<Attribute>,
|
|
pub base: Box<Expr>,
|
|
pub dot_token: Token![.],
|
|
pub member: Member,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A for loop: `for pat in expr { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprForLoop #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub label: Option<Label>,
|
|
pub for_token: Token![for],
|
|
pub pat: Pat,
|
|
pub in_token: Token![in],
|
|
pub expr: Box<Expr>,
|
|
pub body: Block,
|
|
@@ -416,538 +419,312 @@ ast_struct! {
|
|
|
|
ast_struct! {
|
|
/// An expression contained within invisible delimiters.
|
|
///
|
|
/// This variant is important for faithfully representing the precedence
|
|
/// of expressions and is related to `None`-delimited spans in a
|
|
/// `TokenStream`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprGroup #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub group_token: token::Group,
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An `if` expression with an optional `else` block: `if expr { ... }
|
|
/// else { ... }`.
|
|
///
|
|
/// The `else` branch expression may only be an `If` or `Block`
|
|
/// expression, not any of the other types of expression.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprIf #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub if_token: Token![if],
|
|
pub cond: Box<Expr>,
|
|
pub then_branch: Block,
|
|
pub else_branch: Option<(Token![else], Box<Expr>)>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A square bracketed indexing expression: `vector[2]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprIndex {
|
|
pub attrs: Vec<Attribute>,
|
|
pub expr: Box<Expr>,
|
|
pub bracket_token: token::Bracket,
|
|
pub index: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A `let` guard: `let Some(x) = opt`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprLet #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub let_token: Token![let],
|
|
pub pat: Pat,
|
|
pub eq_token: Token![=],
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A literal in place of an expression: `1`, `"foo"`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprLit {
|
|
pub attrs: Vec<Attribute>,
|
|
pub lit: Lit,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Conditionless loop: `loop { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprLoop #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub label: Option<Label>,
|
|
pub loop_token: Token![loop],
|
|
pub body: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro invocation expression: `format!("{}", q)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprMacro #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub mac: Macro,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A `match` expression: `match n { Some(n) => {}, None => {} }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprMatch #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub match_token: Token![match],
|
|
pub expr: Box<Expr>,
|
|
pub brace_token: token::Brace,
|
|
pub arms: Vec<Arm>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A method call expression: `x.foo::<T>(a, b)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprMethodCall #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub receiver: Box<Expr>,
|
|
pub dot_token: Token![.],
|
|
pub method: Ident,
|
|
pub turbofish: Option<MethodTurbofish>,
|
|
pub paren_token: token::Paren,
|
|
pub args: Punctuated<Expr, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A parenthesized expression: `(a + b)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprParen {
|
|
pub attrs: Vec<Attribute>,
|
|
pub paren_token: token::Paren,
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A path like `std::mem::replace` possibly containing generic
|
|
/// parameters and a qualified self-type.
|
|
///
|
|
/// A plain identifier like `x` is a path of length 1.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprPath {
|
|
pub attrs: Vec<Attribute>,
|
|
pub qself: Option<QSelf>,
|
|
pub path: Path,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprRange #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub from: Option<Box<Expr>>,
|
|
pub limits: RangeLimits,
|
|
pub to: Option<Box<Expr>>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A referencing operation: `&a` or `&mut a`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprReference #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub and_token: Token![&],
|
|
pub raw: Reserved,
|
|
pub mutability: Option<Token![mut]>,
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An array literal constructed from one repeated element: `[0u8; N]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprRepeat #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub bracket_token: token::Bracket,
|
|
pub expr: Box<Expr>,
|
|
pub semi_token: Token![;],
|
|
pub len: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A `return`, with an optional value to be returned.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprReturn #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub return_token: Token![return],
|
|
pub expr: Option<Box<Expr>>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A struct literal expression: `Point { x: 1, y: 1 }`.
|
|
///
|
|
/// The `rest` provides the value of the remaining fields as in `S { a:
|
|
/// 1, b: 1, ..rest }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprStruct #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub path: Path,
|
|
pub brace_token: token::Brace,
|
|
pub fields: Punctuated<FieldValue, Token![,]>,
|
|
pub dot2_token: Option<Token![..]>,
|
|
pub rest: Option<Box<Expr>>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A try-expression: `expr?`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprTry #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub expr: Box<Expr>,
|
|
pub question_token: Token![?],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A try block: `try { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprTryBlock #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub try_token: Token![try],
|
|
pub block: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A tuple expression: `(a, b, c, d)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprTuple #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub paren_token: token::Paren,
|
|
pub elems: Punctuated<Expr, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A type ascription expression: `foo: f64`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprType #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub expr: Box<Expr>,
|
|
pub colon_token: Token![:],
|
|
pub ty: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A unary operation: `!x`, `*x`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ExprUnary {
|
|
pub attrs: Vec<Attribute>,
|
|
pub op: UnOp,
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An unsafe block: `unsafe { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprUnsafe #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub unsafe_token: Token![unsafe],
|
|
pub block: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A while loop: `while expr { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprWhile #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub label: Option<Label>,
|
|
pub while_token: Token![while],
|
|
pub cond: Box<Expr>,
|
|
pub body: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A yield expression: `yield expr`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ExprYield #full {
|
|
pub attrs: Vec<Attribute>,
|
|
pub yield_token: Token![yield],
|
|
pub expr: Option<Box<Expr>>,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Expr {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Expr {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (Expr::Array(this), Expr::Array(other)) => this == other,
|
|
- (Expr::Assign(this), Expr::Assign(other)) => this == other,
|
|
- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
|
|
- (Expr::Async(this), Expr::Async(other)) => this == other,
|
|
- (Expr::Await(this), Expr::Await(other)) => this == other,
|
|
- (Expr::Binary(this), Expr::Binary(other)) => this == other,
|
|
- (Expr::Block(this), Expr::Block(other)) => this == other,
|
|
- (Expr::Box(this), Expr::Box(other)) => this == other,
|
|
- (Expr::Break(this), Expr::Break(other)) => this == other,
|
|
- (Expr::Call(this), Expr::Call(other)) => this == other,
|
|
- (Expr::Cast(this), Expr::Cast(other)) => this == other,
|
|
- (Expr::Closure(this), Expr::Closure(other)) => this == other,
|
|
- (Expr::Continue(this), Expr::Continue(other)) => this == other,
|
|
- (Expr::Field(this), Expr::Field(other)) => this == other,
|
|
- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
|
|
- (Expr::Group(this), Expr::Group(other)) => this == other,
|
|
- (Expr::If(this), Expr::If(other)) => this == other,
|
|
- (Expr::Index(this), Expr::Index(other)) => this == other,
|
|
- (Expr::Let(this), Expr::Let(other)) => this == other,
|
|
- (Expr::Lit(this), Expr::Lit(other)) => this == other,
|
|
- (Expr::Loop(this), Expr::Loop(other)) => this == other,
|
|
- (Expr::Macro(this), Expr::Macro(other)) => this == other,
|
|
- (Expr::Match(this), Expr::Match(other)) => this == other,
|
|
- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
|
|
- (Expr::Paren(this), Expr::Paren(other)) => this == other,
|
|
- (Expr::Path(this), Expr::Path(other)) => this == other,
|
|
- (Expr::Range(this), Expr::Range(other)) => this == other,
|
|
- (Expr::Reference(this), Expr::Reference(other)) => this == other,
|
|
- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
|
|
- (Expr::Return(this), Expr::Return(other)) => this == other,
|
|
- (Expr::Struct(this), Expr::Struct(other)) => this == other,
|
|
- (Expr::Try(this), Expr::Try(other)) => this == other,
|
|
- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
|
|
- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
|
|
- (Expr::Type(this), Expr::Type(other)) => this == other,
|
|
- (Expr::Unary(this), Expr::Unary(other)) => this == other,
|
|
- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
|
|
- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- (Expr::While(this), Expr::While(other)) => this == other,
|
|
- (Expr::Yield(this), Expr::Yield(other)) => this == other,
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Expr {
|
|
- fn hash<H>(&self, hash: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- Expr::Array(expr) => {
|
|
- hash.write_u8(0);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Assign(expr) => {
|
|
- hash.write_u8(1);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::AssignOp(expr) => {
|
|
- hash.write_u8(2);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Async(expr) => {
|
|
- hash.write_u8(3);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Await(expr) => {
|
|
- hash.write_u8(4);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Binary(expr) => {
|
|
- hash.write_u8(5);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Block(expr) => {
|
|
- hash.write_u8(6);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Box(expr) => {
|
|
- hash.write_u8(7);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Break(expr) => {
|
|
- hash.write_u8(8);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Call(expr) => {
|
|
- hash.write_u8(9);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Cast(expr) => {
|
|
- hash.write_u8(10);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Closure(expr) => {
|
|
- hash.write_u8(11);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Continue(expr) => {
|
|
- hash.write_u8(12);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Field(expr) => {
|
|
- hash.write_u8(13);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::ForLoop(expr) => {
|
|
- hash.write_u8(14);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Group(expr) => {
|
|
- hash.write_u8(15);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::If(expr) => {
|
|
- hash.write_u8(16);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Index(expr) => {
|
|
- hash.write_u8(17);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Let(expr) => {
|
|
- hash.write_u8(18);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Lit(expr) => {
|
|
- hash.write_u8(19);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Loop(expr) => {
|
|
- hash.write_u8(20);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Macro(expr) => {
|
|
- hash.write_u8(21);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Match(expr) => {
|
|
- hash.write_u8(22);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::MethodCall(expr) => {
|
|
- hash.write_u8(23);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Paren(expr) => {
|
|
- hash.write_u8(24);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Path(expr) => {
|
|
- hash.write_u8(25);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Range(expr) => {
|
|
- hash.write_u8(26);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Reference(expr) => {
|
|
- hash.write_u8(27);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Repeat(expr) => {
|
|
- hash.write_u8(28);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Return(expr) => {
|
|
- hash.write_u8(29);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Struct(expr) => {
|
|
- hash.write_u8(30);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Try(expr) => {
|
|
- hash.write_u8(31);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::TryBlock(expr) => {
|
|
- hash.write_u8(32);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Tuple(expr) => {
|
|
- hash.write_u8(33);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Type(expr) => {
|
|
- hash.write_u8(34);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Unary(expr) => {
|
|
- hash.write_u8(35);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Unsafe(expr) => {
|
|
- hash.write_u8(36);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Verbatim(expr) => {
|
|
- hash.write_u8(37);
|
|
- TokenStreamHelper(expr).hash(hash);
|
|
- }
|
|
- Expr::While(expr) => {
|
|
- hash.write_u8(38);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::Yield(expr) => {
|
|
- hash.write_u8(39);
|
|
- expr.hash(hash);
|
|
- }
|
|
- Expr::__Nonexhaustive => unreachable!(),
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
impl Expr {
|
|
#[cfg(all(feature = "parsing", feature = "full"))]
|
|
pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
|
|
match self {
|
|
Expr::Box(ExprBox { attrs, .. })
|
|
| Expr::Array(ExprArray { attrs, .. })
|
|
| Expr::Call(ExprCall { attrs, .. })
|
|
| Expr::MethodCall(ExprMethodCall { attrs, .. })
|
|
@@ -991,107 +768,145 @@ impl Expr {
|
|
}
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// A struct or tuple struct field accessed in a struct literal or field
|
|
/// expression.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub enum Member {
|
|
/// A named field like `self.x`.
|
|
Named(Ident),
|
|
/// An unnamed field like `self.0`.
|
|
Unnamed(Index),
|
|
}
|
|
}
|
|
|
|
+impl Eq for Member {}
|
|
+
|
|
+impl PartialEq for Member {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Member::Named(this), Member::Named(other)) => this == other,
|
|
+ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+impl Hash for Member {
|
|
+ fn hash<H: Hasher>(&self, state: &mut H) {
|
|
+ match self {
|
|
+ Member::Named(m) => m.hash(state),
|
|
+ Member::Unnamed(m) => m.hash(state),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "printing")]
|
|
+impl IdentFragment for Member {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Member::Named(m) => Display::fmt(m, formatter),
|
|
+ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn span(&self) -> Option<Span> {
|
|
+ match self {
|
|
+ Member::Named(m) => Some(m.span()),
|
|
+ Member::Unnamed(m) => Some(m.span),
|
|
+ }
|
|
+ }
|
|
+}
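
The `IdentFragment` impl added above is what lets a `Member` be spliced into `quote::format_ident!`, a common pattern in derive macros that generate one accessor per field. A hedged sketch follows; the `get_` prefix and the extra `quote` dependency are illustrative assumptions, not part of this patch:

```rust
use quote::format_ident;
use syn::Expr;

fn main() {
    let expr: Expr = syn::parse_str("self.width").expect("valid field access");
    if let Expr::Field(field) = expr {
        // A named member formats as its identifier; an unnamed member
        // (e.g. `self.0`) formats as its index.
        let getter = format_ident!("get_{}", field.member);
        assert_eq!(getter, "get_width");
    }
}
```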
+
|
|
ast_struct! {
|
|
/// The index of an unnamed tuple struct field.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- pub struct Index #manual_extra_traits {
|
|
+ pub struct Index {
|
|
pub index: u32,
|
|
pub span: Span,
|
|
}
|
|
}
|
|
|
|
impl From<usize> for Index {
|
|
fn from(index: usize) -> Index {
|
|
assert!(index < u32::max_value() as usize);
|
|
Index {
|
|
index: index as u32,
|
|
span: Span::call_site(),
|
|
}
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
impl Eq for Index {}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
impl PartialEq for Index {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
self.index == other.index
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
impl Hash for Index {
|
|
fn hash<H: Hasher>(&self, state: &mut H) {
|
|
self.index.hash(state);
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "full")]
|
|
-ast_struct! {
|
|
- #[derive(Default)]
|
|
- pub struct Reserved {
|
|
- private: (),
|
|
+#[cfg(feature = "printing")]
|
|
+impl IdentFragment for Index {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ Display::fmt(&self.index, formatter)
|
|
+ }
|
|
+
|
|
+ fn span(&self) -> Option<Span> {
|
|
+ Some(self.span)
|
|
}
|
|
}
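
Alongside the `IdentFragment` impl added above, the `From<usize>` conversion and the crate's `ToTokens` impl for `Index` (under the `"printing"` feature) are what derive macros rely on to emit unsuffixed tuple indices such as `self.0`. A small illustrative sketch, again assuming the `quote` crate alongside syn:

```rust
use proc_macro2::TokenStream;
use quote::quote;
use syn::Index;

/// Builds `0 + self.0 + self.1 + ...` for a tuple struct with `n` fields.
fn sum_fields(n: usize) -> TokenStream {
    let indices = (0..n).map(Index::from);
    // Interpolating a plain `usize` would emit a suffixed literal like
    // `self.0usize`; `Index` prints the bare index instead.
    quote! { 0 #( + self.#indices )* }
}

fn main() {
    // Prints tokens equivalent to: 0 + self.0 + self.1 + self.2
    println!("{}", sum_fields(3));
}
```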
#[cfg(feature = "full")]
|
|
ast_struct! {
|
|
/// The `::<>` explicit type parameters passed to a method call:
|
|
/// `parse::<u64>()`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct MethodTurbofish {
|
|
pub colon2_token: Token![::],
|
|
pub lt_token: Token![<],
|
|
pub args: Punctuated<GenericMethodArgument, Token![,]>,
|
|
pub gt_token: Token![>],
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
ast_enum! {
|
|
/// An individual generic argument to a method, like `T`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub enum GenericMethodArgument {
|
|
/// A type argument.
|
|
Type(Type),
|
|
/// A const expression. Must be inside of a block.
|
|
///
|
|
/// NOTE: Identity expressions are represented as Type arguments, as
|
|
/// they are indistinguishable syntactically.
|
|
Const(Expr),
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
ast_struct! {
|
|
/// A field-value pair in a struct literal.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct FieldValue {
|
|
/// Attributes tagged on the field.
|
|
pub attrs: Vec<Attribute>,
|
|
|
|
/// Name or index of the field.
|
|
pub member: Member,
|
|
|
|
/// The colon in `Struct { x: x }`. If written in shorthand like
|
|
@@ -1102,17 +917,17 @@ ast_struct! {
|
|
pub expr: Expr,
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
ast_struct! {
|
|
/// A lifetime labeling a `for`, `while`, or `loop`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct Label {
|
|
pub name: Lifetime,
|
|
pub colon_token: Token![:],
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
ast_struct! {
|
|
@@ -1129,45 +944,44 @@ ast_struct! {
|
|
/// }
|
|
/// // ...
|
|
/// # _ => {}
|
|
/// }
|
|
/// # false
|
|
/// # }
|
|
/// ```
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct Arm {
|
|
pub attrs: Vec<Attribute>,
|
|
pub pat: Pat,
|
|
pub guard: Option<(Token![if], Box<Expr>)>,
|
|
pub fat_arrow_token: Token![=>],
|
|
pub body: Box<Expr>,
|
|
pub comma: Option<Token![,]>,
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
ast_enum! {
|
|
/// Limit types of a range, inclusive or exclusive.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy))]
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub enum RangeLimits {
|
|
/// Inclusive at the beginning, exclusive at the end.
|
|
HalfOpen(Token![..]),
|
|
/// Inclusive at the beginning and end.
|
|
Closed(Token![..=]),
|
|
}
|
|
}
|
|
|
|
#[cfg(any(feature = "parsing", feature = "printing"))]
|
|
#[cfg(feature = "full")]
|
|
pub(crate) fn requires_terminator(expr: &Expr) -> bool {
|
|
- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
|
|
+ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
|
|
match *expr {
|
|
Expr::Unsafe(..)
|
|
| Expr::Block(..)
|
|
| Expr::If(..)
|
|
| Expr::Match(..)
|
|
| Expr::While(..)
|
|
| Expr::Loop(..)
|
|
| Expr::ForLoop(..)
|
|
@@ -1178,26 +992,27 @@ pub(crate) fn requires_terminator(expr:
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub(crate) mod parsing {
|
|
use super::*;
|
|
|
|
use crate::parse::{Parse, ParseStream, Result};
|
|
use crate::path;
|
|
+ use std::cmp::Ordering;
|
|
+
|
|
+ crate::custom_keyword!(raw);
|
|
|
|
// When we're parsing expressions which occur before blocks, like in an if
|
|
// statement's condition, we cannot parse a struct literal.
|
|
//
|
|
// Struct literals are ambiguous in certain positions
|
|
// https://github.com/rust-lang/rfcs/pull/92
|
|
- #[derive(Copy, Clone)]
|
|
pub struct AllowStruct(bool);
|
|
|
|
- #[derive(Copy, Clone, PartialEq, PartialOrd)]
|
|
enum Precedence {
|
|
Any,
|
|
Assign,
|
|
Range,
|
|
Or,
|
|
And,
|
|
Compare,
|
|
BitOr,
|
|
@@ -1241,19 +1056,131 @@ pub(crate) mod parsing {
|
|
}
|
|
|
|
impl Parse for Expr {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
ambiguous_expr(input, AllowStruct(true))
|
|
}
|
|
}
|
|
|
|
- #[cfg(feature = "full")]
|
|
- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
|
|
- ambiguous_expr(input, AllowStruct(false))
|
|
+ impl Expr {
|
|
+ /// An alternative to the primary `Expr::parse` parser (from the
|
|
+ /// [`Parse`] trait) for ambiguous syntactic positions in which a
|
|
+ /// trailing brace should not be taken as part of the expression.
|
|
+ ///
|
|
+ /// Rust grammar has an ambiguity where braces sometimes turn a path
|
|
+ /// expression into a struct initialization and sometimes do not. In the
|
|
+ /// following code, the expression `S {}` is one expression. Presumably
|
|
+ /// there is an empty struct `struct S {}` defined somewhere which it is
|
|
+ /// instantiating.
|
|
+ ///
|
|
+ /// ```
|
|
+ /// # struct S;
|
|
+ /// # impl std::ops::Deref for S {
|
|
+ /// # type Target = bool;
|
|
+ /// # fn deref(&self) -> &Self::Target {
|
|
+ /// # &true
|
|
+ /// # }
|
|
+ /// # }
|
|
+ /// let _ = *S {};
|
|
+ ///
|
|
+ /// // parsed by rustc as: `*(S {})`
|
|
+ /// ```
|
|
+ ///
|
|
+ /// We would want to parse the above using `Expr::parse` after the `=`
|
|
+ /// token.
|
|
+ ///
|
|
+ /// But in the following, `S {}` is *not* a struct init expression.
|
|
+ ///
|
|
+ /// ```
|
|
+ /// # const S: &bool = &true;
|
|
+ /// if *S {} {}
|
|
+ ///
|
|
+ /// // parsed by rustc as:
|
|
+ /// //
|
|
+ /// // if (*S) {
|
|
+ /// // /* empty block */
|
|
+ /// // }
|
|
+ /// // {
|
|
+ /// // /* another empty block */
|
|
+ /// // }
|
|
+ /// ```
|
|
+ ///
|
|
+ /// For that reason we would want to parse if-conditions using
|
|
+ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
|
|
+ /// similar syntactic positions such as the condition expr after a
|
|
+ /// `while` token or the expr at the top of a `match`.
|
|
+ ///
|
|
+ /// The Rust grammar's choices around which way this ambiguity is
|
|
+ /// resolved at various syntactic positions is fairly arbitrary. Really
|
|
+ /// either parse behavior could work in most positions, and language
|
|
+ /// designers just decide each case based on which is more likely to be
|
|
+ /// what the programmer had in mind most of the time.
|
|
+ ///
|
|
+ /// ```
|
|
+ /// # struct S;
|
|
+ /// # fn doc() -> S {
|
|
+ /// if return S {} {}
|
|
+ /// # unreachable!()
|
|
+ /// # }
|
|
+ ///
|
|
+ /// // parsed by rustc as:
|
|
+ /// //
|
|
+ /// // if (return (S {})) {
|
|
+ /// // }
|
|
+ /// //
|
|
+ /// // but could equally well have been this other arbitrary choice:
|
|
+ /// //
|
|
+ /// // if (return S) {
|
|
+ /// // }
|
|
+ /// // {}
|
|
+ /// ```
|
|
+ ///
|
|
+ /// Note the grammar ambiguity on trailing braces is distinct from
|
|
+ /// precedence and is not captured by assigning a precedence level to
|
|
+ /// the braced struct init expr in relation to other operators. This can
|
|
+ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
|
|
+ /// parses as `return (0..(S {}))` implying tighter precedence for
|
|
+ /// struct init than `..`, while the latter parses as `match (0..S) {}`
|
|
+ /// implying tighter precedence for `..` than struct init, a
|
|
+ /// contradiction.
|
|
+ #[cfg(feature = "full")]
|
|
+ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
|
|
+ ambiguous_expr(input, AllowStruct(false))
|
|
+ }
|
|
+ }
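
To make the intent concrete, here is a hedged sketch of how downstream code would use the newly public entry point when parsing an `if`-like construct in its own macro input. The `CondInput` type and its grammar are hypothetical, not something this patch defines:

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::{Block, Expr, Token};

#[allow(dead_code)]
struct CondInput {
    if_token: Token![if],
    cond: Expr,
    body: Block,
}

impl Parse for CondInput {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(CondInput {
            if_token: input.parse()?,
            // Like syn's own `if`/`while`/`match` parsers, do not treat the
            // trailing `{ ... }` as a struct literal.
            cond: input.call(Expr::parse_without_eager_brace)?,
            body: input.parse()?,
        })
    }
}

fn main() {
    let parsed: CondInput = syn::parse_str("if x > 0 { do_it() }").expect("valid input");
    assert!(matches!(parsed.cond, Expr::Binary(_)));
}
```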
+
|
|
+ impl Copy for AllowStruct {}
|
|
+
|
|
+ impl Clone for AllowStruct {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+ }
|
|
+
|
|
+ impl Copy for Precedence {}
|
|
+
|
|
+ impl Clone for Precedence {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+ }
|
|
+
|
|
+ impl PartialEq for Precedence {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ *self as u8 == *other as u8
|
|
+ }
|
|
+ }
|
|
+
|
|
+ impl PartialOrd for Precedence {
|
|
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
+ let this = *self as u8;
|
|
+ let other = *other as u8;
|
|
+ Some(this.cmp(&other))
|
|
+ }
|
|
}
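
These hand-written impls replace the `#[derive(Copy, Clone, PartialEq, PartialOrd)]` attributes that the previous version used; the observable behaviour is the same, with ordering following declaration order via the `as u8` discriminant cast. A standalone sketch of the same pattern (the `Level` enum here is illustrative only):

```rust
use std::cmp::Ordering;

// Fieldless enums can be cast to their discriminant with `as`,
// which gives an ordering that follows declaration order.
#[derive(Copy, Clone)]
enum Level {
    Any,
    Assign,
    Range,
    Or,
}

impl PartialEq for Level {
    fn eq(&self, other: &Self) -> bool {
        *self as u8 == *other as u8
    }
}

impl PartialOrd for Level {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some((*self as u8).cmp(&(*other as u8)))
    }
}

fn main() {
    assert!(Level::Any < Level::Assign);
    assert!(Level::Range < Level::Or);
}
```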
#[cfg(feature = "full")]
|
|
fn parse_expr(
|
|
input: ParseStream,
|
|
mut lhs: Expr,
|
|
allow_struct: AllowStruct,
|
|
base: Precedence,
|
|
@@ -1425,88 +1352,114 @@ pub(crate) mod parsing {
|
|
}
|
|
|
|
// Parse an arbitrary expression.
|
|
fn ambiguous_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
|
|
let lhs = unary_expr(input, allow_struct)?;
|
|
parse_expr(input, lhs, allow_struct, Precedence::Any)
|
|
}
|
|
|
|
+ #[cfg(feature = "full")]
|
|
+ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
|
|
+ let mut attrs = Vec::new();
|
|
+ loop {
|
|
+ if input.peek(token::Group) {
|
|
+ let ahead = input.fork();
|
|
+ let group = crate::group::parse_group(&ahead)?;
|
|
+ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
|
|
+ break;
|
|
+ }
|
|
+ let attr = group.content.call(attr::parsing::single_parse_outer)?;
|
|
+ if !group.content.is_empty() {
|
|
+ break;
|
|
+ }
|
|
+ attrs.push(attr);
|
|
+ } else if input.peek(Token![#]) {
|
|
+ attrs.push(input.call(attr::parsing::single_parse_outer)?);
|
|
+ } else {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ Ok(attrs)
|
|
+ }
|
|
+
|
|
// <UnOp> <trailer>
|
|
// & <trailer>
|
|
// &mut <trailer>
|
|
// box <trailer>
|
|
#[cfg(feature = "full")]
|
|
fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
|
|
- // TODO: optimize using advance_to
|
|
- let ahead = input.fork();
|
|
- ahead.call(Attribute::parse_outer)?;
|
|
- if ahead.peek(Token![&])
|
|
- || ahead.peek(Token![box])
|
|
- || ahead.peek(Token![*])
|
|
- || ahead.peek(Token![!])
|
|
- || ahead.peek(Token![-])
|
|
- {
|
|
- let attrs = input.call(Attribute::parse_outer)?;
|
|
- if input.peek(Token![&]) {
|
|
+ let begin = input.fork();
|
|
+ let attrs = input.call(expr_attrs)?;
|
|
+ if input.peek(Token![&]) {
|
|
+ let and_token: Token![&] = input.parse()?;
|
|
+ let raw: Option<raw> =
|
|
+ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
|
|
+ Some(input.parse()?)
|
|
+ } else {
|
|
+ None
|
|
+ };
|
|
+ let mutability: Option<Token![mut]> = input.parse()?;
|
|
+ if raw.is_some() && mutability.is_none() {
|
|
+ input.parse::<Token![const]>()?;
|
|
+ }
|
|
+ let expr = Box::new(unary_expr(input, allow_struct)?);
|
|
+ if raw.is_some() {
|
|
+ Ok(Expr::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
Ok(Expr::Reference(ExprReference {
|
|
attrs,
|
|
- and_token: input.parse()?,
|
|
+ and_token,
|
|
raw: Reserved::default(),
|
|
- mutability: input.parse()?,
|
|
- expr: Box::new(unary_expr(input, allow_struct)?),
|
|
- }))
|
|
- } else if input.peek(Token![box]) {
|
|
- Ok(Expr::Box(ExprBox {
|
|
- attrs,
|
|
- box_token: input.parse()?,
|
|
- expr: Box::new(unary_expr(input, allow_struct)?),
|
|
- }))
|
|
- } else {
|
|
- Ok(Expr::Unary(ExprUnary {
|
|
- attrs,
|
|
- op: input.parse()?,
|
|
- expr: Box::new(unary_expr(input, allow_struct)?),
|
|
+ mutability,
|
|
+ expr,
|
|
}))
|
|
}
|
|
+ } else if input.peek(Token![box]) {
|
|
+ Ok(Expr::Box(ExprBox {
|
|
+ attrs,
|
|
+ box_token: input.parse()?,
|
|
+ expr: Box::new(unary_expr(input, allow_struct)?),
|
|
+ }))
|
|
+ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
|
|
+ Ok(Expr::Unary(ExprUnary {
|
|
+ attrs,
|
|
+ op: input.parse()?,
|
|
+ expr: Box::new(unary_expr(input, allow_struct)?),
|
|
+ }))
|
|
} else {
|
|
- trailer_expr(input, allow_struct)
|
|
+ trailer_expr(attrs, input, allow_struct)
|
|
}
|
|
}
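
The new `raw` branch above accepts the raw address-of syntax (`&raw const expr`, `&raw mut expr`). Since syn 1.0 has no dedicated AST node for it, the tokens are preserved as `Expr::Verbatim`, while ordinary references keep their structured node. A small sketch of the observable behaviour, assuming the `"full"` feature:

```rust
use syn::Expr;

fn main() {
    // A plain reference still parses to the structured node.
    let e: Expr = syn::parse_str("&mut value").expect("valid expression");
    assert!(matches!(e, Expr::Reference(_)));

    // `&raw const` has no dedicated node, so the parser keeps the
    // original tokens as `Expr::Verbatim`.
    let e: Expr = syn::parse_str("&raw const value").expect("valid expression");
    assert!(matches!(e, Expr::Verbatim(_)));
}
```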
#[cfg(not(feature = "full"))]
|
|
fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
|
|
- // TODO: optimize using advance_to
|
|
- let ahead = input.fork();
|
|
- ahead.call(Attribute::parse_outer)?;
|
|
- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
|
|
+ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
|
|
Ok(Expr::Unary(ExprUnary {
|
|
- attrs: input.call(Attribute::parse_outer)?,
|
|
+ attrs: Vec::new(),
|
|
op: input.parse()?,
|
|
expr: Box::new(unary_expr(input, allow_struct)?),
|
|
}))
|
|
} else {
|
|
trailer_expr(input, allow_struct)
|
|
}
|
|
}
|
|
|
|
// <atom> (..<args>) ...
|
|
// <atom> . <ident> (..<args>) ...
|
|
// <atom> . <ident> ...
|
|
// <atom> . <lit> ...
|
|
// <atom> [ <expr> ] ...
|
|
// <atom> ? ...
|
|
#[cfg(feature = "full")]
|
|
- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
|
|
- if input.peek(token::Group) {
|
|
- return input.call(expr_group).map(Expr::Group);
|
|
- }
|
|
-
|
|
- let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
-
|
|
+ fn trailer_expr(
|
|
+ outer_attrs: Vec<Attribute>,
|
|
+ input: ParseStream,
|
|
+ allow_struct: AllowStruct,
|
|
+ ) -> Result<Expr> {
|
|
let atom = atom_expr(input, allow_struct)?;
|
|
let mut e = trailer_helper(input, atom)?;
|
|
|
|
let inner_attrs = e.replace_attrs(Vec::new());
|
|
let attrs = private::attrs(outer_attrs, inner_attrs);
|
|
e.replace_attrs(attrs);
|
|
Ok(e)
|
|
}
|
|
@@ -1518,28 +1471,36 @@ pub(crate) mod parsing {
|
|
let content;
|
|
e = Expr::Call(ExprCall {
|
|
attrs: Vec::new(),
|
|
func: Box::new(e),
|
|
paren_token: parenthesized!(content in input),
|
|
args: content.parse_terminated(Expr::parse)?,
|
|
});
|
|
} else if input.peek(Token![.]) && !input.peek(Token![..]) {
|
|
- let dot_token: Token![.] = input.parse()?;
|
|
+ let mut dot_token: Token![.] = input.parse()?;
|
|
|
|
- if input.peek(token::Await) {
|
|
+ let await_token: Option<token::Await> = input.parse()?;
|
|
+ if let Some(await_token) = await_token {
|
|
e = Expr::Await(ExprAwait {
|
|
attrs: Vec::new(),
|
|
base: Box::new(e),
|
|
dot_token,
|
|
- await_token: input.parse()?,
|
|
+ await_token,
|
|
});
|
|
continue;
|
|
}
|
|
|
|
+ let float_token: Option<LitFloat> = input.parse()?;
|
|
+ if let Some(float_token) = float_token {
|
|
+ if multi_index(&mut e, &mut dot_token, float_token)? {
|
|
+ continue;
|
|
+ }
|
|
+ }
|
|
+
|
|
let member: Member = input.parse()?;
|
|
let turbofish = if member.is_named() && input.peek(Token![::]) {
|
|
Some(MethodTurbofish {
|
|
colon2_token: input.parse()?,
|
|
lt_token: input.parse()?,
|
|
args: {
|
|
let mut args = Punctuated::new();
|
|
loop {
|
|
@@ -1615,20 +1576,27 @@ pub(crate) mod parsing {
|
|
e = Expr::Call(ExprCall {
|
|
attrs: Vec::new(),
|
|
func: Box::new(e),
|
|
paren_token: parenthesized!(content in input),
|
|
args: content.parse_terminated(Expr::parse)?,
|
|
});
|
|
} else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
|
|
{
|
|
+ let mut dot_token: Token![.] = input.parse()?;
|
|
+ let float_token: Option<LitFloat> = input.parse()?;
|
|
+ if let Some(float_token) = float_token {
|
|
+ if multi_index(&mut e, &mut dot_token, float_token)? {
|
|
+ continue;
|
|
+ }
|
|
+ }
|
|
e = Expr::Field(ExprField {
|
|
attrs: Vec::new(),
|
|
base: Box::new(e),
|
|
- dot_token: input.parse()?,
|
|
+ dot_token,
|
|
member: input.parse()?,
|
|
});
|
|
} else if input.peek(token::Bracket) {
|
|
let content;
|
|
e = Expr::Index(ExprIndex {
|
|
attrs: Vec::new(),
|
|
expr: Box::new(e),
|
|
bracket_token: bracketed!(content in input),
|
|
@@ -1641,17 +1609,21 @@ pub(crate) mod parsing {
|
|
|
|
Ok(e)
|
|
}
|
|
|
|
// Parse all atomic expressions which don't have to worry about precedence
|
|
// interactions, as they are fully contained.
|
|
#[cfg(feature = "full")]
|
|
fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
|
|
- if input.peek(token::Group) {
|
|
+ if input.peek(token::Group)
|
|
+ && !input.peek2(Token![::])
|
|
+ && !input.peek2(Token![!])
|
|
+ && !input.peek2(token::Brace)
|
|
+ {
|
|
input.call(expr_group).map(Expr::Group)
|
|
} else if input.peek(Lit) {
|
|
input.parse().map(Expr::Lit)
|
|
} else if input.peek(Token![async])
|
|
&& (input.peek2(token::Brace) || input.peek2(Token![move]) && input.peek3(token::Brace))
|
|
{
|
|
input.call(expr_async).map(Expr::Async)
|
|
} else if input.peek(Token![try]) && input.peek2(token::Brace) {
|
|
@@ -1663,17 +1635,16 @@ pub(crate) mod parsing {
|
|
{
|
|
expr_closure(input, allow_struct).map(Expr::Closure)
|
|
} else if input.peek(Ident)
|
|
|| input.peek(Token![::])
|
|
|| input.peek(Token![<])
|
|
|| input.peek(Token![self])
|
|
|| input.peek(Token![Self])
|
|
|| input.peek(Token![super])
|
|
- || input.peek(Token![extern])
|
|
|| input.peek(Token![crate])
|
|
{
|
|
path_or_macro_or_struct(input, allow_struct)
|
|
} else if input.peek(token::Paren) {
|
|
paren_or_tuple(input)
|
|
} else if input.peek(Token![break]) {
|
|
expr_break(input, allow_struct).map(Expr::Break)
|
|
} else if input.peek(Token![continue]) {
|
|
@@ -1735,17 +1706,16 @@ pub(crate) mod parsing {
|
|
} else if input.peek(token::Paren) {
|
|
input.call(expr_paren).map(Expr::Paren)
|
|
} else if input.peek(Ident)
|
|
|| input.peek(Token![::])
|
|
|| input.peek(Token![<])
|
|
|| input.peek(Token![self])
|
|
|| input.peek(Token![Self])
|
|
|| input.peek(Token![super])
|
|
- || input.peek(Token![extern])
|
|
|| input.peek(Token![crate])
|
|
{
|
|
input.parse().map(Expr::Path)
|
|
} else {
|
|
Err(input.error("unsupported expression; enable syn's features=[\"full\"]"))
|
|
}
|
|
}
|
|
|
|
@@ -1873,17 +1843,17 @@ pub(crate) mod parsing {
|
|
}))
|
|
} else {
|
|
Err(content.error("expected `,` or `;`"))
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
|
|
- let mut attrs = input.call(Attribute::parse_outer)?;
|
|
+ let mut attrs = input.call(expr_attrs)?;
|
|
let mut expr = if input.peek(Token![if]) {
|
|
Expr::If(input.parse()?)
|
|
} else if input.peek(Token![while]) {
|
|
Expr::While(input.parse()?)
|
|
} else if input.peek(Token![for]) {
|
|
Expr::ForLoop(input.parse()?)
|
|
} else if input.peek(Token![loop]) {
|
|
Expr::Loop(input.parse()?)
|
|
@@ -1900,17 +1870,17 @@ pub(crate) mod parsing {
|
|
let mut expr = unary_expr(input, allow_struct)?;
|
|
|
|
attrs.extend(expr.replace_attrs(Vec::new()));
|
|
expr.replace_attrs(attrs);
|
|
|
|
return parse_expr(input, expr, allow_struct, Precedence::Any);
|
|
};
|
|
|
|
- if input.peek(Token![.]) || input.peek(Token![?]) {
|
|
+ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
|
|
expr = trailer_helper(input, expr)?;
|
|
|
|
attrs.extend(expr.replace_attrs(Vec::new()));
|
|
expr.replace_attrs(attrs);
|
|
|
|
let allow_struct = AllowStruct(true);
|
|
return parse_expr(input, expr, allow_struct, Precedence::Any);
|
|
}
|
|
@@ -1946,63 +1916,48 @@ pub(crate) mod parsing {
|
|
attrs: Vec::new(),
|
|
paren_token: parenthesized!(content in input),
|
|
expr: content.parse()?,
|
|
})
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
|
|
- // TODO parse const generics as well
|
|
+ if input.peek(Lit) {
|
|
+ let lit = input.parse()?;
|
|
+ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
|
|
+ }
|
|
+
|
|
+ if input.peek(token::Brace) {
|
|
+ let block = input.call(expr::parsing::expr_block)?;
|
|
+ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
|
|
+ }
|
|
+
|
|
input.parse().map(GenericMethodArgument::Type)
|
|
}
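
With the two new branches above, a method turbofish can now carry const arguments, either a bare literal or a block. A sketch of the effect (the `samples.chunk::<8>()` call is an arbitrary illustration):

```rust
use syn::{Expr, GenericMethodArgument};

fn main() {
    let e: Expr = syn::parse_str("samples.chunk::<8>()").expect("valid expression");
    if let Expr::MethodCall(call) = e {
        let turbofish = call.turbofish.expect("turbofish present");
        // The bare literal `8` is accepted as a const argument.
        assert!(matches!(
            turbofish.args.first(),
            Some(GenericMethodArgument::Const(_))
        ));
    } else {
        panic!("expected Expr::MethodCall");
    }
}
```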
#[cfg(feature = "full")]
|
|
fn expr_let(input: ParseStream) -> Result<ExprLet> {
|
|
Ok(ExprLet {
|
|
attrs: Vec::new(),
|
|
let_token: input.parse()?,
|
|
- pat: {
|
|
- let leading_vert: Option<Token![|]> = input.parse()?;
|
|
- let pat: Pat = input.parse()?;
|
|
- if leading_vert.is_some()
|
|
- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
|
|
- {
|
|
- let mut cases = Punctuated::new();
|
|
- cases.push_value(pat);
|
|
- while input.peek(Token![|])
|
|
- && !input.peek(Token![||])
|
|
- && !input.peek(Token![|=])
|
|
- {
|
|
- let punct = input.parse()?;
|
|
- cases.push_punct(punct);
|
|
- let pat: Pat = input.parse()?;
|
|
- cases.push_value(pat);
|
|
- }
|
|
- Pat::Or(PatOr {
|
|
- attrs: Vec::new(),
|
|
- leading_vert,
|
|
- cases,
|
|
- })
|
|
- } else {
|
|
- pat
|
|
- }
|
|
- },
|
|
+ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
|
|
eq_token: input.parse()?,
|
|
- expr: Box::new(input.call(expr_no_struct)?),
|
|
+ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
|
|
})
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for ExprIf {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let attrs = input.call(Attribute::parse_outer)?;
|
|
Ok(ExprIf {
|
|
- attrs: Vec::new(),
|
|
+ attrs,
|
|
if_token: input.parse()?,
|
|
- cond: Box::new(input.call(expr_no_struct)?),
|
|
+ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
|
|
then_branch: input.parse()?,
|
|
else_branch: {
|
|
if input.peek(Token![else]) {
|
|
Some(input.call(else_block)?)
|
|
} else {
|
|
None
|
|
}
|
|
},
|
|
@@ -2028,94 +1983,81 @@ pub(crate) mod parsing {
|
|
};
|
|
|
|
Ok((else_token, Box::new(else_branch)))
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for ExprForLoop {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
let label: Option<Label> = input.parse()?;
|
|
let for_token: Token![for] = input.parse()?;
|
|
|
|
- let leading_vert: Option<Token![|]> = input.parse()?;
|
|
- let mut pat: Pat = input.parse()?;
|
|
- if leading_vert.is_some() || input.peek(Token![|]) {
|
|
- let mut cases = Punctuated::new();
|
|
- cases.push_value(pat);
|
|
- while input.peek(Token![|]) {
|
|
- let punct = input.parse()?;
|
|
- cases.push_punct(punct);
|
|
- let pat: Pat = input.parse()?;
|
|
- cases.push_value(pat);
|
|
- }
|
|
- pat = Pat::Or(PatOr {
|
|
- attrs: Vec::new(),
|
|
- leading_vert,
|
|
- cases,
|
|
- });
|
|
- }
|
|
+ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
|
|
|
|
let in_token: Token![in] = input.parse()?;
|
|
- let expr: Expr = input.call(expr_no_struct)?;
|
|
+ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
|
|
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
let stmts = content.call(Block::parse_within)?;
|
|
|
|
Ok(ExprForLoop {
|
|
- attrs: inner_attrs,
|
|
+ attrs: private::attrs(outer_attrs, inner_attrs),
|
|
label,
|
|
for_token,
|
|
pat,
|
|
in_token,
|
|
expr: Box::new(expr),
|
|
body: Block { brace_token, stmts },
|
|
})
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for ExprLoop {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
let label: Option<Label> = input.parse()?;
|
|
let loop_token: Token![loop] = input.parse()?;
|
|
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
let stmts = content.call(Block::parse_within)?;
|
|
|
|
Ok(ExprLoop {
|
|
- attrs: inner_attrs,
|
|
+ attrs: private::attrs(outer_attrs, inner_attrs),
|
|
label,
|
|
loop_token,
|
|
body: Block { brace_token, stmts },
|
|
})
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for ExprMatch {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
let match_token: Token![match] = input.parse()?;
|
|
- let expr = expr_no_struct(input)?;
|
|
+ let expr = Expr::parse_without_eager_brace(input)?;
|
|
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
|
|
let mut arms = Vec::new();
|
|
while !content.is_empty() {
|
|
arms.push(content.call(Arm::parse)?);
|
|
}
|
|
|
|
Ok(ExprMatch {
|
|
- attrs: inner_attrs,
|
|
+ attrs: private::attrs(outer_attrs, inner_attrs),
|
|
match_token,
|
|
expr: Box::new(expr),
|
|
brace_token,
|
|
arms,
|
|
})
|
|
}
|
|
}
|
|
|
|
@@ -2300,27 +2242,28 @@ pub(crate) mod parsing {
|
|
}
|
|
Ok(pat)
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for ExprWhile {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
let label: Option<Label> = input.parse()?;
|
|
let while_token: Token![while] = input.parse()?;
|
|
- let cond = expr_no_struct(input)?;
|
|
+ let cond = Expr::parse_without_eager_brace(input)?;
|
|
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
let stmts = content.call(Block::parse_within)?;
|
|
|
|
Ok(ExprWhile {
|
|
- attrs: inner_attrs,
|
|
+ attrs: private::attrs(outer_attrs, inner_attrs),
|
|
label,
|
|
while_token,
|
|
cond: Box::new(cond),
|
|
body: Block { brace_token, stmts },
|
|
})
|
|
}
|
|
}
|
|
|
|
@@ -2394,16 +2337,17 @@ pub(crate) mod parsing {
|
|
}
|
|
},
|
|
})
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for FieldValue {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let attrs = input.call(Attribute::parse_outer)?;
|
|
let member: Member = input.parse()?;
|
|
let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
|
|
let colon_token: Token![:] = input.parse()?;
|
|
let value: Expr = input.parse()?;
|
|
(Some(colon_token), value)
|
|
} else if let Member::Named(ident) = &member {
|
|
let value = Expr::Path(ExprPath {
|
|
attrs: Vec::new(),
|
|
@@ -2411,73 +2355,63 @@ pub(crate) mod parsing {
|
|
path: Path::from(ident.clone()),
|
|
});
|
|
(None, value)
|
|
} else {
|
|
unreachable!()
|
|
};
|
|
|
|
Ok(FieldValue {
|
|
- attrs: Vec::new(),
|
|
+ attrs,
|
|
member,
|
|
colon_token,
|
|
expr: value,
|
|
})
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
fn expr_struct_helper(
|
|
input: ParseStream,
|
|
outer_attrs: Vec<Attribute>,
|
|
path: Path,
|
|
) -> Result<ExprStruct> {
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
+ let attrs = private::attrs(outer_attrs, inner_attrs);
|
|
|
|
let mut fields = Punctuated::new();
|
|
- loop {
|
|
- let attrs = content.call(Attribute::parse_outer)?;
|
|
- // TODO: optimize using advance_to
|
|
- if content.fork().parse::<Member>().is_err() {
|
|
- if attrs.is_empty() {
|
|
- break;
|
|
- } else {
|
|
- return Err(content.error("expected struct field"));
|
|
- }
|
|
+ while !content.is_empty() {
|
|
+ if content.peek(Token![..]) {
|
|
+ return Ok(ExprStruct {
|
|
+ attrs,
|
|
+ brace_token,
|
|
+ path,
|
|
+ fields,
|
|
+ dot2_token: Some(content.parse()?),
|
|
+ rest: Some(Box::new(content.parse()?)),
|
|
+ });
|
|
}
|
|
|
|
- fields.push(FieldValue {
|
|
- attrs,
|
|
- ..content.parse()?
|
|
- });
|
|
-
|
|
- if !content.peek(Token![,]) {
|
|
+ fields.push(content.parse()?);
|
|
+ if content.is_empty() {
|
|
break;
|
|
}
|
|
let punct: Token![,] = content.parse()?;
|
|
fields.push_punct(punct);
|
|
}
|
|
|
|
- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
|
|
- let dot2_token: Token![..] = content.parse()?;
|
|
- let rest: Expr = content.parse()?;
|
|
- (Some(dot2_token), Some(Box::new(rest)))
|
|
- } else {
|
|
- (None, None)
|
|
- };
|
|
-
|
|
Ok(ExprStruct {
|
|
- attrs: private::attrs(outer_attrs, inner_attrs),
|
|
+ attrs,
|
|
brace_token,
|
|
path,
|
|
fields,
|
|
- dot2_token,
|
|
- rest,
|
|
+ dot2_token: None,
|
|
+ rest: None,
|
|
})
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
fn expr_unsafe(input: ParseStream) -> Result<ExprUnsafe> {
|
|
let unsafe_token: Token![unsafe] = input.parse()?;
|
|
|
|
let content;
|
|
@@ -2572,37 +2506,17 @@ pub(crate) mod parsing {
|
|
}
|
|
|
|
#[cfg(feature = "full")]
|
|
impl Parse for Arm {
|
|
fn parse(input: ParseStream) -> Result<Arm> {
|
|
let requires_comma;
|
|
Ok(Arm {
|
|
attrs: input.call(Attribute::parse_outer)?,
|
|
- pat: {
|
|
- let leading_vert: Option<Token![|]> = input.parse()?;
|
|
- let pat: Pat = input.parse()?;
|
|
- if leading_vert.is_some() || input.peek(Token![|]) {
|
|
- let mut cases = Punctuated::new();
|
|
- cases.push_value(pat);
|
|
- while input.peek(Token![|]) {
|
|
- let punct = input.parse()?;
|
|
- cases.push_punct(punct);
|
|
- let pat: Pat = input.parse()?;
|
|
- cases.push_value(pat);
|
|
- }
|
|
- Pat::Or(PatOr {
|
|
- attrs: Vec::new(),
|
|
- leading_vert,
|
|
- cases,
|
|
- })
|
|
- } else {
|
|
- pat
|
|
- }
|
|
- },
|
|
+ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
|
|
guard: {
|
|
if input.peek(Token![if]) {
|
|
let if_token: Token![if] = input.parse()?;
|
|
let guard: Expr = input.parse()?;
|
|
Some((if_token, Box::new(guard)))
|
|
} else {
|
|
None
|
|
}
|
|
@@ -2636,16 +2550,36 @@ pub(crate) mod parsing {
|
|
span: lit.span(),
|
|
})
|
|
} else {
|
|
Err(Error::new(lit.span(), "expected unsuffixed integer"))
|
|
}
|
|
}
|
|
}
|
|
|
|
+ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
|
|
+ let mut float_repr = float.to_string();
|
|
+ let trailing_dot = float_repr.ends_with('.');
|
|
+ if trailing_dot {
|
|
+ float_repr.truncate(float_repr.len() - 1);
|
|
+ }
|
|
+ for part in float_repr.split('.') {
|
|
+ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
|
|
+ let base = mem::replace(e, Expr::__Nonexhaustive);
|
|
+ *e = Expr::Field(ExprField {
|
|
+ attrs: Vec::new(),
|
|
+ base: Box::new(base),
|
|
+ dot_token: Token![.](dot_token.span),
|
|
+ member: Member::Unnamed(index),
|
|
+ });
|
|
+ *dot_token = Token![.](float.span());
|
|
+ }
|
|
+ Ok(!trailing_dot)
|
|
+ }
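
`multi_index` exists because the lexer turns the `0.1` in `pair.0.1` into a single float literal; the helper splits that literal back into successive unnamed field accesses. The observable behaviour, as a sketch:

```rust
use syn::{Expr, Member};

fn main() {
    let e: Expr = syn::parse_str("pair.0.1").expect("valid expression");

    // The outermost node is the `.1` access applied to `pair.0`.
    let outer = match e {
        Expr::Field(field) => field,
        _ => panic!("expected a field access"),
    };
    assert!(matches!(outer.member, Member::Unnamed(ref i) if i.index == 1));
    assert!(matches!(*outer.base, Expr::Field(_)));
}
```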
+
|
|
#[cfg(feature = "full")]
|
|
impl Member {
|
|
fn is_named(&self) -> bool {
|
|
match *self {
|
|
Member::Named(_) => true,
|
|
Member::Unnamed(_) => false,
|
|
}
|
|
}
|
|
diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
|
|
--- a/third_party/rust/syn/src/ext.rs
|
|
+++ b/third_party/rust/syn/src/ext.rs
|
|
@@ -1,27 +1,27 @@
|
|
//! Extension traits to provide parsing methods on foreign types.
|
|
//!
|
|
-//! *This module is available if Syn is built with the `"parsing"` feature.*
|
|
+//! *This module is available only if Syn is built with the `"parsing"` feature.*
|
|
|
|
use proc_macro2::Ident;
|
|
|
|
use crate::parse::{ParseStream, Result};
|
|
|
|
use crate::buffer::Cursor;
|
|
use crate::parse::Peek;
|
|
use crate::sealed::lookahead;
|
|
use crate::token::CustomToken;
|
|
|
|
/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
|
|
///
|
|
/// This trait is sealed and cannot be implemented for types outside of Syn. It
|
|
/// is implemented only for `proc_macro2::Ident`.
|
|
///
|
|
-/// *This trait is available if Syn is built with the `"parsing"` feature.*
|
|
+/// *This trait is available only if Syn is built with the `"parsing"` feature.*
|
|
pub trait IdentExt: Sized + private::Sealed {
|
|
/// Parses any identifier including keywords.
|
|
///
|
|
/// This is useful when parsing macro input which allows Rust keywords as
|
|
/// identifiers.
|
|
///
|
|
/// # Example
|
|
///
|
|
@@ -124,12 +124,18 @@ impl lookahead::Sealed for private::Peek
|
|
|
|
mod private {
|
|
use proc_macro2::Ident;
|
|
|
|
pub trait Sealed {}
|
|
|
|
impl Sealed for Ident {}
|
|
|
|
- #[derive(Copy, Clone)]
|
|
pub struct PeekFn;
|
|
pub struct IdentAny;
|
|
+
|
|
+ impl Copy for PeekFn {}
|
|
+ impl Clone for PeekFn {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+ }
|
|
}
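
For readers unfamiliar with this extension trait, a hedged sketch of its typical use: accepting Rust keywords as keys in macro input. The `KeyValue` grammar here is hypothetical:

```rust
use syn::ext::IdentExt;
use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, LitStr, Token};

/// Parses input like `type = "json"`, where the key may be a Rust keyword.
struct KeyValue {
    key: Ident,
    _eq_token: Token![=],
    value: LitStr,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(KeyValue {
            // `Ident::parse_any` accepts keywords such as `type` or `match`.
            key: input.call(Ident::parse_any)?,
            _eq_token: input.parse()?,
            value: input.parse()?,
        })
    }
}

fn main() {
    let kv: KeyValue = syn::parse_str(r#"type = "json""#).expect("valid key-value");
    assert_eq!(kv.key.to_string(), "type");
    assert_eq!(kv.value.value(), "json");
}
```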
diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
|
|
--- a/third_party/rust/syn/src/file.rs
|
|
+++ b/third_party/rust/syn/src/file.rs
|
|
@@ -1,14 +1,14 @@
|
|
use super::*;
|
|
|
|
ast_struct! {
|
|
/// A complete file of Rust source code.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Example
|
|
///
|
|
/// Parse a Rust source file into a `syn::File` and print out a debug
|
|
/// representation of the syntax tree.
|
|
///
|
|
/// ```
|
|
/// use std::env;
|
|
@@ -32,16 +32,18 @@ ast_struct! {
|
|
/// };
|
|
///
|
|
/// let mut file = File::open(&filename).expect("Unable to open file");
|
|
///
|
|
/// let mut src = String::new();
|
|
/// file.read_to_string(&mut src).expect("Unable to read file");
|
|
///
|
|
/// let syntax = syn::parse_file(&src).expect("Unable to parse file");
|
|
+ ///
|
|
+ /// // Debug impl is available if Syn is built with "extra-traits" feature.
|
|
/// println!("{:#?}", syntax);
|
|
/// }
|
|
/// ```
|
|
///
|
|
/// Running with its own source code as input, this program prints output
|
|
/// that begins with:
|
|
///
|
|
/// ```text
|
|
diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/syn/src/gen/clone.rs
|
|
@@ -0,0 +1,2051 @@
|
|
+// This file is @generated by syn-internal-codegen.
|
|
+// It is not intended for manual editing.
|
|
+
|
|
+#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
|
|
+use crate::*;
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Abi {
|
|
+ fn clone(&self) -> Self {
|
|
+ Abi {
|
|
+ extern_token: self.extern_token.clone(),
|
|
+ name: self.name.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for AngleBracketedGenericArguments {
|
|
+ fn clone(&self) -> Self {
|
|
+ AngleBracketedGenericArguments {
|
|
+ colon2_token: self.colon2_token.clone(),
|
|
+ lt_token: self.lt_token.clone(),
|
|
+ args: self.args.clone(),
|
|
+ gt_token: self.gt_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Arm {
|
|
+ fn clone(&self) -> Self {
|
|
+ Arm {
|
|
+ attrs: self.attrs.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ guard: self.guard.clone(),
|
|
+ fat_arrow_token: self.fat_arrow_token.clone(),
|
|
+ body: self.body.clone(),
|
|
+ comma: self.comma.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Copy for AttrStyle {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for AttrStyle {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Attribute {
|
|
+ fn clone(&self) -> Self {
|
|
+ Attribute {
|
|
+ pound_token: self.pound_token.clone(),
|
|
+ style: self.style.clone(),
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ path: self.path.clone(),
|
|
+ tokens: self.tokens.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for BareFnArg {
|
|
+ fn clone(&self) -> Self {
|
|
+ BareFnArg {
|
|
+ attrs: self.attrs.clone(),
|
|
+ name: self.name.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Copy for BinOp {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for BinOp {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Binding {
|
|
+ fn clone(&self) -> Self {
|
|
+ Binding {
|
|
+ ident: self.ident.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Block {
|
|
+ fn clone(&self) -> Self {
|
|
+ Block {
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ stmts: self.stmts.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for BoundLifetimes {
|
|
+ fn clone(&self) -> Self {
|
|
+ BoundLifetimes {
|
|
+ for_token: self.for_token.clone(),
|
|
+ lt_token: self.lt_token.clone(),
|
|
+ lifetimes: self.lifetimes.clone(),
|
|
+ gt_token: self.gt_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ConstParam {
|
|
+ fn clone(&self) -> Self {
|
|
+ ConstParam {
|
|
+ attrs: self.attrs.clone(),
|
|
+ const_token: self.const_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ default: self.default.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Constraint {
|
|
+ fn clone(&self) -> Self {
|
|
+ Constraint {
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Clone for Data {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Data::Struct(v0) => Data::Struct(v0.clone()),
|
|
+ Data::Enum(v0) => Data::Enum(v0.clone()),
|
|
+ Data::Union(v0) => Data::Union(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Clone for DataEnum {
|
|
+ fn clone(&self) -> Self {
|
|
+ DataEnum {
|
|
+ enum_token: self.enum_token.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ variants: self.variants.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Clone for DataStruct {
|
|
+ fn clone(&self) -> Self {
|
|
+ DataStruct {
|
|
+ struct_token: self.struct_token.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Clone for DataUnion {
|
|
+ fn clone(&self) -> Self {
|
|
+ DataUnion {
|
|
+ union_token: self.union_token.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Clone for DeriveInput {
|
|
+ fn clone(&self) -> Self {
|
|
+ DeriveInput {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ data: self.data.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Expr {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Array(v0) => Expr::Array(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Assign(v0) => Expr::Assign(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Async(v0) => Expr::Async(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Await(v0) => Expr::Await(v0.clone()),
|
|
+ Expr::Binary(v0) => Expr::Binary(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Block(v0) => Expr::Block(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Box(v0) => Expr::Box(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Break(v0) => Expr::Break(v0.clone()),
|
|
+ Expr::Call(v0) => Expr::Call(v0.clone()),
|
|
+ Expr::Cast(v0) => Expr::Cast(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Closure(v0) => Expr::Closure(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Continue(v0) => Expr::Continue(v0.clone()),
|
|
+ Expr::Field(v0) => Expr::Field(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Group(v0) => Expr::Group(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::If(v0) => Expr::If(v0.clone()),
|
|
+ Expr::Index(v0) => Expr::Index(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Let(v0) => Expr::Let(v0.clone()),
|
|
+ Expr::Lit(v0) => Expr::Lit(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Loop(v0) => Expr::Loop(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Macro(v0) => Expr::Macro(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Match(v0) => Expr::Match(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
|
|
+ Expr::Paren(v0) => Expr::Paren(v0.clone()),
|
|
+ Expr::Path(v0) => Expr::Path(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Range(v0) => Expr::Range(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Reference(v0) => Expr::Reference(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Return(v0) => Expr::Return(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Struct(v0) => Expr::Struct(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Try(v0) => Expr::Try(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Type(v0) => Expr::Type(v0.clone()),
|
|
+ Expr::Unary(v0) => Expr::Unary(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
|
|
+ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::While(v0) => Expr::While(v0.clone()),
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Yield(v0) => Expr::Yield(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprArray {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprArray {
|
|
+ attrs: self.attrs.clone(),
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ elems: self.elems.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprAssign {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprAssign {
|
|
+ attrs: self.attrs.clone(),
|
|
+ left: self.left.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ right: self.right.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprAssignOp {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprAssignOp {
|
|
+ attrs: self.attrs.clone(),
|
|
+ left: self.left.clone(),
|
|
+ op: self.op.clone(),
|
|
+ right: self.right.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprAsync {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprAsync {
|
|
+ attrs: self.attrs.clone(),
|
|
+ async_token: self.async_token.clone(),
|
|
+ capture: self.capture.clone(),
|
|
+ block: self.block.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprAwait {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprAwait {
|
|
+ attrs: self.attrs.clone(),
|
|
+ base: self.base.clone(),
|
|
+ dot_token: self.dot_token.clone(),
|
|
+ await_token: self.await_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprBinary {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprBinary {
|
|
+ attrs: self.attrs.clone(),
|
|
+ left: self.left.clone(),
|
|
+ op: self.op.clone(),
|
|
+ right: self.right.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprBlock {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprBlock {
|
|
+ attrs: self.attrs.clone(),
|
|
+ label: self.label.clone(),
|
|
+ block: self.block.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprBox {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprBox {
|
|
+ attrs: self.attrs.clone(),
|
|
+ box_token: self.box_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprBreak {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprBreak {
|
|
+ attrs: self.attrs.clone(),
|
|
+ break_token: self.break_token.clone(),
|
|
+ label: self.label.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprCall {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprCall {
|
|
+ attrs: self.attrs.clone(),
|
|
+ func: self.func.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ args: self.args.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprCast {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprCast {
|
|
+ attrs: self.attrs.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ as_token: self.as_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprClosure {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprClosure {
|
|
+ attrs: self.attrs.clone(),
|
|
+ asyncness: self.asyncness.clone(),
|
|
+ movability: self.movability.clone(),
|
|
+ capture: self.capture.clone(),
|
|
+ or1_token: self.or1_token.clone(),
|
|
+ inputs: self.inputs.clone(),
|
|
+ or2_token: self.or2_token.clone(),
|
|
+ output: self.output.clone(),
|
|
+ body: self.body.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprContinue {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprContinue {
|
|
+ attrs: self.attrs.clone(),
|
|
+ continue_token: self.continue_token.clone(),
|
|
+ label: self.label.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprField {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprField {
|
|
+ attrs: self.attrs.clone(),
|
|
+ base: self.base.clone(),
|
|
+ dot_token: self.dot_token.clone(),
|
|
+ member: self.member.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprForLoop {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprForLoop {
|
|
+ attrs: self.attrs.clone(),
|
|
+ label: self.label.clone(),
|
|
+ for_token: self.for_token.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ in_token: self.in_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ body: self.body.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprGroup {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprGroup {
|
|
+ attrs: self.attrs.clone(),
|
|
+ group_token: self.group_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprIf {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprIf {
|
|
+ attrs: self.attrs.clone(),
|
|
+ if_token: self.if_token.clone(),
|
|
+ cond: self.cond.clone(),
|
|
+ then_branch: self.then_branch.clone(),
|
|
+ else_branch: self.else_branch.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprIndex {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprIndex {
|
|
+ attrs: self.attrs.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ index: self.index.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprLet {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprLet {
|
|
+ attrs: self.attrs.clone(),
|
|
+ let_token: self.let_token.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprLit {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprLit {
|
|
+ attrs: self.attrs.clone(),
|
|
+ lit: self.lit.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprLoop {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprLoop {
|
|
+ attrs: self.attrs.clone(),
|
|
+ label: self.label.clone(),
|
|
+ loop_token: self.loop_token.clone(),
|
|
+ body: self.body.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprMacro {
|
|
+ attrs: self.attrs.clone(),
|
|
+ mac: self.mac.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprMatch {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprMatch {
|
|
+ attrs: self.attrs.clone(),
|
|
+ match_token: self.match_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ arms: self.arms.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprMethodCall {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprMethodCall {
|
|
+ attrs: self.attrs.clone(),
|
|
+ receiver: self.receiver.clone(),
|
|
+ dot_token: self.dot_token.clone(),
|
|
+ method: self.method.clone(),
|
|
+ turbofish: self.turbofish.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ args: self.args.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprParen {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprParen {
|
|
+ attrs: self.attrs.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprPath {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprPath {
|
|
+ attrs: self.attrs.clone(),
|
|
+ qself: self.qself.clone(),
|
|
+ path: self.path.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprRange {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprRange {
|
|
+ attrs: self.attrs.clone(),
|
|
+ from: self.from.clone(),
|
|
+ limits: self.limits.clone(),
|
|
+ to: self.to.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprReference {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprReference {
|
|
+ attrs: self.attrs.clone(),
|
|
+ and_token: self.and_token.clone(),
|
|
+ raw: self.raw.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprRepeat {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprRepeat {
|
|
+ attrs: self.attrs.clone(),
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ len: self.len.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprReturn {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprReturn {
|
|
+ attrs: self.attrs.clone(),
|
|
+ return_token: self.return_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprStruct {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprStruct {
|
|
+ attrs: self.attrs.clone(),
|
|
+ path: self.path.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ dot2_token: self.dot2_token.clone(),
|
|
+ rest: self.rest.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprTry {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprTry {
|
|
+ attrs: self.attrs.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ question_token: self.question_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprTryBlock {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprTryBlock {
|
|
+ attrs: self.attrs.clone(),
|
|
+ try_token: self.try_token.clone(),
|
|
+ block: self.block.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprTuple {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprTuple {
|
|
+ attrs: self.attrs.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ elems: self.elems.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprType {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprType {
|
|
+ attrs: self.attrs.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ExprUnary {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprUnary {
|
|
+ attrs: self.attrs.clone(),
|
|
+ op: self.op.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprUnsafe {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprUnsafe {
|
|
+ attrs: self.attrs.clone(),
|
|
+ unsafe_token: self.unsafe_token.clone(),
|
|
+ block: self.block.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprWhile {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprWhile {
|
|
+ attrs: self.attrs.clone(),
|
|
+ label: self.label.clone(),
|
|
+ while_token: self.while_token.clone(),
|
|
+ cond: self.cond.clone(),
|
|
+ body: self.body.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ExprYield {
|
|
+ fn clone(&self) -> Self {
|
|
+ ExprYield {
|
|
+ attrs: self.attrs.clone(),
|
|
+ yield_token: self.yield_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Field {
|
|
+ fn clone(&self) -> Self {
|
|
+ Field {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for FieldPat {
|
|
+ fn clone(&self) -> Self {
|
|
+ FieldPat {
|
|
+ attrs: self.attrs.clone(),
|
|
+ member: self.member.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for FieldValue {
|
|
+ fn clone(&self) -> Self {
|
|
+ FieldValue {
|
|
+ attrs: self.attrs.clone(),
|
|
+ member: self.member.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Fields {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Fields::Named(v0) => Fields::Named(v0.clone()),
|
|
+ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
|
|
+ Fields::Unit => Fields::Unit,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for FieldsNamed {
|
|
+ fn clone(&self) -> Self {
|
|
+ FieldsNamed {
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ named: self.named.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for FieldsUnnamed {
|
|
+ fn clone(&self) -> Self {
|
|
+ FieldsUnnamed {
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ unnamed: self.unnamed.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for File {
|
|
+ fn clone(&self) -> Self {
|
|
+ File {
|
|
+ shebang: self.shebang.clone(),
|
|
+ attrs: self.attrs.clone(),
|
|
+ items: self.items.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for FnArg {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
|
|
+ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ForeignItem {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
|
|
+ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
|
|
+ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
|
|
+ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
|
|
+ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ForeignItemFn {
|
|
+ fn clone(&self) -> Self {
|
|
+ ForeignItemFn {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ sig: self.sig.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ForeignItemMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ ForeignItemMacro {
|
|
+ attrs: self.attrs.clone(),
|
|
+ mac: self.mac.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ForeignItemStatic {
|
|
+ fn clone(&self) -> Self {
|
|
+ ForeignItemStatic {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ static_token: self.static_token.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ForeignItemType {
|
|
+ fn clone(&self) -> Self {
|
|
+ ForeignItemType {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ type_token: self.type_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for GenericArgument {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
|
|
+ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
|
|
+ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
|
|
+ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
|
|
+ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for GenericMethodArgument {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
|
|
+ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for GenericParam {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
|
|
+ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
|
|
+ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Generics {
|
|
+ fn clone(&self) -> Self {
|
|
+ Generics {
|
|
+ lt_token: self.lt_token.clone(),
|
|
+ params: self.params.clone(),
|
|
+ gt_token: self.gt_token.clone(),
|
|
+ where_clause: self.where_clause.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ImplItem {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
|
|
+ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
|
|
+ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
|
|
+ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
|
|
+ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ImplItemConst {
|
|
+ fn clone(&self) -> Self {
|
|
+ ImplItemConst {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ defaultness: self.defaultness.clone(),
|
|
+ const_token: self.const_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ImplItemMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ ImplItemMacro {
|
|
+ attrs: self.attrs.clone(),
|
|
+ mac: self.mac.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ImplItemMethod {
|
|
+ fn clone(&self) -> Self {
|
|
+ ImplItemMethod {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ defaultness: self.defaultness.clone(),
|
|
+ sig: self.sig.clone(),
|
|
+ block: self.block.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ImplItemType {
|
|
+ fn clone(&self) -> Self {
|
|
+ ImplItemType {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ defaultness: self.defaultness.clone(),
|
|
+ type_token: self.type_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Index {
|
|
+ fn clone(&self) -> Self {
|
|
+ Index {
|
|
+ index: self.index.clone(),
|
|
+ span: self.span.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Item {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Item::Const(v0) => Item::Const(v0.clone()),
|
|
+ Item::Enum(v0) => Item::Enum(v0.clone()),
|
|
+ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
|
|
+ Item::Fn(v0) => Item::Fn(v0.clone()),
|
|
+ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
|
|
+ Item::Impl(v0) => Item::Impl(v0.clone()),
|
|
+ Item::Macro(v0) => Item::Macro(v0.clone()),
|
|
+ Item::Macro2(v0) => Item::Macro2(v0.clone()),
|
|
+ Item::Mod(v0) => Item::Mod(v0.clone()),
|
|
+ Item::Static(v0) => Item::Static(v0.clone()),
|
|
+ Item::Struct(v0) => Item::Struct(v0.clone()),
|
|
+ Item::Trait(v0) => Item::Trait(v0.clone()),
|
|
+ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
|
|
+ Item::Type(v0) => Item::Type(v0.clone()),
|
|
+ Item::Union(v0) => Item::Union(v0.clone()),
|
|
+ Item::Use(v0) => Item::Use(v0.clone()),
|
|
+ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemConst {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemConst {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ const_token: self.const_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemEnum {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemEnum {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ enum_token: self.enum_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ variants: self.variants.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemExternCrate {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemExternCrate {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ extern_token: self.extern_token.clone(),
|
|
+ crate_token: self.crate_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ rename: self.rename.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemFn {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemFn {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ sig: self.sig.clone(),
|
|
+ block: self.block.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemForeignMod {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemForeignMod {
|
|
+ attrs: self.attrs.clone(),
|
|
+ abi: self.abi.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ items: self.items.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemImpl {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemImpl {
|
|
+ attrs: self.attrs.clone(),
|
|
+ defaultness: self.defaultness.clone(),
|
|
+ unsafety: self.unsafety.clone(),
|
|
+ impl_token: self.impl_token.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ trait_: self.trait_.clone(),
|
|
+ self_ty: self.self_ty.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ items: self.items.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemMacro {
|
|
+ attrs: self.attrs.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ mac: self.mac.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemMacro2 {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemMacro2 {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ macro_token: self.macro_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ rules: self.rules.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemMod {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemMod {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ mod_token: self.mod_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ content: self.content.clone(),
|
|
+ semi: self.semi.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemStatic {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemStatic {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ static_token: self.static_token.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemStruct {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemStruct {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ struct_token: self.struct_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemTrait {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemTrait {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ unsafety: self.unsafety.clone(),
|
|
+ auto_token: self.auto_token.clone(),
|
|
+ trait_token: self.trait_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ supertraits: self.supertraits.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ items: self.items.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemTraitAlias {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemTraitAlias {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ trait_token: self.trait_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemType {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemType {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ type_token: self.type_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemUnion {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemUnion {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ union_token: self.union_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for ItemUse {
|
|
+ fn clone(&self) -> Self {
|
|
+ ItemUse {
|
|
+ attrs: self.attrs.clone(),
|
|
+ vis: self.vis.clone(),
|
|
+ use_token: self.use_token.clone(),
|
|
+ leading_colon: self.leading_colon.clone(),
|
|
+ tree: self.tree.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Label {
|
|
+ fn clone(&self) -> Self {
|
|
+ Label {
|
|
+ name: self.name.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for LifetimeDef {
|
|
+ fn clone(&self) -> Self {
|
|
+ LifetimeDef {
|
|
+ attrs: self.attrs.clone(),
|
|
+ lifetime: self.lifetime.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+impl Clone for Lit {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Lit::Str(v0) => Lit::Str(v0.clone()),
|
|
+ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
|
|
+ Lit::Byte(v0) => Lit::Byte(v0.clone()),
|
|
+ Lit::Char(v0) => Lit::Char(v0.clone()),
|
|
+ Lit::Int(v0) => Lit::Int(v0.clone()),
|
|
+ Lit::Float(v0) => Lit::Float(v0.clone()),
|
|
+ Lit::Bool(v0) => Lit::Bool(v0.clone()),
|
|
+ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+impl Clone for LitBool {
|
|
+ fn clone(&self) -> Self {
|
|
+ LitBool {
|
|
+ value: self.value.clone(),
|
|
+ span: self.span.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Local {
|
|
+ fn clone(&self) -> Self {
|
|
+ Local {
|
|
+ attrs: self.attrs.clone(),
|
|
+ let_token: self.let_token.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ init: self.init.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Macro {
|
|
+ fn clone(&self) -> Self {
|
|
+ Macro {
|
|
+ path: self.path.clone(),
|
|
+ bang_token: self.bang_token.clone(),
|
|
+ delimiter: self.delimiter.clone(),
|
|
+ tokens: self.tokens.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for MacroDelimiter {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
|
|
+ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
|
|
+ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Member {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Member::Named(v0) => Member::Named(v0.clone()),
|
|
+ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Meta {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Meta::Path(v0) => Meta::Path(v0.clone()),
|
|
+ Meta::List(v0) => Meta::List(v0.clone()),
|
|
+ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for MetaList {
|
|
+ fn clone(&self) -> Self {
|
|
+ MetaList {
|
|
+ path: self.path.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ nested: self.nested.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for MetaNameValue {
|
|
+ fn clone(&self) -> Self {
|
|
+ MetaNameValue {
|
|
+ path: self.path.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ lit: self.lit.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for MethodTurbofish {
|
|
+ fn clone(&self) -> Self {
|
|
+ MethodTurbofish {
|
|
+ colon2_token: self.colon2_token.clone(),
|
|
+ lt_token: self.lt_token.clone(),
|
|
+ args: self.args.clone(),
|
|
+ gt_token: self.gt_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for NestedMeta {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
|
|
+ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ParenthesizedGenericArguments {
|
|
+ fn clone(&self) -> Self {
|
|
+ ParenthesizedGenericArguments {
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ inputs: self.inputs.clone(),
|
|
+ output: self.output.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Pat {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Pat::Box(v0) => Pat::Box(v0.clone()),
|
|
+ Pat::Ident(v0) => Pat::Ident(v0.clone()),
|
|
+ Pat::Lit(v0) => Pat::Lit(v0.clone()),
|
|
+ Pat::Macro(v0) => Pat::Macro(v0.clone()),
|
|
+ Pat::Or(v0) => Pat::Or(v0.clone()),
|
|
+ Pat::Path(v0) => Pat::Path(v0.clone()),
|
|
+ Pat::Range(v0) => Pat::Range(v0.clone()),
|
|
+ Pat::Reference(v0) => Pat::Reference(v0.clone()),
|
|
+ Pat::Rest(v0) => Pat::Rest(v0.clone()),
|
|
+ Pat::Slice(v0) => Pat::Slice(v0.clone()),
|
|
+ Pat::Struct(v0) => Pat::Struct(v0.clone()),
|
|
+ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
|
|
+ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
|
|
+ Pat::Type(v0) => Pat::Type(v0.clone()),
|
|
+ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
|
|
+ Pat::Wild(v0) => Pat::Wild(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatBox {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatBox {
|
|
+ attrs: self.attrs.clone(),
|
|
+ box_token: self.box_token.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatIdent {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatIdent {
|
|
+ attrs: self.attrs.clone(),
|
|
+ by_ref: self.by_ref.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ subpat: self.subpat.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatLit {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatLit {
|
|
+ attrs: self.attrs.clone(),
|
|
+ expr: self.expr.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatMacro {
|
|
+ attrs: self.attrs.clone(),
|
|
+ mac: self.mac.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatOr {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatOr {
|
|
+ attrs: self.attrs.clone(),
|
|
+ leading_vert: self.leading_vert.clone(),
|
|
+ cases: self.cases.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatPath {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatPath {
|
|
+ attrs: self.attrs.clone(),
|
|
+ qself: self.qself.clone(),
|
|
+ path: self.path.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatRange {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatRange {
|
|
+ attrs: self.attrs.clone(),
|
|
+ lo: self.lo.clone(),
|
|
+ limits: self.limits.clone(),
|
|
+ hi: self.hi.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatReference {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatReference {
|
|
+ attrs: self.attrs.clone(),
|
|
+ and_token: self.and_token.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatRest {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatRest {
|
|
+ attrs: self.attrs.clone(),
|
|
+ dot2_token: self.dot2_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatSlice {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatSlice {
|
|
+ attrs: self.attrs.clone(),
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ elems: self.elems.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatStruct {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatStruct {
|
|
+ attrs: self.attrs.clone(),
|
|
+ path: self.path.clone(),
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ dot2_token: self.dot2_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatTuple {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatTuple {
|
|
+ attrs: self.attrs.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ elems: self.elems.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatTupleStruct {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatTupleStruct {
|
|
+ attrs: self.attrs.clone(),
|
|
+ path: self.path.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatType {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatType {
|
|
+ attrs: self.attrs.clone(),
|
|
+ pat: self.pat.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for PatWild {
|
|
+ fn clone(&self) -> Self {
|
|
+ PatWild {
|
|
+ attrs: self.attrs.clone(),
|
|
+ underscore_token: self.underscore_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Path {
|
|
+ fn clone(&self) -> Self {
|
|
+ Path {
|
|
+ leading_colon: self.leading_colon.clone(),
|
|
+ segments: self.segments.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for PathArguments {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ PathArguments::None => PathArguments::None,
|
|
+ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
|
|
+ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for PathSegment {
|
|
+ fn clone(&self) -> Self {
|
|
+ PathSegment {
|
|
+ ident: self.ident.clone(),
|
|
+ arguments: self.arguments.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for PredicateEq {
|
|
+ fn clone(&self) -> Self {
|
|
+ PredicateEq {
|
|
+ lhs_ty: self.lhs_ty.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ rhs_ty: self.rhs_ty.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for PredicateLifetime {
|
|
+ fn clone(&self) -> Self {
|
|
+ PredicateLifetime {
|
|
+ lifetime: self.lifetime.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for PredicateType {
|
|
+ fn clone(&self) -> Self {
|
|
+ PredicateType {
|
|
+ lifetimes: self.lifetimes.clone(),
|
|
+ bounded_ty: self.bounded_ty.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for QSelf {
|
|
+ fn clone(&self) -> Self {
|
|
+ QSelf {
|
|
+ lt_token: self.lt_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ position: self.position.clone(),
|
|
+ as_token: self.as_token.clone(),
|
|
+ gt_token: self.gt_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Copy for RangeLimits {}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for RangeLimits {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Receiver {
|
|
+ fn clone(&self) -> Self {
|
|
+ Receiver {
|
|
+ attrs: self.attrs.clone(),
|
|
+ reference: self.reference.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ self_token: self.self_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for ReturnType {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ ReturnType::Default => ReturnType::Default,
|
|
+ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Signature {
|
|
+ fn clone(&self) -> Self {
|
|
+ Signature {
|
|
+ constness: self.constness.clone(),
|
|
+ asyncness: self.asyncness.clone(),
|
|
+ unsafety: self.unsafety.clone(),
|
|
+ abi: self.abi.clone(),
|
|
+ fn_token: self.fn_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ inputs: self.inputs.clone(),
|
|
+ variadic: self.variadic.clone(),
|
|
+ output: self.output.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for Stmt {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Stmt::Local(v0) => Stmt::Local(v0.clone()),
|
|
+ Stmt::Item(v0) => Stmt::Item(v0.clone()),
|
|
+ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
|
|
+ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TraitBound {
|
|
+ fn clone(&self) -> Self {
|
|
+ TraitBound {
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ modifier: self.modifier.clone(),
|
|
+ lifetimes: self.lifetimes.clone(),
|
|
+ path: self.path.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Copy for TraitBoundModifier {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TraitBoundModifier {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for TraitItem {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
|
|
+ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
|
|
+ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
|
|
+ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
|
|
+ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for TraitItemConst {
|
|
+ fn clone(&self) -> Self {
|
|
+ TraitItemConst {
|
|
+ attrs: self.attrs.clone(),
|
|
+ const_token: self.const_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ ty: self.ty.clone(),
|
|
+ default: self.default.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for TraitItemMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ TraitItemMacro {
|
|
+ attrs: self.attrs.clone(),
|
|
+ mac: self.mac.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for TraitItemMethod {
|
|
+ fn clone(&self) -> Self {
|
|
+ TraitItemMethod {
|
|
+ attrs: self.attrs.clone(),
|
|
+ sig: self.sig.clone(),
|
|
+ default: self.default.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for TraitItemType {
|
|
+ fn clone(&self) -> Self {
|
|
+ TraitItemType {
|
|
+ attrs: self.attrs.clone(),
|
|
+ type_token: self.type_token.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ generics: self.generics.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ default: self.default.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Type {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Type::Array(v0) => Type::Array(v0.clone()),
|
|
+ Type::BareFn(v0) => Type::BareFn(v0.clone()),
|
|
+ Type::Group(v0) => Type::Group(v0.clone()),
|
|
+ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
|
|
+ Type::Infer(v0) => Type::Infer(v0.clone()),
|
|
+ Type::Macro(v0) => Type::Macro(v0.clone()),
|
|
+ Type::Never(v0) => Type::Never(v0.clone()),
|
|
+ Type::Paren(v0) => Type::Paren(v0.clone()),
|
|
+ Type::Path(v0) => Type::Path(v0.clone()),
|
|
+ Type::Ptr(v0) => Type::Ptr(v0.clone()),
|
|
+ Type::Reference(v0) => Type::Reference(v0.clone()),
|
|
+ Type::Slice(v0) => Type::Slice(v0.clone()),
|
|
+ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
|
|
+ Type::Tuple(v0) => Type::Tuple(v0.clone()),
|
|
+ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeArray {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeArray {
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ elem: self.elem.clone(),
|
|
+ semi_token: self.semi_token.clone(),
|
|
+ len: self.len.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeBareFn {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeBareFn {
|
|
+ lifetimes: self.lifetimes.clone(),
|
|
+ unsafety: self.unsafety.clone(),
|
|
+ abi: self.abi.clone(),
|
|
+ fn_token: self.fn_token.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ inputs: self.inputs.clone(),
|
|
+ variadic: self.variadic.clone(),
|
|
+ output: self.output.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeGroup {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeGroup {
|
|
+ group_token: self.group_token.clone(),
|
|
+ elem: self.elem.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeImplTrait {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeImplTrait {
|
|
+ impl_token: self.impl_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeInfer {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeInfer {
|
|
+ underscore_token: self.underscore_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeMacro {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeMacro {
|
|
+ mac: self.mac.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeNever {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeNever {
|
|
+ bang_token: self.bang_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeParam {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeParam {
|
|
+ attrs: self.attrs.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ colon_token: self.colon_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ eq_token: self.eq_token.clone(),
|
|
+ default: self.default.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeParamBound {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
|
|
+ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeParen {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeParen {
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ elem: self.elem.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypePath {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypePath {
|
|
+ qself: self.qself.clone(),
|
|
+ path: self.path.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypePtr {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypePtr {
|
|
+ star_token: self.star_token.clone(),
|
|
+ const_token: self.const_token.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ elem: self.elem.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeReference {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeReference {
|
|
+ and_token: self.and_token.clone(),
|
|
+ lifetime: self.lifetime.clone(),
|
|
+ mutability: self.mutability.clone(),
|
|
+ elem: self.elem.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeSlice {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeSlice {
|
|
+ bracket_token: self.bracket_token.clone(),
|
|
+ elem: self.elem.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeTraitObject {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeTraitObject {
|
|
+ dyn_token: self.dyn_token.clone(),
|
|
+ bounds: self.bounds.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for TypeTuple {
|
|
+ fn clone(&self) -> Self {
|
|
+ TypeTuple {
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ elems: self.elems.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Copy for UnOp {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for UnOp {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for UseGlob {
|
|
+ fn clone(&self) -> Self {
|
|
+ UseGlob {
|
|
+ star_token: self.star_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for UseGroup {
|
|
+ fn clone(&self) -> Self {
|
|
+ UseGroup {
|
|
+ brace_token: self.brace_token.clone(),
|
|
+ items: self.items.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for UseName {
|
|
+ fn clone(&self) -> Self {
|
|
+ UseName {
|
|
+ ident: self.ident.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for UsePath {
|
|
+ fn clone(&self) -> Self {
|
|
+ UsePath {
|
|
+ ident: self.ident.clone(),
|
|
+ colon2_token: self.colon2_token.clone(),
|
|
+ tree: self.tree.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for UseRename {
|
|
+ fn clone(&self) -> Self {
|
|
+ UseRename {
|
|
+ ident: self.ident.clone(),
|
|
+ as_token: self.as_token.clone(),
|
|
+ rename: self.rename.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Clone for UseTree {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ UseTree::Path(v0) => UseTree::Path(v0.clone()),
|
|
+ UseTree::Name(v0) => UseTree::Name(v0.clone()),
|
|
+ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
|
|
+ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
|
|
+ UseTree::Group(v0) => UseTree::Group(v0.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Variadic {
|
|
+ fn clone(&self) -> Self {
|
|
+ Variadic {
|
|
+ attrs: self.attrs.clone(),
|
|
+ dots: self.dots.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Variant {
|
|
+ fn clone(&self) -> Self {
|
|
+ Variant {
|
|
+ attrs: self.attrs.clone(),
|
|
+ ident: self.ident.clone(),
|
|
+ fields: self.fields.clone(),
|
|
+ discriminant: self.discriminant.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for VisCrate {
|
|
+ fn clone(&self) -> Self {
|
|
+ VisCrate {
|
|
+ crate_token: self.crate_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for VisPublic {
|
|
+ fn clone(&self) -> Self {
|
|
+ VisPublic {
|
|
+ pub_token: self.pub_token.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for VisRestricted {
|
|
+ fn clone(&self) -> Self {
|
|
+ VisRestricted {
|
|
+ pub_token: self.pub_token.clone(),
|
|
+ paren_token: self.paren_token.clone(),
|
|
+ in_token: self.in_token.clone(),
|
|
+ path: self.path.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for Visibility {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Visibility::Public(v0) => Visibility::Public(v0.clone()),
|
|
+ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
|
|
+ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
|
|
+ Visibility::Inherited => Visibility::Inherited,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for WhereClause {
|
|
+ fn clone(&self) -> Self {
|
|
+ WhereClause {
|
|
+ where_token: self.where_token.clone(),
|
|
+ predicates: self.predicates.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Clone for WherePredicate {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
|
|
+ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
|
|
+ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
|
|
+ }
|
|
+ }
+}
diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/src/gen/debug.rs
@@ -0,0 +1,2857 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+use crate::*;
+use std::fmt::{self, Debug};
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for Abi {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Abi");
|
|
+ formatter.field("extern_token", &self.extern_token);
|
|
+ formatter.field("name", &self.name);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for AngleBracketedGenericArguments {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
|
|
+ formatter.field("colon2_token", &self.colon2_token);
|
|
+ formatter.field("lt_token", &self.lt_token);
|
|
+ formatter.field("args", &self.args);
|
|
+ formatter.field("gt_token", &self.gt_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Arm {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Arm");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.field("guard", &self.guard);
|
|
+ formatter.field("fat_arrow_token", &self.fat_arrow_token);
|
|
+ formatter.field("body", &self.body);
|
|
+ formatter.field("comma", &self.comma);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for AttrStyle {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ AttrStyle::Outer => formatter.write_str("Outer"),
|
|
+ AttrStyle::Inner(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Inner");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Attribute {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Attribute");
|
|
+ formatter.field("pound_token", &self.pound_token);
|
|
+ formatter.field("style", &self.style);
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.field("tokens", &self.tokens);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for BareFnArg {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("BareFnArg");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("name", &self.name);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for BinOp {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ BinOp::Add(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Add");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Sub(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Sub");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Mul(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Mul");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Div(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Div");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Rem(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Rem");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::And(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("And");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Or(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Or");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::BitXor(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BitXor");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::BitAnd(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BitAnd");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::BitOr(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BitOr");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Shl(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Shl");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Shr(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Shr");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Eq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Eq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Lt(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Lt");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Le(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Le");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Ne(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Ne");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Ge(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Ge");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::Gt(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Gt");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::AddEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("AddEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::SubEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("SubEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::MulEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("MulEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::DivEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("DivEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::RemEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("RemEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::BitXorEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BitXorEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::BitAndEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BitAndEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::BitOrEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BitOrEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::ShlEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ShlEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ BinOp::ShrEq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ShrEq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Binding {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Binding");
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Block {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Block");
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("stmts", &self.stmts);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for BoundLifetimes {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("BoundLifetimes");
|
|
+ formatter.field("for_token", &self.for_token);
|
|
+ formatter.field("lt_token", &self.lt_token);
|
|
+ formatter.field("lifetimes", &self.lifetimes);
|
|
+ formatter.field("gt_token", &self.gt_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ConstParam {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ConstParam");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("const_token", &self.const_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("default", &self.default);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Constraint {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Constraint");
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Debug for Data {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Data::Struct(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Struct");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Data::Enum(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Enum");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Data::Union(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Union");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Debug for DataEnum {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("DataEnum");
|
|
+ formatter.field("enum_token", &self.enum_token);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("variants", &self.variants);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Debug for DataStruct {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("DataStruct");
|
|
+ formatter.field("struct_token", &self.struct_token);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Debug for DataUnion {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("DataUnion");
|
|
+ formatter.field("union_token", &self.union_token);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Debug for DeriveInput {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("DeriveInput");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("data", &self.data);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Expr {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Array(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Array");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Assign(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Assign");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::AssignOp(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("AssignOp");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Async(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Async");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Await(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Await");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Binary(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Binary");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Block(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Block");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Box(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Box");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Break(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Break");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Call(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Call");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Cast(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Cast");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Closure(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Closure");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Continue(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Continue");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Field(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Field");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::ForLoop(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ForLoop");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Group(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Group");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::If(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("If");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Index(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Index");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Let(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Let");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Lit(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Lit");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Loop(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Loop");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Macro(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Match(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Match");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::MethodCall(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("MethodCall");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Paren(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Paren");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Path(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Path");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Range(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Range");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Reference(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Reference");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Repeat(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Repeat");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Return(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Return");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Struct(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Struct");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Try(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Try");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::TryBlock(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("TryBlock");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Tuple(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Tuple");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Unary(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Unary");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Unsafe(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Unsafe");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Expr::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::While(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("While");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Yield(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Yield");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprArray {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprArray");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("elems", &self.elems);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprAssign {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprAssign");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("left", &self.left);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("right", &self.right);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprAssignOp {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprAssignOp");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("left", &self.left);
|
|
+ formatter.field("op", &self.op);
|
|
+ formatter.field("right", &self.right);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprAsync {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprAsync");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("async_token", &self.async_token);
|
|
+ formatter.field("capture", &self.capture);
|
|
+ formatter.field("block", &self.block);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprAwait {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprAwait");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("base", &self.base);
|
|
+ formatter.field("dot_token", &self.dot_token);
|
|
+ formatter.field("await_token", &self.await_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprBinary {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprBinary");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("left", &self.left);
|
|
+ formatter.field("op", &self.op);
|
|
+ formatter.field("right", &self.right);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprBlock {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprBlock");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("label", &self.label);
|
|
+ formatter.field("block", &self.block);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprBox {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprBox");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("box_token", &self.box_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprBreak {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprBreak");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("break_token", &self.break_token);
|
|
+ formatter.field("label", &self.label);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprCall {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprCall");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("func", &self.func);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("args", &self.args);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprCast {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprCast");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("as_token", &self.as_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprClosure {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprClosure");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("asyncness", &self.asyncness);
|
|
+ formatter.field("movability", &self.movability);
|
|
+ formatter.field("capture", &self.capture);
|
|
+ formatter.field("or1_token", &self.or1_token);
|
|
+ formatter.field("inputs", &self.inputs);
|
|
+ formatter.field("or2_token", &self.or2_token);
|
|
+ formatter.field("output", &self.output);
|
|
+ formatter.field("body", &self.body);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprContinue {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprContinue");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("continue_token", &self.continue_token);
|
|
+ formatter.field("label", &self.label);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprField {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprField");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("base", &self.base);
|
|
+ formatter.field("dot_token", &self.dot_token);
|
|
+ formatter.field("member", &self.member);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprForLoop {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprForLoop");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("label", &self.label);
|
|
+ formatter.field("for_token", &self.for_token);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.field("in_token", &self.in_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("body", &self.body);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprGroup {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprGroup");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("group_token", &self.group_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprIf {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprIf");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("if_token", &self.if_token);
|
|
+ formatter.field("cond", &self.cond);
|
|
+ formatter.field("then_branch", &self.then_branch);
|
|
+ formatter.field("else_branch", &self.else_branch);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprIndex {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprIndex");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("index", &self.index);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprLet {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprLet");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("let_token", &self.let_token);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprLit {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprLit");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("lit", &self.lit);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprLoop {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprLoop");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("label", &self.label);
|
|
+ formatter.field("loop_token", &self.loop_token);
|
|
+ formatter.field("body", &self.body);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprMacro");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprMatch {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprMatch");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("match_token", &self.match_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("arms", &self.arms);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprMethodCall {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprMethodCall");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("receiver", &self.receiver);
|
|
+ formatter.field("dot_token", &self.dot_token);
|
|
+ formatter.field("method", &self.method);
|
|
+ formatter.field("turbofish", &self.turbofish);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("args", &self.args);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprParen {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprParen");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprPath {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprPath");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("qself", &self.qself);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprRange {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprRange");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("from", &self.from);
|
|
+ formatter.field("limits", &self.limits);
|
|
+ formatter.field("to", &self.to);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprReference {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprReference");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("and_token", &self.and_token);
|
|
+ formatter.field("raw", &self.raw);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprRepeat {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprRepeat");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.field("len", &self.len);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprReturn {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprReturn");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("return_token", &self.return_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprStruct {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprStruct");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.field("dot2_token", &self.dot2_token);
|
|
+ formatter.field("rest", &self.rest);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprTry {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprTry");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("question_token", &self.question_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprTryBlock {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprTryBlock");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("try_token", &self.try_token);
|
|
+ formatter.field("block", &self.block);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprTuple {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprTuple");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("elems", &self.elems);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprType");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ExprUnary {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprUnary");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("op", &self.op);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprUnsafe {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprUnsafe");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("unsafe_token", &self.unsafe_token);
|
|
+ formatter.field("block", &self.block);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprWhile {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprWhile");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("label", &self.label);
|
|
+ formatter.field("while_token", &self.while_token);
|
|
+ formatter.field("cond", &self.cond);
|
|
+ formatter.field("body", &self.body);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ExprYield {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ExprYield");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("yield_token", &self.yield_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Field {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Field");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for FieldPat {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("FieldPat");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("member", &self.member);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for FieldValue {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("FieldValue");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("member", &self.member);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Fields {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Fields::Named(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Named");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Fields::Unnamed(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Unnamed");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Fields::Unit => formatter.write_str("Unit"),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for FieldsNamed {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("FieldsNamed");
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("named", &self.named);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for FieldsUnnamed {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("FieldsUnnamed");
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("unnamed", &self.unnamed);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for File {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("File");
|
|
+ formatter.field("shebang", &self.shebang);
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("items", &self.items);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for FnArg {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ FnArg::Receiver(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Receiver");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ FnArg::Typed(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Typed");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ForeignItem {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ ForeignItem::Fn(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Fn");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ForeignItem::Static(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Static");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ForeignItem::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ForeignItem::Macro(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ForeignItem::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ForeignItemFn {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ForeignItemFn");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("sig", &self.sig);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ForeignItemMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ForeignItemMacro");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ForeignItemStatic {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ForeignItemStatic");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("static_token", &self.static_token);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ForeignItemType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ForeignItemType");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("type_token", &self.type_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for GenericArgument {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ GenericArgument::Lifetime(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Lifetime");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericArgument::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericArgument::Binding(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Binding");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericArgument::Constraint(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Constraint");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericArgument::Const(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Const");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for GenericMethodArgument {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ GenericMethodArgument::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericMethodArgument::Const(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Const");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for GenericParam {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ GenericParam::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericParam::Lifetime(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Lifetime");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ GenericParam::Const(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Const");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Generics {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Generics");
|
|
+ formatter.field("lt_token", &self.lt_token);
|
|
+ formatter.field("params", &self.params);
|
|
+ formatter.field("gt_token", &self.gt_token);
|
|
+ formatter.field("where_clause", &self.where_clause);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ImplItem {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ ImplItem::Const(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Const");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ImplItem::Method(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Method");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ImplItem::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ImplItem::Macro(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ ImplItem::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ImplItemConst {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ImplItemConst");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("defaultness", &self.defaultness);
|
|
+ formatter.field("const_token", &self.const_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ImplItemMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ImplItemMacro");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ImplItemMethod {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ImplItemMethod");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("defaultness", &self.defaultness);
|
|
+ formatter.field("sig", &self.sig);
|
|
+ formatter.field("block", &self.block);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ImplItemType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ImplItemType");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("defaultness", &self.defaultness);
|
|
+ formatter.field("type_token", &self.type_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Index {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Index");
|
|
+ formatter.field("index", &self.index);
|
|
+ formatter.field("span", &self.span);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Item {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Item::Const(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Const");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Enum(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Enum");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::ExternCrate(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ExternCrate");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Fn(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Fn");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::ForeignMod(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ForeignMod");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Impl(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Impl");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Macro(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Macro2(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro2");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Mod(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Mod");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Static(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Static");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Struct(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Struct");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Trait(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Trait");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::TraitAlias(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("TraitAlias");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Union(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Union");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Use(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Use");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Item::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemConst {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemConst");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("const_token", &self.const_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemEnum {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemEnum");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("enum_token", &self.enum_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("variants", &self.variants);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemExternCrate {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemExternCrate");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("extern_token", &self.extern_token);
|
|
+ formatter.field("crate_token", &self.crate_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("rename", &self.rename);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemFn {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemFn");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("sig", &self.sig);
|
|
+ formatter.field("block", &self.block);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemForeignMod {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemForeignMod");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("abi", &self.abi);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("items", &self.items);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemImpl {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemImpl");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("defaultness", &self.defaultness);
|
|
+ formatter.field("unsafety", &self.unsafety);
|
|
+ formatter.field("impl_token", &self.impl_token);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("trait_", &self.trait_);
|
|
+ formatter.field("self_ty", &self.self_ty);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("items", &self.items);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemMacro");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemMacro2 {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemMacro2");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("macro_token", &self.macro_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("rules", &self.rules);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemMod {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemMod");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("mod_token", &self.mod_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("content", &self.content);
|
|
+ formatter.field("semi", &self.semi);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemStatic {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemStatic");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("static_token", &self.static_token);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemStruct {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemStruct");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("struct_token", &self.struct_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemTrait {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemTrait");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("unsafety", &self.unsafety);
|
|
+ formatter.field("auto_token", &self.auto_token);
|
|
+ formatter.field("trait_token", &self.trait_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("supertraits", &self.supertraits);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("items", &self.items);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemTraitAlias {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemTraitAlias");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("trait_token", &self.trait_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemType");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("type_token", &self.type_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemUnion {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemUnion");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("union_token", &self.union_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for ItemUse {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("ItemUse");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("vis", &self.vis);
|
|
+ formatter.field("use_token", &self.use_token);
|
|
+ formatter.field("leading_colon", &self.leading_colon);
|
|
+ formatter.field("tree", &self.tree);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Label {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Label");
|
|
+ formatter.field("name", &self.name);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+impl Debug for Lifetime {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Lifetime");
|
|
+ formatter.field("apostrophe", &self.apostrophe);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for LifetimeDef {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("LifetimeDef");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("lifetime", &self.lifetime);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+impl Debug for Lit {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Lit::Str(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Str");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::ByteStr(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ByteStr");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::Byte(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Byte");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::Char(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Char");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::Int(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Int");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::Float(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Float");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::Bool(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Bool");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Lit::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Local {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Local");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("let_token", &self.let_token);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.field("init", &self.init);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Macro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Macro");
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.field("bang_token", &self.bang_token);
|
|
+ formatter.field("delimiter", &self.delimiter);
|
|
+ formatter.field("tokens", &self.tokens);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for MacroDelimiter {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ MacroDelimiter::Paren(v0) => {
|
|
+                let mut formatter = formatter.debug_tuple("Paren");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            MacroDelimiter::Brace(v0) => {
+                let mut formatter = formatter.debug_tuple("Brace");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            MacroDelimiter::Bracket(v0) => {
+                let mut formatter = formatter.debug_tuple("Bracket");
+                formatter.field(v0);
+                formatter.finish()
+            }
+        }
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for Member {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Member::Named(v0) => {
+                let mut formatter = formatter.debug_tuple("Named");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Member::Unnamed(v0) => {
+                let mut formatter = formatter.debug_tuple("Unnamed");
+                formatter.field(v0);
+                formatter.finish()
+            }
+        }
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for Meta {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Meta::Path(v0) => {
+                let mut formatter = formatter.debug_tuple("Path");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Meta::List(v0) => {
+                let mut formatter = formatter.debug_tuple("List");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Meta::NameValue(v0) => {
+                let mut formatter = formatter.debug_tuple("NameValue");
+                formatter.field(v0);
+                formatter.finish()
+            }
+        }
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for MetaList {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        let mut formatter = formatter.debug_struct("MetaList");
+        formatter.field("path", &self.path);
+        formatter.field("paren_token", &self.paren_token);
+        formatter.field("nested", &self.nested);
+        formatter.finish()
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for MetaNameValue {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        let mut formatter = formatter.debug_struct("MetaNameValue");
+        formatter.field("path", &self.path);
+        formatter.field("eq_token", &self.eq_token);
+        formatter.field("lit", &self.lit);
+        formatter.finish()
+    }
+}
+#[cfg(feature = "full")]
+impl Debug for MethodTurbofish {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        let mut formatter = formatter.debug_struct("MethodTurbofish");
+        formatter.field("colon2_token", &self.colon2_token);
+        formatter.field("lt_token", &self.lt_token);
+        formatter.field("args", &self.args);
+        formatter.field("gt_token", &self.gt_token);
+        formatter.finish()
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for NestedMeta {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            NestedMeta::Meta(v0) => {
+                let mut formatter = formatter.debug_tuple("Meta");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            NestedMeta::Lit(v0) => {
+                let mut formatter = formatter.debug_tuple("Lit");
+                formatter.field(v0);
+                formatter.finish()
+            }
+        }
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Debug for ParenthesizedGenericArguments {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
+        formatter.field("paren_token", &self.paren_token);
+        formatter.field("inputs", &self.inputs);
+        formatter.field("output", &self.output);
+        formatter.finish()
+    }
+}
+#[cfg(feature = "full")]
+impl Debug for Pat {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Pat::Box(v0) => {
+                let mut formatter = formatter.debug_tuple("Box");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Ident(v0) => {
+                let mut formatter = formatter.debug_tuple("Ident");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Lit(v0) => {
+                let mut formatter = formatter.debug_tuple("Lit");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Macro(v0) => {
+                let mut formatter = formatter.debug_tuple("Macro");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Or(v0) => {
+                let mut formatter = formatter.debug_tuple("Or");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Path(v0) => {
+                let mut formatter = formatter.debug_tuple("Path");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Range(v0) => {
+                let mut formatter = formatter.debug_tuple("Range");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Reference(v0) => {
+                let mut formatter = formatter.debug_tuple("Reference");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Rest(v0) => {
+                let mut formatter = formatter.debug_tuple("Rest");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Slice(v0) => {
+                let mut formatter = formatter.debug_tuple("Slice");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Struct(v0) => {
+                let mut formatter = formatter.debug_tuple("Struct");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Tuple(v0) => {
+                let mut formatter = formatter.debug_tuple("Tuple");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::TupleStruct(v0) => {
+                let mut formatter = formatter.debug_tuple("TupleStruct");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Type(v0) => {
+                let mut formatter = formatter.debug_tuple("Type");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Verbatim(v0) => {
+                let mut formatter = formatter.debug_tuple("Verbatim");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            Pat::Wild(v0) => {
+                let mut formatter = formatter.debug_tuple("Wild");
+                formatter.field(v0);
+                formatter.finish()
+            }
+            _ => unreachable!(),
+        }
+    }
+}
+#[cfg(feature = "full")]
|
|
+impl Debug for PatBox {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatBox");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("box_token", &self.box_token);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatIdent {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatIdent");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("by_ref", &self.by_ref);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("subpat", &self.subpat);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatLit {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatLit");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("expr", &self.expr);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatMacro");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatOr {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatOr");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("leading_vert", &self.leading_vert);
|
|
+ formatter.field("cases", &self.cases);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatPath {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatPath");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("qself", &self.qself);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatRange {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatRange");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("lo", &self.lo);
|
|
+ formatter.field("limits", &self.limits);
|
|
+ formatter.field("hi", &self.hi);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatReference {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatReference");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("and_token", &self.and_token);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatRest {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatRest");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("dot2_token", &self.dot2_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatSlice {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatSlice");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("elems", &self.elems);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatStruct {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatStruct");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.field("dot2_token", &self.dot2_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatTuple {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatTuple");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("elems", &self.elems);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatTupleStruct {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatTupleStruct");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatType");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("pat", &self.pat);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for PatWild {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PatWild");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("underscore_token", &self.underscore_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Path {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Path");
|
|
+ formatter.field("leading_colon", &self.leading_colon);
|
|
+ formatter.field("segments", &self.segments);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for PathArguments {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ PathArguments::None => formatter.write_str("None"),
|
|
+ PathArguments::AngleBracketed(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("AngleBracketed");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ PathArguments::Parenthesized(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Parenthesized");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for PathSegment {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PathSegment");
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("arguments", &self.arguments);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for PredicateEq {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PredicateEq");
|
|
+ formatter.field("lhs_ty", &self.lhs_ty);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("rhs_ty", &self.rhs_ty);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for PredicateLifetime {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PredicateLifetime");
|
|
+ formatter.field("lifetime", &self.lifetime);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for PredicateType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("PredicateType");
|
|
+ formatter.field("lifetimes", &self.lifetimes);
|
|
+ formatter.field("bounded_ty", &self.bounded_ty);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for QSelf {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("QSelf");
|
|
+ formatter.field("lt_token", &self.lt_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("position", &self.position);
|
|
+ formatter.field("as_token", &self.as_token);
|
|
+ formatter.field("gt_token", &self.gt_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for RangeLimits {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ RangeLimits::HalfOpen(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("HalfOpen");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ RangeLimits::Closed(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Closed");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Receiver {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Receiver");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("reference", &self.reference);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("self_token", &self.self_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for ReturnType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ ReturnType::Default => formatter.write_str("Default"),
|
|
+ ReturnType::Type(v0, v1) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.field(v1);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Signature {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Signature");
|
|
+ formatter.field("constness", &self.constness);
|
|
+ formatter.field("asyncness", &self.asyncness);
|
|
+ formatter.field("unsafety", &self.unsafety);
|
|
+ formatter.field("abi", &self.abi);
|
|
+ formatter.field("fn_token", &self.fn_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("inputs", &self.inputs);
|
|
+ formatter.field("variadic", &self.variadic);
|
|
+ formatter.field("output", &self.output);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for Stmt {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Stmt::Local(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Local");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Stmt::Item(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Item");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Stmt::Expr(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Expr");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Stmt::Semi(v0, v1) => {
|
|
+ let mut formatter = formatter.debug_tuple("Semi");
|
|
+ formatter.field(v0);
|
|
+ formatter.field(v1);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TraitBound {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TraitBound");
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("modifier", &self.modifier);
|
|
+ formatter.field("lifetimes", &self.lifetimes);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TraitBoundModifier {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ TraitBoundModifier::None => formatter.write_str("None"),
|
|
+ TraitBoundModifier::Maybe(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Maybe");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for TraitItem {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ TraitItem::Const(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Const");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ TraitItem::Method(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Method");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ TraitItem::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ TraitItem::Macro(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ TraitItem::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for TraitItemConst {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TraitItemConst");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("const_token", &self.const_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("ty", &self.ty);
|
|
+ formatter.field("default", &self.default);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for TraitItemMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TraitItemMacro");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for TraitItemMethod {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TraitItemMethod");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("sig", &self.sig);
|
|
+ formatter.field("default", &self.default);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for TraitItemType {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TraitItemType");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("type_token", &self.type_token);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("generics", &self.generics);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.field("default", &self.default);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Type {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Type::Array(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Array");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::BareFn(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("BareFn");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Group(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Group");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::ImplTrait(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("ImplTrait");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Infer(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Infer");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Macro(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Macro");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Never(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Never");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Paren(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Paren");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Path(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Path");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Ptr(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Ptr");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Reference(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Reference");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Slice(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Slice");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::TraitObject(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("TraitObject");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Tuple(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Tuple");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Type::Verbatim(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Verbatim");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeArray {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeArray");
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("elem", &self.elem);
|
|
+ formatter.field("semi_token", &self.semi_token);
|
|
+ formatter.field("len", &self.len);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeBareFn {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeBareFn");
|
|
+ formatter.field("lifetimes", &self.lifetimes);
|
|
+ formatter.field("unsafety", &self.unsafety);
|
|
+ formatter.field("abi", &self.abi);
|
|
+ formatter.field("fn_token", &self.fn_token);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("inputs", &self.inputs);
|
|
+ formatter.field("variadic", &self.variadic);
|
|
+ formatter.field("output", &self.output);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeGroup {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeGroup");
|
|
+ formatter.field("group_token", &self.group_token);
|
|
+ formatter.field("elem", &self.elem);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeImplTrait {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeImplTrait");
|
|
+ formatter.field("impl_token", &self.impl_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeInfer {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeInfer");
|
|
+ formatter.field("underscore_token", &self.underscore_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeMacro {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeMacro");
|
|
+ formatter.field("mac", &self.mac);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeNever {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeNever");
|
|
+ formatter.field("bang_token", &self.bang_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeParam {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeParam");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon_token", &self.colon_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.field("eq_token", &self.eq_token);
|
|
+ formatter.field("default", &self.default);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeParamBound {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ TypeParamBound::Trait(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Trait");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ TypeParamBound::Lifetime(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Lifetime");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeParen {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeParen");
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("elem", &self.elem);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypePath {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypePath");
|
|
+ formatter.field("qself", &self.qself);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypePtr {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypePtr");
|
|
+ formatter.field("star_token", &self.star_token);
|
|
+ formatter.field("const_token", &self.const_token);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("elem", &self.elem);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeReference {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeReference");
|
|
+ formatter.field("and_token", &self.and_token);
|
|
+ formatter.field("lifetime", &self.lifetime);
|
|
+ formatter.field("mutability", &self.mutability);
|
|
+ formatter.field("elem", &self.elem);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeSlice {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeSlice");
|
|
+ formatter.field("bracket_token", &self.bracket_token);
|
|
+ formatter.field("elem", &self.elem);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeTraitObject {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeTraitObject");
|
|
+ formatter.field("dyn_token", &self.dyn_token);
|
|
+ formatter.field("bounds", &self.bounds);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for TypeTuple {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("TypeTuple");
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("elems", &self.elems);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for UnOp {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ UnOp::Deref(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Deref");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ UnOp::Not(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Not");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ UnOp::Neg(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Neg");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for UseGlob {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("UseGlob");
|
|
+ formatter.field("star_token", &self.star_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for UseGroup {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("UseGroup");
|
|
+ formatter.field("brace_token", &self.brace_token);
|
|
+ formatter.field("items", &self.items);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for UseName {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("UseName");
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for UsePath {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("UsePath");
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("colon2_token", &self.colon2_token);
|
|
+ formatter.field("tree", &self.tree);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for UseRename {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("UseRename");
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("as_token", &self.as_token);
|
|
+ formatter.field("rename", &self.rename);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Debug for UseTree {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ UseTree::Path(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Path");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ UseTree::Name(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Name");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ UseTree::Rename(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Rename");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ UseTree::Glob(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Glob");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ UseTree::Group(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Group");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Variadic {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Variadic");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("dots", &self.dots);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Variant {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("Variant");
|
|
+ formatter.field("attrs", &self.attrs);
|
|
+ formatter.field("ident", &self.ident);
|
|
+ formatter.field("fields", &self.fields);
|
|
+ formatter.field("discriminant", &self.discriminant);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for VisCrate {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("VisCrate");
|
|
+ formatter.field("crate_token", &self.crate_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for VisPublic {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("VisPublic");
|
|
+ formatter.field("pub_token", &self.pub_token);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for VisRestricted {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("VisRestricted");
|
|
+ formatter.field("pub_token", &self.pub_token);
|
|
+ formatter.field("paren_token", &self.paren_token);
|
|
+ formatter.field("in_token", &self.in_token);
|
|
+ formatter.field("path", &self.path);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for Visibility {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ Visibility::Public(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Public");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Visibility::Crate(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Crate");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Visibility::Restricted(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Restricted");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ Visibility::Inherited => formatter.write_str("Inherited"),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for WhereClause {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ let mut formatter = formatter.debug_struct("WhereClause");
|
|
+ formatter.field("where_token", &self.where_token);
|
|
+ formatter.field("predicates", &self.predicates);
|
|
+ formatter.finish()
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Debug for WherePredicate {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ match self {
|
|
+ WherePredicate::Type(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Type");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ WherePredicate::Lifetime(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Lifetime");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ WherePredicate::Eq(v0) => {
|
|
+ let mut formatter = formatter.debug_tuple("Eq");
|
|
+ formatter.field(v0);
|
|
+ formatter.finish()
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/src/gen/eq.rs
@@ -0,0 +1,1930 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#[cfg(any(feature = "derive", feature = "full"))]
+use crate::tt::TokenStreamHelper;
+use crate::*;
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Eq for Abi {}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl PartialEq for Abi {
+    fn eq(&self, other: &Self) -> bool {
+        self.name == other.name
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for AngleBracketedGenericArguments {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for AngleBracketedGenericArguments {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.colon2_token == other.colon2_token && self.args == other.args
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Arm {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Arm {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.pat == other.pat
|
|
+ && self.guard == other.guard
|
|
+ && self.body == other.body
|
|
+ && self.comma == other.comma
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for AttrStyle {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for AttrStyle {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (AttrStyle::Outer, AttrStyle::Outer) => true,
|
|
+ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Attribute {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Attribute {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.style == other.style
|
|
+ && self.path == other.path
|
|
+ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for BareFnArg {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for BareFnArg {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for BinOp {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for BinOp {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (BinOp::Add(_), BinOp::Add(_)) => true,
|
|
+ (BinOp::Sub(_), BinOp::Sub(_)) => true,
|
|
+ (BinOp::Mul(_), BinOp::Mul(_)) => true,
|
|
+ (BinOp::Div(_), BinOp::Div(_)) => true,
|
|
+ (BinOp::Rem(_), BinOp::Rem(_)) => true,
|
|
+ (BinOp::And(_), BinOp::And(_)) => true,
|
|
+ (BinOp::Or(_), BinOp::Or(_)) => true,
|
|
+ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
|
|
+ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
|
|
+ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
|
|
+ (BinOp::Shl(_), BinOp::Shl(_)) => true,
|
|
+ (BinOp::Shr(_), BinOp::Shr(_)) => true,
|
|
+ (BinOp::Eq(_), BinOp::Eq(_)) => true,
|
|
+ (BinOp::Lt(_), BinOp::Lt(_)) => true,
|
|
+ (BinOp::Le(_), BinOp::Le(_)) => true,
|
|
+ (BinOp::Ne(_), BinOp::Ne(_)) => true,
|
|
+ (BinOp::Ge(_), BinOp::Ge(_)) => true,
|
|
+ (BinOp::Gt(_), BinOp::Gt(_)) => true,
|
|
+ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
|
|
+ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
|
|
+ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
|
|
+ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
|
|
+ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
|
|
+ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
|
|
+ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
|
|
+ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
|
|
+ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
|
|
+ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Binding {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Binding {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ident == other.ident && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Block {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Block {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.stmts == other.stmts
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for BoundLifetimes {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for BoundLifetimes {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lifetimes == other.lifetimes
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ConstParam {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ConstParam {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.ident == other.ident
|
|
+ && self.ty == other.ty
|
|
+ && self.eq_token == other.eq_token
|
|
+ && self.default == other.default
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Constraint {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Constraint {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ident == other.ident && self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Eq for Data {}
|
|
+#[cfg(feature = "derive")]
|
|
+impl PartialEq for Data {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
|
|
+ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
|
|
+ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Eq for DataEnum {}
|
|
+#[cfg(feature = "derive")]
|
|
+impl PartialEq for DataEnum {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.variants == other.variants
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Eq for DataStruct {}
|
|
+#[cfg(feature = "derive")]
|
|
+impl PartialEq for DataStruct {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.fields == other.fields && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Eq for DataUnion {}
|
|
+#[cfg(feature = "derive")]
|
|
+impl PartialEq for DataUnion {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.fields == other.fields
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Eq for DeriveInput {}
|
|
+#[cfg(feature = "derive")]
|
|
+impl PartialEq for DeriveInput {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.data == other.data
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Expr {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Expr {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
|
|
+ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
|
|
+ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
|
|
+ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
|
|
+ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
|
|
+ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
|
|
+ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
|
|
+ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
|
|
+ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
|
|
+ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
|
|
+ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
|
|
+ #[cfg(feature = "full")]
|
|
+ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprArray {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprArray {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.elems == other.elems
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprAssign {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprAssign {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.left == other.left && self.right == other.right
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprAssignOp {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprAssignOp {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.left == other.left
|
|
+ && self.op == other.op
|
|
+ && self.right == other.right
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprAsync {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprAsync {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprAwait {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprAwait {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.base == other.base
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprBinary {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprBinary {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.left == other.left
|
|
+ && self.op == other.op
|
|
+ && self.right == other.right
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprBlock {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprBlock {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.label == other.label && self.block == other.block
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprBox {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprBox {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprBreak {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprBreak {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprCall {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprCall {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.func == other.func && self.args == other.args
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprCast {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprCast {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprClosure {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprClosure {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.asyncness == other.asyncness
|
|
+ && self.movability == other.movability
|
|
+ && self.capture == other.capture
|
|
+ && self.inputs == other.inputs
|
|
+ && self.output == other.output
|
|
+ && self.body == other.body
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprContinue {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprContinue {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.label == other.label
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprField {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprField {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.base == other.base && self.member == other.member
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprForLoop {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprForLoop {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.label == other.label
|
|
+ && self.pat == other.pat
|
|
+ && self.expr == other.expr
|
|
+ && self.body == other.body
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprGroup {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprGroup {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprIf {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprIf {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.cond == other.cond
|
|
+ && self.then_branch == other.then_branch
|
|
+ && self.else_branch == other.else_branch
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprIndex {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprIndex {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprLet {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprLet {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprLit {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprLit {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.lit == other.lit
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprLoop {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprLoop {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.label == other.label && self.body == other.body
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprMacro {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.mac == other.mac
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprMatch {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprMatch {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprMethodCall {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprMethodCall {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.receiver == other.receiver
|
|
+ && self.method == other.method
|
|
+ && self.turbofish == other.turbofish
|
|
+ && self.args == other.args
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprParen {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprParen {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ExprPath {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ExprPath {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ExprRange {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ExprRange {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.from == other.from
|
|
+            && self.limits == other.limits
+            && self.to == other.to
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprReference {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprReference {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprRepeat {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprRepeat {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprReturn {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprReturn {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.expr == other.expr
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprStruct {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprStruct {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs
+            && self.path == other.path
+            && self.fields == other.fields
+            && self.dot2_token == other.dot2_token
+            && self.rest == other.rest
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprTry {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprTry {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.expr == other.expr
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprTryBlock {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprTryBlock {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.block == other.block
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprTuple {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprTuple {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.elems == other.elems
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprType {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprType {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Eq for ExprUnary {}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl PartialEq for ExprUnary {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprUnsafe {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprUnsafe {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.block == other.block
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprWhile {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprWhile {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs
+            && self.label == other.label
+            && self.cond == other.cond
+            && self.body == other.body
+    }
+}
+#[cfg(feature = "full")]
+impl Eq for ExprYield {}
+#[cfg(feature = "full")]
+impl PartialEq for ExprYield {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs && self.expr == other.expr
+    }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl Eq for Field {}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl PartialEq for Field {
+    fn eq(&self, other: &Self) -> bool {
+        self.attrs == other.attrs
+            && self.vis == other.vis
+            && self.ident == other.ident
+            && self.colon_token == other.colon_token
+            && self.ty == other.ty
+    }
+}
+#[cfg(feature = "full")]
|
|
+impl Eq for FieldPat {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for FieldPat {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.member == other.member
|
|
+ && self.colon_token == other.colon_token
|
|
+ && self.pat == other.pat
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for FieldValue {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for FieldValue {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.member == other.member
|
|
+ && self.colon_token == other.colon_token
|
|
+ && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Fields {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Fields {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
|
|
+ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
|
|
+ (Fields::Unit, Fields::Unit) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for FieldsNamed {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for FieldsNamed {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.named == other.named
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for FieldsUnnamed {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for FieldsUnnamed {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.unnamed == other.unnamed
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for File {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for File {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for FnArg {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for FnArg {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
|
|
+ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ForeignItem {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ForeignItem {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
|
|
+ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
|
|
+ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
|
|
+ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
|
|
+ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ForeignItemFn {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ForeignItemFn {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ForeignItemMacro {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ForeignItemMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ForeignItemStatic {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ForeignItemStatic {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.mutability == other.mutability
|
|
+ && self.ident == other.ident
|
|
+ && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ForeignItemType {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ForeignItemType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for GenericArgument {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for GenericArgument {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
|
|
+ self0 == other0
|
|
+ }
|
|
+ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
|
|
+ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
|
|
+ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
|
|
+ self0 == other0
|
|
+ }
|
|
+ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for GenericMethodArgument {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for GenericMethodArgument {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
|
|
+ self0 == other0
|
|
+ }
|
|
+ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
|
|
+ self0 == other0
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for GenericParam {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for GenericParam {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
|
|
+ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
|
|
+ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Generics {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Generics {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lt_token == other.lt_token
|
|
+ && self.params == other.params
|
|
+ && self.gt_token == other.gt_token
|
|
+ && self.where_clause == other.where_clause
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ImplItem {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ImplItem {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
|
|
+ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
|
|
+ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
|
|
+ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
|
|
+ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ImplItemConst {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ImplItemConst {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.defaultness == other.defaultness
|
|
+ && self.ident == other.ident
|
|
+ && self.ty == other.ty
|
|
+ && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ImplItemMacro {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ImplItemMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ImplItemMethod {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ImplItemMethod {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.defaultness == other.defaultness
|
|
+ && self.sig == other.sig
|
|
+ && self.block == other.block
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ImplItemType {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ImplItemType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.defaultness == other.defaultness
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Item {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Item {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
|
|
+ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
|
|
+ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
|
|
+ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
|
|
+ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
|
|
+ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
|
|
+ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
|
|
+ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
|
|
+ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
|
|
+ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
|
|
+ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
|
|
+ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
|
|
+ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
|
|
+ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
|
|
+ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
|
|
+ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
|
|
+ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemConst {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemConst {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.ty == other.ty
|
|
+ && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemEnum {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemEnum {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.variants == other.variants
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemExternCrate {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemExternCrate {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.rename == other.rename
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemFn {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemFn {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.sig == other.sig
|
|
+ && self.block == other.block
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemForeignMod {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemForeignMod {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemImpl {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemImpl {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.defaultness == other.defaultness
|
|
+ && self.unsafety == other.unsafety
|
|
+ && self.generics == other.generics
|
|
+ && self.trait_ == other.trait_
|
|
+ && self.self_ty == other.self_ty
|
|
+ && self.items == other.items
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemMacro {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.ident == other.ident
|
|
+ && self.mac == other.mac
|
|
+ && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemMacro2 {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemMacro2 {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemMod {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemMod {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.content == other.content
|
|
+ && self.semi == other.semi
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemStatic {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemStatic {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.mutability == other.mutability
|
|
+ && self.ident == other.ident
|
|
+ && self.ty == other.ty
|
|
+ && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemStruct {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemStruct {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.fields == other.fields
|
|
+ && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemTrait {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemTrait {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.unsafety == other.unsafety
|
|
+ && self.auto_token == other.auto_token
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.colon_token == other.colon_token
|
|
+ && self.supertraits == other.supertraits
|
|
+ && self.items == other.items
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemTraitAlias {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemTraitAlias {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemType {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemUnion {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemUnion {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.fields == other.fields
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for ItemUse {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for ItemUse {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.vis == other.vis
|
|
+ && self.leading_colon == other.leading_colon
|
|
+ && self.tree == other.tree
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Label {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Label {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.name == other.name
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for LifetimeDef {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for LifetimeDef {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.lifetime == other.lifetime
|
|
+ && self.colon_token == other.colon_token
|
|
+ && self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+impl Eq for Lit {}
+impl PartialEq for Lit {
+    fn eq(&self, other: &Self) -> bool {
+        match (self, other) {
+            (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
+            (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
+            (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
+            (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
+            (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
+            (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
+            (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
+            (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
+                self0.to_string() == other0.to_string()
+            }
+            _ => false,
+        }
+    }
+}
+impl Eq for LitBool {}
+impl PartialEq for LitBool {
+    fn eq(&self, other: &Self) -> bool {
+        self.value == other.value
+    }
+}
+impl Eq for LitByte {}
+impl Eq for LitByteStr {}
+impl Eq for LitChar {}
+impl Eq for LitFloat {}
+impl Eq for LitInt {}
+impl Eq for LitStr {}
+#[cfg(feature = "full")]
|
|
+impl Eq for Local {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Local {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Macro {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Macro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.path == other.path
|
|
+ && self.delimiter == other.delimiter
|
|
+ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for MacroDelimiter {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for MacroDelimiter {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
|
|
+ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
|
|
+ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Meta {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Meta {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
|
|
+ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
|
|
+ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for MetaList {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for MetaList {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.path == other.path && self.nested == other.nested
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for MetaNameValue {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for MetaNameValue {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.path == other.path && self.lit == other.lit
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for MethodTurbofish {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for MethodTurbofish {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.args == other.args
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for NestedMeta {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for NestedMeta {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
|
|
+ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ParenthesizedGenericArguments {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ParenthesizedGenericArguments {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.inputs == other.inputs && self.output == other.output
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Pat {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Pat {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
|
|
+ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
|
|
+ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
|
|
+ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
|
|
+ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
|
|
+ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
|
|
+ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
|
|
+ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
|
|
+ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
|
|
+ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
|
|
+ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
|
|
+ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
|
|
+ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
|
|
+ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
|
|
+ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatBox {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatBox {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.pat == other.pat
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatIdent {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatIdent {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.by_ref == other.by_ref
|
|
+ && self.mutability == other.mutability
|
|
+ && self.ident == other.ident
|
|
+ && self.subpat == other.subpat
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatLit {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatLit {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.expr == other.expr
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatMacro {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.mac == other.mac
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatOr {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatOr {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.leading_vert == other.leading_vert
|
|
+ && self.cases == other.cases
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatPath {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatPath {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatRange {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatRange {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.lo == other.lo
|
|
+ && self.limits == other.limits
|
|
+ && self.hi == other.hi
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatReference {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatReference {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatRest {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatRest {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatSlice {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatSlice {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.elems == other.elems
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatStruct {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatStruct {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.path == other.path
|
|
+ && self.fields == other.fields
|
|
+ && self.dot2_token == other.dot2_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatTuple {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatTuple {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.elems == other.elems
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatTupleStruct {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatTupleStruct {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatType {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for PatWild {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for PatWild {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Path {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Path {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.leading_colon == other.leading_colon && self.segments == other.segments
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for PathArguments {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for PathArguments {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (PathArguments::None, PathArguments::None) => true,
|
|
+ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
|
|
+ self0 == other0
|
|
+ }
|
|
+ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
|
|
+ self0 == other0
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for PathSegment {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for PathSegment {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ident == other.ident && self.arguments == other.arguments
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for PredicateEq {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for PredicateEq {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for PredicateLifetime {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for PredicateLifetime {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lifetime == other.lifetime && self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for PredicateType {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for PredicateType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lifetimes == other.lifetimes
|
|
+ && self.bounded_ty == other.bounded_ty
|
|
+ && self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for QSelf {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for QSelf {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for RangeLimits {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for RangeLimits {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
|
|
+ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Receiver {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Receiver {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.reference == other.reference
|
|
+ && self.mutability == other.mutability
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for ReturnType {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for ReturnType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (ReturnType::Default, ReturnType::Default) => true,
|
|
+ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Signature {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Signature {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.constness == other.constness
|
|
+ && self.asyncness == other.asyncness
|
|
+ && self.unsafety == other.unsafety
|
|
+ && self.abi == other.abi
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.inputs == other.inputs
|
|
+ && self.variadic == other.variadic
|
|
+ && self.output == other.output
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for Stmt {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for Stmt {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
|
|
+ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
|
|
+ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
|
|
+ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TraitBound {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TraitBound {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.paren_token == other.paren_token
|
|
+ && self.modifier == other.modifier
|
|
+ && self.lifetimes == other.lifetimes
|
|
+ && self.path == other.path
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TraitBoundModifier {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TraitBoundModifier {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
|
|
+ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for TraitItem {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for TraitItem {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
|
|
+ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
|
|
+ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
|
|
+ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
|
|
+ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for TraitItemConst {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for TraitItemConst {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.ident == other.ident
|
|
+ && self.ty == other.ty
|
|
+ && self.default == other.default
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for TraitItemMacro {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for TraitItemMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for TraitItemMethod {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for TraitItemMethod {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.sig == other.sig
|
|
+ && self.default == other.default
|
|
+ && self.semi_token == other.semi_token
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for TraitItemType {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for TraitItemType {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.ident == other.ident
|
|
+ && self.generics == other.generics
|
|
+ && self.colon_token == other.colon_token
|
|
+ && self.bounds == other.bounds
|
|
+ && self.default == other.default
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Type {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Type {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
|
|
+ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
|
|
+ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
|
|
+ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
|
|
+ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
|
|
+ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
|
|
+ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
|
|
+ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
|
|
+ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
|
|
+ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
|
|
+ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
|
|
+ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
|
|
+ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
|
|
+ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
|
|
+ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
|
|
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
|
|
+ }
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeArray {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeArray {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.elem == other.elem && self.len == other.len
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeBareFn {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeBareFn {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lifetimes == other.lifetimes
|
|
+ && self.unsafety == other.unsafety
|
|
+ && self.abi == other.abi
|
|
+ && self.inputs == other.inputs
|
|
+ && self.variadic == other.variadic
|
|
+ && self.output == other.output
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeGroup {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeGroup {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.elem == other.elem
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeImplTrait {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeImplTrait {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeInfer {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeInfer {
|
|
+ fn eq(&self, _other: &Self) -> bool {
|
|
+ true
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeMacro {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeMacro {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.mac == other.mac
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeNever {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeNever {
|
|
+ fn eq(&self, _other: &Self) -> bool {
|
|
+ true
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeParam {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeParam {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.ident == other.ident
|
|
+ && self.colon_token == other.colon_token
|
|
+ && self.bounds == other.bounds
|
|
+ && self.eq_token == other.eq_token
|
|
+ && self.default == other.default
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeParamBound {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeParamBound {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
|
|
+ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeParen {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeParen {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.elem == other.elem
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypePath {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypePath {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.qself == other.qself && self.path == other.path
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypePtr {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypePtr {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.const_token == other.const_token
|
|
+ && self.mutability == other.mutability
|
|
+ && self.elem == other.elem
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeReference {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeReference {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.lifetime == other.lifetime
|
|
+ && self.mutability == other.mutability
|
|
+ && self.elem == other.elem
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeSlice {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeSlice {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.elem == other.elem
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeTraitObject {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeTraitObject {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.dyn_token == other.dyn_token && self.bounds == other.bounds
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for TypeTuple {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for TypeTuple {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.elems == other.elems
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for UnOp {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for UnOp {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (UnOp::Deref(_), UnOp::Deref(_)) => true,
|
|
+ (UnOp::Not(_), UnOp::Not(_)) => true,
|
|
+ (UnOp::Neg(_), UnOp::Neg(_)) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for UseGlob {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for UseGlob {
|
|
+ fn eq(&self, _other: &Self) -> bool {
|
|
+ true
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for UseGroup {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for UseGroup {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.items == other.items
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for UseName {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for UseName {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ident == other.ident
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for UsePath {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for UsePath {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ident == other.ident && self.tree == other.tree
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for UseRename {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for UseRename {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.ident == other.ident && self.rename == other.rename
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Eq for UseTree {}
|
|
+#[cfg(feature = "full")]
|
|
+impl PartialEq for UseTree {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
|
|
+ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
|
|
+ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
|
|
+ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
|
|
+ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Variadic {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Variadic {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Variant {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Variant {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.attrs == other.attrs
|
|
+ && self.ident == other.ident
|
|
+ && self.fields == other.fields
|
|
+ && self.discriminant == other.discriminant
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for VisCrate {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for VisCrate {
|
|
+ fn eq(&self, _other: &Self) -> bool {
|
|
+ true
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for VisPublic {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for VisPublic {
|
|
+ fn eq(&self, _other: &Self) -> bool {
|
|
+ true
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for VisRestricted {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for VisRestricted {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.in_token == other.in_token && self.path == other.path
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for Visibility {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for Visibility {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
|
|
+ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
|
|
+ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
|
|
+ (Visibility::Inherited, Visibility::Inherited) => true,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for WhereClause {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for WhereClause {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.predicates == other.predicates
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Eq for WherePredicate {}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl PartialEq for WherePredicate {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ match (self, other) {
|
|
+ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
|
|
+ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
|
|
+ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
|
|
+ _ => false,
|
|
+ }
|
|
+ }
|
|
+}
|
|
diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
--- a/third_party/rust/syn/src/gen/fold.rs
+++ b/third_party/rust/syn/src/gen/fold.rs
@@ -1,12 +1,13 @@
 // This file is @generated by syn-internal-codegen.
 // It is not intended for manual editing.

 #![allow(unreachable_code, unused_variables)]
+#![allow(clippy::match_wildcard_for_single_variants)]
 #[cfg(any(feature = "full", feature = "derive"))]
 use crate::gen::helper::fold::*;
 #[cfg(any(feature = "full", feature = "derive"))]
 use crate::token::{Brace, Bracket, Group, Paren};
 use crate::*;
 use proc_macro2::Span;
 #[cfg(feature = "full")]
 macro_rules! full {
@@ -21,17 +22,17 @@ macro_rules! full {
     };
 }
 /// Syntax tree traversal to transform the nodes of an owned syntax tree.
 ///
 /// See the [module documentation] for details.
 ///
 /// [module documentation]: self
 ///
-/// *This trait is available if Syn is built with the `"fold"` feature.*
+/// *This trait is available only if Syn is built with the `"fold"` feature.*
 pub trait Fold {
     #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_abi(&mut self, i: Abi) -> Abi {
         fold_abi(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_angle_bracketed_generic_arguments(
         &mut self,
@@ -428,45 +429,37 @@ pub trait Fold {
     }
     fn fold_lifetime(&mut self, i: Lifetime) -> Lifetime {
         fold_lifetime(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
         fold_lifetime_def(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit(&mut self, i: Lit) -> Lit {
         fold_lit(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
         fold_lit_bool(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
         fold_lit_byte(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
         fold_lit_byte_str(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
         fold_lit_char(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
         fold_lit_float(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
         fold_lit_int(self, i)
     }
-    #[cfg(any(feature = "derive", feature = "full"))]
     fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
         fold_lit_str(self, i)
     }
     #[cfg(feature = "full")]
     fn fold_local(&mut self, i: Local) -> Local {
         fold_local(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
@@ -794,39 +787,39 @@ where
 pub fn fold_angle_bracketed_generic_arguments<F>(
     f: &mut F,
     node: AngleBracketedGenericArguments,
 ) -> AngleBracketedGenericArguments
 where
     F: Fold + ?Sized,
 {
     AngleBracketedGenericArguments {
-        colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
-        lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
+        colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+        lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
         args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
-        gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
+        gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
     }
 }
 #[cfg(feature = "full")]
 pub fn fold_arm<F>(f: &mut F, node: Arm) -> Arm
 where
     F: Fold + ?Sized,
 {
     Arm {
         attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
         pat: f.fold_pat(node.pat),
         guard: (node.guard).map(|it| {
             (
                 Token![if](tokens_helper(f, &(it).0.span)),
                 Box::new(f.fold_expr(*(it).1)),
             )
         }),
-        fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
+        fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
         body: Box::new(f.fold_expr(*node.body)),
-        comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
+        comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
     }
 }
 #[cfg(any(feature = "derive", feature = "full"))]
 pub fn fold_attr_style<F>(f: &mut F, node: AttrStyle) -> AttrStyle
 where
     F: Fold + ?Sized,
 {
     match node {
@@ -837,109 +830,97 @@ where
     }
 }
 #[cfg(any(feature = "derive", feature = "full"))]
 pub fn fold_attribute<F>(f: &mut F, node: Attribute) -> Attribute
 where
     F: Fold + ?Sized,
 {
     Attribute {
-        pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
+        pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
         style: f.fold_attr_style(node.style),
         bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
         path: f.fold_path(node.path),
         tokens: node.tokens,
     }
 }
 #[cfg(any(feature = "derive", feature = "full"))]
 pub fn fold_bare_fn_arg<F>(f: &mut F, node: BareFnArg) -> BareFnArg
 where
     F: Fold + ?Sized,
 {
     BareFnArg {
         attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
         name: (node.name).map(|it| {
             (
                 f.fold_ident((it).0),
-                Token ! [ : ](tokens_helper(f, &(it).1.spans)),
+                Token ! [:](tokens_helper(f, &(it).1.spans)),
             )
         }),
         ty: f.fold_type(node.ty),
     }
 }
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_bin_op<F>(f: &mut F, node: BinOp) -> BinOp
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
|
|
BinOp::BitXor(_binding_0) => {
|
|
- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
BinOp::BitAnd(_binding_0) => {
|
|
- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
|
|
- }
|
|
- BinOp::BitOr(_binding_0) => {
|
|
- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
|
|
- BinOp::AddEq(_binding_0) => {
|
|
- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
|
|
- }
|
|
- BinOp::SubEq(_binding_0) => {
|
|
- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
|
|
- }
|
|
- BinOp::MulEq(_binding_0) => {
|
|
- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
|
|
- }
|
|
- BinOp::DivEq(_binding_0) => {
|
|
- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
|
|
- }
|
|
- BinOp::RemEq(_binding_0) => {
|
|
- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
|
|
- }
|
|
+ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
|
|
+ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
|
|
BinOp::BitXorEq(_binding_0) => {
|
|
- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
BinOp::BitAndEq(_binding_0) => {
|
|
- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
BinOp::BitOrEq(_binding_0) => {
|
|
- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
BinOp::ShlEq(_binding_0) => {
|
|
- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
BinOp::ShrEq(_binding_0) => {
|
|
- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
|
|
+ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_binding<F>(f: &mut F, node: Binding) -> Binding
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Binding {
|
|
ident: f.fold_ident(node.ident),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
ty: f.fold_type(node.ty),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_block<F>(f: &mut F, node: Block) -> Block
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
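The hunks above only change how syn's code generator prints these Token! invocations: Token ! [ ; ] and Token ! [;] are the same macro call, because whitespace inside a macro's bracketed token tree is insignificant, so the produced token types are unchanged. A minimal sketch of that equivalence (illustration only, not part of the patch; assumes a syn 1.x dependency like the one vendored here):

    use syn::Token;

    fn _spacing_is_irrelevant() {
        // Both spellings name the same type, syn::token::Semi; syn's token
        // types implement Default with call-site spans.
        let a: Token![;] = Default::default();
        let b: Token ! [ ; ] = Default::default();
        let _both: [syn::token::Semi; 2] = [a, b];
    }
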
@@ -950,44 +931,44 @@ where
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_bound_lifetimes<F>(f: &mut F, node: BoundLifetimes) -> BoundLifetimes
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
BoundLifetimes {
|
|
for_token: Token![for](tokens_helper(f, &node.for_token.span)),
|
|
- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
|
|
+ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
|
|
lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
|
|
- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
|
|
+ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_const_param<F>(f: &mut F, node: ConstParam) -> ConstParam
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ConstParam {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: f.fold_type(node.ty),
|
|
- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
|
|
+ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
|
|
default: (node.default).map(|it| f.fold_expr(it)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_constraint<F>(f: &mut F, node: Constraint) -> Constraint
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Constraint {
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
|
|
}
|
|
}
|
|
#[cfg(feature = "derive")]
|
|
pub fn fold_data<F>(f: &mut F, node: Data) -> Data
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1011,17 +992,17 @@ where
|
|
#[cfg(feature = "derive")]
|
|
pub fn fold_data_struct<F>(f: &mut F, node: DataStruct) -> DataStruct
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
DataStruct {
|
|
struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
|
|
fields: f.fold_fields(node.fields),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "derive")]
|
|
pub fn fold_data_union<F>(f: &mut F, node: DataUnion) -> DataUnion
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
DataUnion {
|
|
@@ -1107,17 +1088,17 @@ where
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_assign<F>(f: &mut F, node: ExprAssign) -> ExprAssign
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprAssign {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
left: Box::new(f.fold_expr(*node.left)),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
right: Box::new(f.fold_expr(*node.right)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_assign_op<F>(f: &mut F, node: ExprAssignOp) -> ExprAssignOp
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1143,17 +1124,17 @@ where
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_await<F>(f: &mut F, node: ExprAwait) -> ExprAwait
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprAwait {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
base: Box::new(f.fold_expr(*node.base)),
|
|
- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
|
|
+ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
|
|
await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_expr_binary<F>(f: &mut F, node: ExprBinary) -> ExprBinary
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1227,19 +1208,19 @@ pub fn fold_expr_closure<F>(f: &mut F, n
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprClosure {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
|
|
movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
|
|
capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
|
|
- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
|
|
+ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
|
|
inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
|
|
- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
|
|
+ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
|
|
output: f.fold_return_type(node.output),
|
|
body: Box::new(f.fold_expr(*node.body)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_continue<F>(f: &mut F, node: ExprContinue) -> ExprContinue
|
|
where
|
|
F: Fold + ?Sized,
|
|
@@ -1253,17 +1234,17 @@ where
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_expr_field<F>(f: &mut F, node: ExprField) -> ExprField
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprField {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
base: Box::new(f.fold_expr(*node.base)),
|
|
- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
|
|
+ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
|
|
member: f.fold_member(node.member),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_for_loop<F>(f: &mut F, node: ExprForLoop) -> ExprForLoop
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1322,17 +1303,17 @@ where
|
|
pub fn fold_expr_let<F>(f: &mut F, node: ExprLet) -> ExprLet
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprLet {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
let_token: Token![let](tokens_helper(f, &node.let_token.span)),
|
|
pat: f.fold_pat(node.pat),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_expr_lit<F>(f: &mut F, node: ExprLit) -> ExprLit
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1379,17 +1360,17 @@ where
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_method_call<F>(f: &mut F, node: ExprMethodCall) -> ExprMethodCall
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprMethodCall {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
receiver: Box::new(f.fold_expr(*node.receiver)),
|
|
- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
|
|
+ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
|
|
method: f.fold_ident(node.method),
|
|
turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
|
|
paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
|
|
args: FoldHelper::lift(node.args, |it| f.fold_expr(it)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_expr_paren<F>(f: &mut F, node: ExprParen) -> ExprParen
|
|
@@ -1427,32 +1408,32 @@ where
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_reference<F>(f: &mut F, node: ExprReference) -> ExprReference
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprReference {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
|
|
+ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
|
|
raw: node.raw,
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_repeat<F>(f: &mut F, node: ExprRepeat) -> ExprRepeat
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprRepeat {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
len: Box::new(f.fold_expr(*node.len)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_return<F>(f: &mut F, node: ExprReturn) -> ExprReturn
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1479,17 +1460,17 @@ where
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_try<F>(f: &mut F, node: ExprTry) -> ExprTry
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprTry {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
|
|
+ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_try_block<F>(f: &mut F, node: ExprTryBlock) -> ExprTryBlock
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprTryBlock {
|
|
@@ -1512,17 +1493,17 @@ where
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_expr_type<F>(f: &mut F, node: ExprType) -> ExprType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ExprType {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_expr_unary<F>(f: &mut F, node: ExprUnary) -> ExprUnary
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1571,41 +1552,41 @@ where
|
|
pub fn fold_field<F>(f: &mut F, node: Field) -> Field
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Field {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
ident: (node.ident).map(|it| f.fold_ident(it)),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
ty: f.fold_type(node.ty),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_field_pat<F>(f: &mut F, node: FieldPat) -> FieldPat
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
FieldPat {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
member: f.fold_member(node.member),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
pat: Box::new(f.fold_pat(*node.pat)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_field_value<F>(f: &mut F, node: FieldValue) -> FieldValue
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
FieldValue {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
member: f.fold_member(node.member),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
expr: f.fold_expr(node.expr),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_fields<F>(f: &mut F, node: Fields) -> Fields
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -1676,57 +1657,57 @@ where
|
|
pub fn fold_foreign_item_fn<F>(f: &mut F, node: ForeignItemFn) -> ForeignItemFn
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ForeignItemFn {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
sig: f.fold_signature(node.sig),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_foreign_item_macro<F>(f: &mut F, node: ForeignItemMacro) -> ForeignItemMacro
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ForeignItemMacro {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
mac: f.fold_macro(node.mac),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_foreign_item_static<F>(f: &mut F, node: ForeignItemStatic) -> ForeignItemStatic
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ForeignItemStatic {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
static_token: Token![static](tokens_helper(f, &node.static_token.span)),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_foreign_item_type<F>(f: &mut F, node: ForeignItemType) -> ForeignItemType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ForeignItemType {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_generic_argument<F>(f: &mut F, node: GenericArgument) -> GenericArgument
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
@@ -1774,19 +1755,19 @@ where
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_generics<F>(f: &mut F, node: Generics) -> Generics
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Generics {
|
|
- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
|
|
+ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
|
|
params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
|
|
- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
|
|
+ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
|
|
where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
|
|
}
|
|
}
|
|
pub fn fold_ident<F>(f: &mut F, node: Ident) -> Ident
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let mut node = node;
|
|
@@ -1814,32 +1795,32 @@ where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ImplItemConst {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
|
|
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: f.fold_type(node.ty),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
expr: f.fold_expr(node.expr),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_impl_item_macro<F>(f: &mut F, node: ImplItemMacro) -> ImplItemMacro
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ImplItemMacro {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
mac: f.fold_macro(node.mac),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_impl_item_method<F>(f: &mut F, node: ImplItemMethod) -> ImplItemMethod
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ImplItemMethod {
|
|
@@ -1857,19 +1838,19 @@ where
|
|
{
|
|
ImplItemType {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
|
|
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
generics: f.fold_generics(node.generics),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
ty: f.fold_type(node.ty),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_index<F>(f: &mut F, node: Index) -> Index
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Index {
|
|
@@ -1908,21 +1889,21 @@ pub fn fold_item_const<F>(f: &mut F, nod
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemConst {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_enum<F>(f: &mut F, node: ItemEnum) -> ItemEnum
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemEnum {
|
|
@@ -1947,17 +1928,17 @@ where
|
|
crate_token: Token![crate](tokens_helper(f, &node.crate_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
rename: (node.rename).map(|it| {
|
|
(
|
|
Token![as](tokens_helper(f, &(it).0.span)),
|
|
f.fold_ident((it).1),
|
|
)
|
|
}),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_fn<F>(f: &mut F, node: ItemFn) -> ItemFn
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemFn {
|
|
@@ -2006,17 +1987,17 @@ where
|
|
pub fn fold_item_macro<F>(f: &mut F, node: ItemMacro) -> ItemMacro
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemMacro {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
ident: (node.ident).map(|it| f.fold_ident(it)),
|
|
mac: f.fold_macro(node.mac),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_macro2<F>(f: &mut F, node: ItemMacro2) -> ItemMacro2
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemMacro2 {
|
|
@@ -2038,101 +2019,101 @@ where
|
|
mod_token: Token![mod](tokens_helper(f, &node.mod_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
content: (node.content).map(|it| {
|
|
(
|
|
Brace(tokens_helper(f, &(it).0.span)),
|
|
FoldHelper::lift((it).1, |it| f.fold_item(it)),
|
|
)
|
|
}),
|
|
- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_static<F>(f: &mut F, node: ItemStatic) -> ItemStatic
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemStatic {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
static_token: Token![static](tokens_helper(f, &node.static_token.span)),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
expr: Box::new(f.fold_expr(*node.expr)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_struct<F>(f: &mut F, node: ItemStruct) -> ItemStruct
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemStruct {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
generics: f.fold_generics(node.generics),
|
|
fields: f.fold_fields(node.fields),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_trait<F>(f: &mut F, node: ItemTrait) -> ItemTrait
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemTrait {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
unsafety: (node.unsafety).map(|it| Token![unsafe](tokens_helper(f, &it.span))),
|
|
auto_token: (node.auto_token).map(|it| Token![auto](tokens_helper(f, &it.span))),
|
|
trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
generics: f.fold_generics(node.generics),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
|
|
brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
|
|
items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_trait_alias<F>(f: &mut F, node: ItemTraitAlias) -> ItemTraitAlias
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemTraitAlias {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
generics: f.fold_generics(node.generics),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_type<F>(f: &mut F, node: ItemType) -> ItemType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemType {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
generics: f.fold_generics(node.generics),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_item_union<F>(f: &mut F, node: ItemUnion) -> ItemUnion
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemUnion {
|
|
@@ -2148,29 +2129,29 @@ where
|
|
pub fn fold_item_use<F>(f: &mut F, node: ItemUse) -> ItemUse
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
ItemUse {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
vis: f.fold_visibility(node.vis),
|
|
use_token: Token![use](tokens_helper(f, &node.use_token.span)),
|
|
- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
|
|
+ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
|
|
tree: f.fold_use_tree(node.tree),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_label<F>(f: &mut F, node: Label) -> Label
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Label {
|
|
name: f.fold_lifetime(node.name),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
}
|
|
}
|
|
pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Lifetime {
|
|
apostrophe: f.fold_span(node.apostrophe),
|
|
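These fold_* free functions are the generated backing for syn's Fold trait, which rebuilds an owned syntax tree while letting an implementor override how individual node kinds are transformed. A minimal usage sketch (illustration only, not part of the patch; assumes syn 1.x with the "fold" and "full" features enabled, and the Renamer type is purely hypothetical):

    use syn::fold::Fold;
    use syn::{parse_quote, Expr, Ident};

    // Illustrative transformer: rewrites every identifier named x to y.
    struct Renamer;

    impl Fold for Renamer {
        fn fold_ident(&mut self, ident: Ident) -> Ident {
            if ident == "x" {
                Ident::new("y", ident.span())
            } else {
                ident
            }
        }
    }

    fn rename_example() {
        let expr: Expr = parse_quote!(x + x * 2);
        // Every other node kind falls through to the generated default fold.
        let folded = Renamer.fold_expr(expr);
        let _ = folded; // structurally y + y * 2
    }
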
@@ -2180,97 +2161,89 @@ where
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lifetime_def<F>(f: &mut F, node: LifetimeDef) -> LifetimeDef
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
LifetimeDef {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
lifetime: f.fold_lifetime(node.lifetime),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
|
|
}
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
Lit::Str(_binding_0) => Lit::Str(f.fold_lit_str(_binding_0)),
|
|
Lit::ByteStr(_binding_0) => Lit::ByteStr(f.fold_lit_byte_str(_binding_0)),
|
|
Lit::Byte(_binding_0) => Lit::Byte(f.fold_lit_byte(_binding_0)),
|
|
Lit::Char(_binding_0) => Lit::Char(f.fold_lit_char(_binding_0)),
|
|
Lit::Int(_binding_0) => Lit::Int(f.fold_lit_int(_binding_0)),
|
|
Lit::Float(_binding_0) => Lit::Float(f.fold_lit_float(_binding_0)),
|
|
Lit::Bool(_binding_0) => Lit::Bool(f.fold_lit_bool(_binding_0)),
|
|
Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
|
|
}
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
LitBool {
|
|
value: node.value,
|
|
span: f.fold_span(node.span),
|
|
}
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let span = f.fold_span(node.span());
|
|
let mut node = node;
|
|
node.set_span(span);
|
|
node
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let span = f.fold_span(node.span());
|
|
let mut node = node;
|
|
node.set_span(span);
|
|
node
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let span = f.fold_span(node.span());
|
|
let mut node = node;
|
|
node.set_span(span);
|
|
node
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let span = f.fold_span(node.span());
|
|
let mut node = node;
|
|
node.set_span(span);
|
|
node
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let span = f.fold_span(node.span());
|
|
let mut node = node;
|
|
node.set_span(span);
|
|
node
|
|
}
|
|
-#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
let span = f.fold_span(node.span());
|
|
let mut node = node;
|
|
node.set_span(span);
|
|
node
|
|
@@ -2281,21 +2254,21 @@ where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Local {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
let_token: Token![let](tokens_helper(f, &node.let_token.span)),
|
|
pat: f.fold_pat(node.pat),
|
|
init: (node.init).map(|it| {
|
|
(
|
|
- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
|
|
+ Token ! [=](tokens_helper(f, &(it).0.spans)),
|
|
Box::new(f.fold_expr(*(it).1)),
|
|
)
|
|
}),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_macro<F>(f: &mut F, node: Macro) -> Macro
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Macro {
|
|
@@ -2356,30 +2329,30 @@ where
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_meta_name_value<F>(f: &mut F, node: MetaNameValue) -> MetaNameValue
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
MetaNameValue {
|
|
path: f.fold_path(node.path),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
lit: f.fold_lit(node.lit),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_method_turbofish<F>(f: &mut F, node: MethodTurbofish) -> MethodTurbofish
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
MethodTurbofish {
|
|
- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
|
|
- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
|
|
+ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
|
|
+ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
|
|
args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
|
|
- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
|
|
+ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_nested_meta<F>(f: &mut F, node: NestedMeta) -> NestedMeta
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
@@ -2444,17 +2417,17 @@ where
|
|
{
|
|
PatIdent {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
by_ref: (node.by_ref).map(|it| Token![ref](tokens_helper(f, &it.span))),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
ident: f.fold_ident(node.ident),
|
|
subpat: (node.subpat).map(|it| {
|
|
(
|
|
- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
|
|
+ Token ! [@](tokens_helper(f, &(it).0.spans)),
|
|
Box::new(f.fold_pat(*(it).1)),
|
|
)
|
|
}),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_lit<F>(f: &mut F, node: PatLit) -> PatLit
|
|
where
|
|
@@ -2477,17 +2450,17 @@ where
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_or<F>(f: &mut F, node: PatOr) -> PatOr
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
PatOr {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
|
|
+ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
|
|
cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_path<F>(f: &mut F, node: PatPath) -> PatPath
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -2511,17 +2484,17 @@ where
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_reference<F>(f: &mut F, node: PatReference) -> PatReference
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
PatReference {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
|
|
+ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
pat: Box::new(f.fold_pat(*node.pat)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_rest<F>(f: &mut F, node: PatRest) -> PatRest
|
|
where
|
|
F: Fold + ?Sized,
|
|
@@ -2580,17 +2553,17 @@ where
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_type<F>(f: &mut F, node: PatType) -> PatType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
PatType {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
pat: Box::new(f.fold_pat(*node.pat)),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_pat_wild<F>(f: &mut F, node: PatWild) -> PatWild
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -2600,17 +2573,17 @@ where
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_path<F>(f: &mut F, node: Path) -> Path
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Path {
|
|
- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
|
|
+ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
|
|
segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_path_arguments<F>(f: &mut F, node: PathArguments) -> PathArguments
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -2636,96 +2609,96 @@ where
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_predicate_eq<F>(f: &mut F, node: PredicateEq) -> PredicateEq
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
PredicateEq {
|
|
lhs_ty: f.fold_type(node.lhs_ty),
|
|
- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
|
|
+ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
|
|
rhs_ty: f.fold_type(node.rhs_ty),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_predicate_lifetime<F>(f: &mut F, node: PredicateLifetime) -> PredicateLifetime
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
PredicateLifetime {
|
|
lifetime: f.fold_lifetime(node.lifetime),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_predicate_type<F>(f: &mut F, node: PredicateType) -> PredicateType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
PredicateType {
|
|
lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
|
|
bounded_ty: f.fold_type(node.bounded_ty),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_qself<F>(f: &mut F, node: QSelf) -> QSelf
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
QSelf {
|
|
- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
|
|
+ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
|
|
ty: Box::new(f.fold_type(*node.ty)),
|
|
position: node.position,
|
|
as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
|
|
- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
|
|
+ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_range_limits<F>(f: &mut F, node: RangeLimits) -> RangeLimits
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
RangeLimits::HalfOpen(_binding_0) => {
|
|
RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
RangeLimits::Closed(_binding_0) => {
|
|
- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
|
|
+ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_receiver<F>(f: &mut F, node: Receiver) -> Receiver
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Receiver {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
reference: (node.reference).map(|it| {
|
|
(
|
|
- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
|
|
+ Token ! [&](tokens_helper(f, &(it).0.spans)),
|
|
((it).1).map(|it| f.fold_lifetime(it)),
|
|
)
|
|
}),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
self_token: Token![self](tokens_helper(f, &node.self_token.span)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_return_type<F>(f: &mut F, node: ReturnType) -> ReturnType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
ReturnType::Default => ReturnType::Default,
|
|
ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
|
|
- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
|
|
+ Token ! [->](tokens_helper(f, &_binding_0.spans)),
|
|
Box::new(f.fold_type(*_binding_1)),
|
|
),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_signature<F>(f: &mut F, node: Signature) -> Signature
|
|
where
|
|
F: Fold + ?Sized,
|
|
@@ -2756,17 +2729,17 @@ where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
Stmt::Local(_binding_0) => Stmt::Local(f.fold_local(_binding_0)),
|
|
Stmt::Item(_binding_0) => Stmt::Item(f.fold_item(_binding_0)),
|
|
Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
|
|
Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
|
|
f.fold_expr(_binding_0),
|
|
- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
|
|
+ Token ! [;](tokens_helper(f, &_binding_1.spans)),
|
|
),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_trait_bound<F>(f: &mut F, node: TraitBound) -> TraitBound
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -2780,17 +2753,17 @@ where
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_trait_bound_modifier<F>(f: &mut F, node: TraitBoundModifier) -> TraitBoundModifier
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
TraitBoundModifier::None => TraitBoundModifier::None,
|
|
TraitBoundModifier::Maybe(_binding_0) => {
|
|
- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
|
|
+ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
|
|
}
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_trait_item<F>(f: &mut F, node: TraitItem) -> TraitItem
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -2807,69 +2780,69 @@ where
|
|
pub fn fold_trait_item_const<F>(f: &mut F, node: TraitItemConst) -> TraitItemConst
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TraitItemConst {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
|
|
+ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
|
|
ty: f.fold_type(node.ty),
|
|
default: (node.default).map(|it| {
|
|
(
|
|
- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
|
|
+ Token ! [=](tokens_helper(f, &(it).0.spans)),
|
|
f.fold_expr((it).1),
|
|
)
|
|
}),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_trait_item_macro<F>(f: &mut F, node: TraitItemMacro) -> TraitItemMacro
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TraitItemMacro {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
mac: f.fold_macro(node.mac),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_trait_item_method<F>(f: &mut F, node: TraitItemMethod) -> TraitItemMethod
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TraitItemMethod {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
sig: f.fold_signature(node.sig),
|
|
default: (node.default).map(|it| f.fold_block(it)),
|
|
- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
|
|
+ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_trait_item_type<F>(f: &mut F, node: TraitItemType) -> TraitItemType
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TraitItemType {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
|
|
ident: f.fold_ident(node.ident),
|
|
generics: f.fold_generics(node.generics),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
|
|
default: (node.default).map(|it| {
|
|
(
|
|
- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
|
|
+ Token ! [=](tokens_helper(f, &(it).0.spans)),
|
|
f.fold_type((it).1),
|
|
)
|
|
}),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type<F>(f: &mut F, node: Type) -> Type
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
@@ -2894,17 +2867,17 @@ where
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_array<F>(f: &mut F, node: TypeArray) -> TypeArray
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TypeArray {
|
|
bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
|
|
elem: Box::new(f.fold_type(*node.elem)),
|
|
- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
|
|
+ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
|
|
len: f.fold_expr(node.len),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_bare_fn<F>(f: &mut F, node: TypeBareFn) -> TypeBareFn
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -2969,19 +2942,19 @@ where
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_param<F>(f: &mut F, node: TypeParam) -> TypeParam
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TypeParam {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
ident: f.fold_ident(node.ident),
|
|
- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
|
|
+ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
|
|
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
|
|
- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
|
|
+ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
|
|
default: (node.default).map(|it| f.fold_type(it)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_param_bound<F>(f: &mut F, node: TypeParamBound) -> TypeParamBound
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -3013,29 +2986,29 @@ where
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_ptr<F>(f: &mut F, node: TypePtr) -> TypePtr
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TypePtr {
|
|
- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
|
|
+ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
|
|
const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
elem: Box::new(f.fold_type(*node.elem)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_reference<F>(f: &mut F, node: TypeReference) -> TypeReference
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
TypeReference {
|
|
- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
|
|
+ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
|
|
lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
|
|
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
|
|
elem: Box::new(f.fold_type(*node.elem)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_type_slice<F>(f: &mut F, node: TypeSlice) -> TypeSlice
|
|
where
|
|
@@ -3067,28 +3040,28 @@ where
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_un_op<F>(f: &mut F, node: UnOp) -> UnOp
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
match node {
|
|
- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
|
|
+ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
|
|
UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
|
|
- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
|
|
+ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_use_glob<F>(f: &mut F, node: UseGlob) -> UseGlob
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
UseGlob {
|
|
- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
|
|
+ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_use_group<F>(f: &mut F, node: UseGroup) -> UseGroup
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
UseGroup {
|
|
@@ -3107,17 +3080,17 @@ where
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_use_path<F>(f: &mut F, node: UsePath) -> UsePath
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
UsePath {
|
|
ident: f.fold_ident(node.ident),
|
|
- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
|
|
+ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
|
|
tree: Box::new(f.fold_use_tree(*node.tree)),
|
|
}
|
|
}
|
|
#[cfg(feature = "full")]
|
|
pub fn fold_use_rename<F>(f: &mut F, node: UseRename) -> UseRename
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
@@ -3142,31 +3115,31 @@ where
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_variadic<F>(f: &mut F, node: Variadic) -> Variadic
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Variadic {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
|
|
+ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_variant<F>(f: &mut F, node: Variant) -> Variant
|
|
where
|
|
F: Fold + ?Sized,
|
|
{
|
|
Variant {
|
|
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
|
|
ident: f.fold_ident(node.ident),
|
|
fields: f.fold_fields(node.fields),
|
|
discriminant: (node.discriminant).map(|it| {
|
|
(
|
|
- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
|
|
+ Token ! [=](tokens_helper(f, &(it).0.spans)),
|
|
f.fold_expr((it).1),
|
|
)
|
|
}),
|
|
}
|
|
}
|
|
#[cfg(any(feature = "derive", feature = "full"))]
|
|
pub fn fold_vis_crate<F>(f: &mut F, node: VisCrate) -> VisCrate
|
|
where
|
|
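The new file added below is the regenerated Hash implementation for syn's AST types (gated, as far as I can tell, behind syn's "extra-traits" feature), which lets parsed nodes be stored in hashed collections. A minimal sketch of what those impls enable (illustration only, not part of the patch; assumes syn 1.x with "extra-traits" and "full"):

    use std::collections::HashSet;
    use syn::{parse_quote, Expr};

    fn dedup_example() {
        // With the generated Hash/PartialEq impls, structurally identical
        // expressions hash and compare equal; spans are not hashed.
        let mut seen: HashSet<Expr> = HashSet::new();
        let first: Expr = parse_quote!(a + b);
        let second: Expr = parse_quote!(a + b);
        assert!(seen.insert(first));
        assert!(!seen.insert(second));
    }
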
diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/syn/src/gen/hash.rs
|
|
@@ -0,0 +1,2691 @@
|
|
+// This file is @generated by syn-internal-codegen.
|
|
+// It is not intended for manual editing.
|
|
+
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+use crate::tt::TokenStreamHelper;
|
|
+use crate::*;
|
|
+use std::hash::{Hash, Hasher};
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Abi {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.name.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for AngleBracketedGenericArguments {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.colon2_token.hash(state);
|
|
+ self.args.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Arm {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ self.guard.hash(state);
|
|
+ self.body.hash(state);
|
|
+ self.comma.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for AttrStyle {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ AttrStyle::Outer => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ AttrStyle::Inner(_) => {
|
|
+ state.write_u8(1u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Attribute {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.style.hash(state);
|
|
+ self.path.hash(state);
|
|
+ TokenStreamHelper(&self.tokens).hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for BareFnArg {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.name.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for BinOp {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ BinOp::Add(_) => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ BinOp::Sub(_) => {
|
|
+ state.write_u8(1u8);
|
|
+ }
|
|
+ BinOp::Mul(_) => {
|
|
+ state.write_u8(2u8);
|
|
+ }
|
|
+ BinOp::Div(_) => {
|
|
+ state.write_u8(3u8);
|
|
+ }
|
|
+ BinOp::Rem(_) => {
|
|
+ state.write_u8(4u8);
|
|
+ }
|
|
+ BinOp::And(_) => {
|
|
+ state.write_u8(5u8);
|
|
+ }
|
|
+ BinOp::Or(_) => {
|
|
+ state.write_u8(6u8);
|
|
+ }
|
|
+ BinOp::BitXor(_) => {
|
|
+ state.write_u8(7u8);
|
|
+ }
|
|
+ BinOp::BitAnd(_) => {
|
|
+ state.write_u8(8u8);
|
|
+ }
|
|
+ BinOp::BitOr(_) => {
|
|
+ state.write_u8(9u8);
|
|
+ }
|
|
+ BinOp::Shl(_) => {
|
|
+ state.write_u8(10u8);
|
|
+ }
|
|
+ BinOp::Shr(_) => {
|
|
+ state.write_u8(11u8);
|
|
+ }
|
|
+ BinOp::Eq(_) => {
|
|
+ state.write_u8(12u8);
|
|
+ }
|
|
+ BinOp::Lt(_) => {
|
|
+ state.write_u8(13u8);
|
|
+ }
|
|
+ BinOp::Le(_) => {
|
|
+ state.write_u8(14u8);
|
|
+ }
|
|
+ BinOp::Ne(_) => {
|
|
+ state.write_u8(15u8);
|
|
+ }
|
|
+ BinOp::Ge(_) => {
|
|
+ state.write_u8(16u8);
|
|
+ }
|
|
+ BinOp::Gt(_) => {
|
|
+ state.write_u8(17u8);
|
|
+ }
|
|
+ BinOp::AddEq(_) => {
|
|
+ state.write_u8(18u8);
|
|
+ }
|
|
+ BinOp::SubEq(_) => {
|
|
+ state.write_u8(19u8);
|
|
+ }
|
|
+ BinOp::MulEq(_) => {
|
|
+ state.write_u8(20u8);
|
|
+ }
|
|
+ BinOp::DivEq(_) => {
|
|
+ state.write_u8(21u8);
|
|
+ }
|
|
+ BinOp::RemEq(_) => {
|
|
+ state.write_u8(22u8);
|
|
+ }
|
|
+ BinOp::BitXorEq(_) => {
|
|
+ state.write_u8(23u8);
|
|
+ }
|
|
+ BinOp::BitAndEq(_) => {
|
|
+ state.write_u8(24u8);
|
|
+ }
|
|
+ BinOp::BitOrEq(_) => {
|
|
+ state.write_u8(25u8);
|
|
+ }
|
|
+ BinOp::ShlEq(_) => {
|
|
+ state.write_u8(26u8);
|
|
+ }
|
|
+ BinOp::ShrEq(_) => {
|
|
+ state.write_u8(27u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Binding {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Block {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.stmts.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for BoundLifetimes {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lifetimes.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ConstParam {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ self.eq_token.hash(state);
|
|
+ self.default.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Constraint {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ident.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Hash for Data {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Data::Struct(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Data::Enum(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Data::Union(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Hash for DataEnum {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.variants.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Hash for DataStruct {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.fields.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Hash for DataUnion {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.fields.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "derive")]
|
|
+impl Hash for DeriveInput {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.data.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Expr {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Array(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Assign(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::AssignOp(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Async(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Await(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Binary(v0) => {
|
|
+ state.write_u8(5u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Block(v0) => {
|
|
+ state.write_u8(6u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Box(v0) => {
|
|
+ state.write_u8(7u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Break(v0) => {
|
|
+ state.write_u8(8u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Call(v0) => {
|
|
+ state.write_u8(9u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Cast(v0) => {
|
|
+ state.write_u8(10u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Closure(v0) => {
|
|
+ state.write_u8(11u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Continue(v0) => {
|
|
+ state.write_u8(12u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Field(v0) => {
|
|
+ state.write_u8(13u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::ForLoop(v0) => {
|
|
+ state.write_u8(14u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Group(v0) => {
|
|
+ state.write_u8(15u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::If(v0) => {
|
|
+ state.write_u8(16u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Index(v0) => {
|
|
+ state.write_u8(17u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Let(v0) => {
|
|
+ state.write_u8(18u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Lit(v0) => {
|
|
+ state.write_u8(19u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Loop(v0) => {
|
|
+ state.write_u8(20u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Macro(v0) => {
|
|
+ state.write_u8(21u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Match(v0) => {
|
|
+ state.write_u8(22u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::MethodCall(v0) => {
|
|
+ state.write_u8(23u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Paren(v0) => {
|
|
+ state.write_u8(24u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Path(v0) => {
|
|
+ state.write_u8(25u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Range(v0) => {
|
|
+ state.write_u8(26u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Reference(v0) => {
|
|
+ state.write_u8(27u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Repeat(v0) => {
|
|
+ state.write_u8(28u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Return(v0) => {
|
|
+ state.write_u8(29u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Struct(v0) => {
|
|
+ state.write_u8(30u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Try(v0) => {
|
|
+ state.write_u8(31u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::TryBlock(v0) => {
|
|
+ state.write_u8(32u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Tuple(v0) => {
|
|
+ state.write_u8(33u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Type(v0) => {
|
|
+ state.write_u8(34u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Unary(v0) => {
|
|
+ state.write_u8(35u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Unsafe(v0) => {
|
|
+ state.write_u8(36u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Expr::Verbatim(v0) => {
|
|
+ state.write_u8(37u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::While(v0) => {
|
|
+ state.write_u8(38u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ #[cfg(feature = "full")]
|
|
+ Expr::Yield(v0) => {
|
|
+ state.write_u8(39u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprArray {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.elems.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprAssign {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.left.hash(state);
|
|
+ self.right.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprAssignOp {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.left.hash(state);
|
|
+ self.op.hash(state);
|
|
+ self.right.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprAsync {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.capture.hash(state);
|
|
+ self.block.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprAwait {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.base.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprBinary {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.left.hash(state);
|
|
+ self.op.hash(state);
|
|
+ self.right.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprBlock {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.label.hash(state);
|
|
+ self.block.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprBox {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprBreak {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.label.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprCall {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.func.hash(state);
|
|
+ self.args.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprCast {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprClosure {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.asyncness.hash(state);
|
|
+ self.movability.hash(state);
|
|
+ self.capture.hash(state);
|
|
+ self.inputs.hash(state);
|
|
+ self.output.hash(state);
|
|
+ self.body.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprContinue {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.label.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprField {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.base.hash(state);
|
|
+ self.member.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprForLoop {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.label.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ self.body.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprGroup {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprIf {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.cond.hash(state);
|
|
+ self.then_branch.hash(state);
|
|
+ self.else_branch.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprIndex {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ self.index.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprLet {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprLit {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.lit.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprLoop {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.label.hash(state);
|
|
+ self.body.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mac.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprMatch {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ self.arms.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprMethodCall {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.receiver.hash(state);
|
|
+ self.method.hash(state);
|
|
+ self.turbofish.hash(state);
|
|
+ self.args.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprParen {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprPath {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.qself.hash(state);
|
|
+ self.path.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprRange {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.from.hash(state);
|
|
+ self.limits.hash(state);
|
|
+ self.to.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprReference {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprRepeat {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ self.len.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprReturn {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprStruct {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.path.hash(state);
|
|
+ self.fields.hash(state);
|
|
+ self.dot2_token.hash(state);
|
|
+ self.rest.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprTry {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprTryBlock {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.block.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprTuple {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.elems.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ExprUnary {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.op.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprUnsafe {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.block.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprWhile {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.label.hash(state);
|
|
+ self.cond.hash(state);
|
|
+ self.body.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ExprYield {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Field {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for FieldPat {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.member.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for FieldValue {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.member.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Fields {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Fields::Named(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Fields::Unnamed(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Fields::Unit => {
|
|
+ state.write_u8(2u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for FieldsNamed {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.named.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for FieldsUnnamed {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.unnamed.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for File {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.shebang.hash(state);
|
|
+ self.attrs.hash(state);
|
|
+ self.items.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for FnArg {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ FnArg::Receiver(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ FnArg::Typed(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ForeignItem {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ ForeignItem::Fn(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ForeignItem::Static(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ForeignItem::Type(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ForeignItem::Macro(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ForeignItem::Verbatim(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ForeignItemFn {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.sig.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ForeignItemMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mac.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ForeignItemStatic {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ForeignItemType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for GenericArgument {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ GenericArgument::Lifetime(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericArgument::Type(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericArgument::Binding(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericArgument::Constraint(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericArgument::Const(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for GenericMethodArgument {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ GenericMethodArgument::Type(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericMethodArgument::Const(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for GenericParam {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ GenericParam::Type(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericParam::Lifetime(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ GenericParam::Const(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Generics {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lt_token.hash(state);
|
|
+ self.params.hash(state);
|
|
+ self.gt_token.hash(state);
|
|
+ self.where_clause.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ImplItem {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ ImplItem::Const(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ImplItem::Method(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ImplItem::Type(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ImplItem::Macro(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ ImplItem::Verbatim(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ImplItemConst {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.defaultness.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ImplItemMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mac.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ImplItemMethod {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.defaultness.hash(state);
|
|
+ self.sig.hash(state);
|
|
+ self.block.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ImplItemType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.defaultness.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Item {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Item::Const(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Enum(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::ExternCrate(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Fn(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::ForeignMod(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Impl(v0) => {
|
|
+ state.write_u8(5u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Macro(v0) => {
|
|
+ state.write_u8(6u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Macro2(v0) => {
|
|
+ state.write_u8(7u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Mod(v0) => {
|
|
+ state.write_u8(8u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Static(v0) => {
|
|
+ state.write_u8(9u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Struct(v0) => {
|
|
+ state.write_u8(10u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Trait(v0) => {
|
|
+ state.write_u8(11u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::TraitAlias(v0) => {
|
|
+ state.write_u8(12u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Type(v0) => {
|
|
+ state.write_u8(13u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Union(v0) => {
|
|
+ state.write_u8(14u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Use(v0) => {
|
|
+ state.write_u8(15u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Item::Verbatim(v0) => {
|
|
+ state.write_u8(16u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemConst {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemEnum {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.variants.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemExternCrate {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.rename.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemFn {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.sig.hash(state);
|
|
+ self.block.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemForeignMod {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.abi.hash(state);
|
|
+ self.items.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemImpl {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.defaultness.hash(state);
|
|
+ self.unsafety.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.trait_.hash(state);
|
|
+ self.self_ty.hash(state);
|
|
+ self.items.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.mac.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemMacro2 {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ TokenStreamHelper(&self.rules).hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemMod {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.content.hash(state);
|
|
+ self.semi.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemStatic {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemStruct {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.fields.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemTrait {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.unsafety.hash(state);
|
|
+ self.auto_token.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.supertraits.hash(state);
|
|
+ self.items.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemTraitAlias {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemUnion {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.fields.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for ItemUse {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.vis.hash(state);
|
|
+ self.leading_colon.hash(state);
|
|
+ self.tree.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Label {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.name.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for LifetimeDef {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.lifetime.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+impl Hash for Lit {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Lit::Str(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::ByteStr(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::Byte(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::Char(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::Int(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::Float(v0) => {
|
|
+ state.write_u8(5u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::Bool(v0) => {
|
|
+ state.write_u8(6u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Lit::Verbatim(v0) => {
|
|
+ state.write_u8(7u8);
|
|
+ v0.to_string().hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+impl Hash for LitBool {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.value.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Local {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ self.init.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Macro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.path.hash(state);
|
|
+ self.delimiter.hash(state);
|
|
+ TokenStreamHelper(&self.tokens).hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for MacroDelimiter {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ MacroDelimiter::Paren(_) => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ MacroDelimiter::Brace(_) => {
|
|
+ state.write_u8(1u8);
|
|
+ }
|
|
+ MacroDelimiter::Bracket(_) => {
|
|
+ state.write_u8(2u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Meta {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Meta::Path(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Meta::List(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Meta::NameValue(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for MetaList {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.path.hash(state);
|
|
+ self.nested.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for MetaNameValue {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.path.hash(state);
|
|
+ self.lit.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for MethodTurbofish {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.args.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for NestedMeta {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ NestedMeta::Meta(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ NestedMeta::Lit(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ParenthesizedGenericArguments {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.inputs.hash(state);
|
|
+ self.output.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Pat {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Pat::Box(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Ident(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Lit(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Macro(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Or(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Path(v0) => {
|
|
+ state.write_u8(5u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Range(v0) => {
|
|
+ state.write_u8(6u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Reference(v0) => {
|
|
+ state.write_u8(7u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Rest(v0) => {
|
|
+ state.write_u8(8u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Slice(v0) => {
|
|
+ state.write_u8(9u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Struct(v0) => {
|
|
+ state.write_u8(10u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Tuple(v0) => {
|
|
+ state.write_u8(11u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::TupleStruct(v0) => {
|
|
+ state.write_u8(12u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Type(v0) => {
|
|
+ state.write_u8(13u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Pat::Verbatim(v0) => {
|
|
+ state.write_u8(14u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ Pat::Wild(v0) => {
|
|
+ state.write_u8(15u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatBox {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatIdent {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.by_ref.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.subpat.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatLit {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.expr.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mac.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatOr {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.leading_vert.hash(state);
|
|
+ self.cases.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatPath {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.qself.hash(state);
|
|
+ self.path.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatRange {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.lo.hash(state);
|
|
+ self.limits.hash(state);
|
|
+ self.hi.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatReference {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatRest {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatSlice {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.elems.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatStruct {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.path.hash(state);
|
|
+ self.fields.hash(state);
|
|
+ self.dot2_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatTuple {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.elems.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatTupleStruct {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.path.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.pat.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for PatWild {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Path {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.leading_colon.hash(state);
|
|
+ self.segments.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for PathArguments {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ PathArguments::None => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ PathArguments::AngleBracketed(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ PathArguments::Parenthesized(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for PathSegment {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ident.hash(state);
|
|
+ self.arguments.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for PredicateEq {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lhs_ty.hash(state);
|
|
+ self.rhs_ty.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for PredicateLifetime {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lifetime.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for PredicateType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lifetimes.hash(state);
|
|
+ self.bounded_ty.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for QSelf {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ty.hash(state);
|
|
+ self.position.hash(state);
|
|
+ self.as_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for RangeLimits {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ RangeLimits::HalfOpen(_) => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ RangeLimits::Closed(_) => {
|
|
+ state.write_u8(1u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Receiver {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.reference.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for ReturnType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ ReturnType::Default => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ ReturnType::Type(_, v1) => {
|
|
+ state.write_u8(1u8);
|
|
+ v1.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Signature {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.constness.hash(state);
|
|
+ self.asyncness.hash(state);
|
|
+ self.unsafety.hash(state);
|
|
+ self.abi.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.inputs.hash(state);
|
|
+ self.variadic.hash(state);
|
|
+ self.output.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for Stmt {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Stmt::Local(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Stmt::Item(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Stmt::Expr(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Stmt::Semi(v0, _) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TraitBound {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.paren_token.hash(state);
|
|
+ self.modifier.hash(state);
|
|
+ self.lifetimes.hash(state);
|
|
+ self.path.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TraitBoundModifier {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ TraitBoundModifier::None => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ TraitBoundModifier::Maybe(_) => {
|
|
+ state.write_u8(1u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for TraitItem {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ TraitItem::Const(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ TraitItem::Method(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ TraitItem::Type(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ TraitItem::Macro(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ TraitItem::Verbatim(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for TraitItemConst {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.ty.hash(state);
|
|
+ self.default.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for TraitItemMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.mac.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for TraitItemMethod {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.sig.hash(state);
|
|
+ self.default.hash(state);
|
|
+ self.semi_token.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for TraitItemType {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.generics.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ self.default.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Type {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Type::Array(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::BareFn(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Group(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::ImplTrait(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Infer(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Macro(v0) => {
|
|
+ state.write_u8(5u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Never(v0) => {
|
|
+ state.write_u8(6u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Paren(v0) => {
|
|
+ state.write_u8(7u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Path(v0) => {
|
|
+ state.write_u8(8u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Ptr(v0) => {
|
|
+ state.write_u8(9u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Reference(v0) => {
|
|
+ state.write_u8(10u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Slice(v0) => {
|
|
+ state.write_u8(11u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::TraitObject(v0) => {
|
|
+ state.write_u8(12u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Tuple(v0) => {
|
|
+ state.write_u8(13u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Type::Verbatim(v0) => {
|
|
+ state.write_u8(14u8);
|
|
+ TokenStreamHelper(v0).hash(state);
|
|
+ }
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeArray {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.elem.hash(state);
|
|
+ self.len.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeBareFn {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lifetimes.hash(state);
|
|
+ self.unsafety.hash(state);
|
|
+ self.abi.hash(state);
|
|
+ self.inputs.hash(state);
|
|
+ self.variadic.hash(state);
|
|
+ self.output.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeGroup {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.elem.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeImplTrait {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeInfer {
|
|
+ fn hash<H>(&self, _state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeMacro {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.mac.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeNever {
|
|
+ fn hash<H>(&self, _state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeParam {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.colon_token.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ self.eq_token.hash(state);
|
|
+ self.default.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeParamBound {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ TypeParamBound::Trait(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ TypeParamBound::Lifetime(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeParen {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.elem.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypePath {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.qself.hash(state);
|
|
+ self.path.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypePtr {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.const_token.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.elem.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeReference {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.lifetime.hash(state);
|
|
+ self.mutability.hash(state);
|
|
+ self.elem.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeSlice {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.elem.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeTraitObject {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.dyn_token.hash(state);
|
|
+ self.bounds.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for TypeTuple {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.elems.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for UnOp {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ UnOp::Deref(_) => {
|
|
+ state.write_u8(0u8);
|
|
+ }
|
|
+ UnOp::Not(_) => {
|
|
+ state.write_u8(1u8);
|
|
+ }
|
|
+ UnOp::Neg(_) => {
|
|
+ state.write_u8(2u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for UseGlob {
|
|
+ fn hash<H>(&self, _state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for UseGroup {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.items.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for UseName {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ident.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for UsePath {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ident.hash(state);
|
|
+ self.tree.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for UseRename {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.ident.hash(state);
|
|
+ self.rename.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(feature = "full")]
|
|
+impl Hash for UseTree {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ UseTree::Path(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ UseTree::Name(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ UseTree::Rename(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ UseTree::Glob(v0) => {
|
|
+ state.write_u8(3u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ UseTree::Group(v0) => {
|
|
+ state.write_u8(4u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Variadic {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Variant {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.attrs.hash(state);
|
|
+ self.ident.hash(state);
|
|
+ self.fields.hash(state);
|
|
+ self.discriminant.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for VisCrate {
|
|
+ fn hash<H>(&self, _state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for VisPublic {
|
|
+ fn hash<H>(&self, _state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for VisRestricted {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.in_token.hash(state);
|
|
+ self.path.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for Visibility {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ Visibility::Public(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Visibility::Crate(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Visibility::Restricted(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ Visibility::Inherited => {
|
|
+ state.write_u8(3u8);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for WhereClause {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ self.predicates.hash(state);
|
|
+ }
|
|
+}
|
|
+#[cfg(any(feature = "derive", feature = "full"))]
|
|
+impl Hash for WherePredicate {
|
|
+ fn hash<H>(&self, state: &mut H)
|
|
+ where
|
|
+ H: Hasher,
|
|
+ {
|
|
+ match self {
|
|
+ WherePredicate::Type(v0) => {
|
|
+ state.write_u8(0u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ WherePredicate::Lifetime(v0) => {
|
|
+ state.write_u8(1u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ WherePredicate::Eq(v0) => {
|
|
+ state.write_u8(2u8);
|
|
+ v0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
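
(Editorial aside, not part of the vendored patch.) The generated impls above all follow one shape: struct impls hash each field in declaration order, enum impls first mix a per-variant tag into the hasher with `state.write_u8(n)` and then hash that variant's payload, and `Verbatim` arms go through the crate's `TokenStreamHelper` wrapper because `proc_macro2::TokenStream` has no `Hash` impl of its own. A minimal hand-written sketch of the same pattern, using simplified stand-in types rather than syn's real `Expr` definitions, could look like this:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Hypothetical stand-in payloads; the real syn structs carry many more fields.
    struct ExprLit { lit: String }
    struct ExprCall { func: String, args: Vec<String> }

    enum Expr {
        Lit(ExprLit),
        Call(ExprCall),
    }

    impl Hash for Expr {
        fn hash<H: Hasher>(&self, state: &mut H) {
            match self {
                // Write a per-variant tag first so that different variants with
                // equal payloads still hash differently, then hash the fields in
                // declaration order, the same shape as the generated impls above.
                Expr::Lit(v0) => {
                    state.write_u8(0u8);
                    v0.lit.hash(state);
                }
                Expr::Call(v0) => {
                    state.write_u8(1u8);
                    v0.func.hash(state);
                    v0.args.hash(state);
                }
            }
        }
    }

    fn main() {
        let e = Expr::Call(ExprCall { func: "f".into(), args: vec!["x".into()] });
        let mut h = DefaultHasher::new();
        e.hash(&mut h);
        println!("hash = {:x}", h.finish());
    }

The variant tag is roughly what `#[derive(Hash)]` would emit; syn spells it out because these impls are feature-gated and produced by its own code generator.
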
diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
--- a/third_party/rust/syn/src/gen/visit.rs
+++ b/third_party/rust/syn/src/gen/visit.rs
@@ -15,27 +15,26 @@ macro_rules! full {
    };
}
#[cfg(all(feature = "derive", not(feature = "full")))]
macro_rules! full {
    ($e:expr) => {
        unreachable!()
    };
}
-#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! skip {
    ($($tt:tt)*) => {};
}
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
///
/// See the [module documentation] for details.
///
/// [module documentation]: self
///
-/// *This trait is available if Syn is built with the `"visit"` feature.*
+/// *This trait is available only if Syn is built with the `"visit"` feature.*
pub trait Visit<'ast> {
    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_abi(&mut self, i: &'ast Abi) {
        visit_abi(self, i)
    }
    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_angle_bracketed_generic_arguments(&mut self, i: &'ast AngleBracketedGenericArguments) {
        visit_angle_bracketed_generic_arguments(self, i)
@@ -429,45 +428,37 @@ pub trait Visit<'ast> {
    }
    fn visit_lifetime(&mut self, i: &'ast Lifetime) {
        visit_lifetime(self, i)
    }
    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
        visit_lifetime_def(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit(&mut self, i: &'ast Lit) {
        visit_lit(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_bool(&mut self, i: &'ast LitBool) {
        visit_lit_bool(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_byte(&mut self, i: &'ast LitByte) {
        visit_lit_byte(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
        visit_lit_byte_str(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_char(&mut self, i: &'ast LitChar) {
        visit_lit_char(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_float(&mut self, i: &'ast LitFloat) {
        visit_lit_float(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_int(&mut self, i: &'ast LitInt) {
        visit_lit_int(self, i)
    }
-    #[cfg(any(feature = "derive", feature = "full"))]
    fn visit_lit_str(&mut self, i: &'ast LitStr) {
        visit_lit_str(self, i)
    }
    #[cfg(feature = "full")]
    fn visit_local(&mut self, i: &'ast Local) {
        visit_local(self, i)
    }
    #[cfg(any(feature = "derive", feature = "full"))]
@@ -2532,17 +2523,16 @@ where
    for el in Punctuated::pairs(&node.bounds) {
        let (it, p) = el.into_tuple();
        v.visit_lifetime(it);
        if let Some(p) = p {
            tokens_helper(v, &p.spans);
        }
    }
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
where
    V: Visit<'ast> + ?Sized,
{
    match node {
        Lit::Str(_binding_0) => {
            v.visit_lit_str(_binding_0);
        }
@@ -2564,55 +2554,48 @@ where
        Lit::Bool(_binding_0) => {
            v.visit_lit_bool(_binding_0);
        }
        Lit::Verbatim(_binding_0) => {
            skip!(_binding_0);
        }
    }
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
where
    V: Visit<'ast> + ?Sized,
{
    skip!(node.value);
    v.visit_span(&node.span);
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
where
    V: Visit<'ast> + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
where
    V: Visit<'ast> + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
where
    V: Visit<'ast> + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
where
    V: Visit<'ast> + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
where
    V: Visit<'ast> + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
where
    V: Visit<'ast> + ?Sized,
{
}
#[cfg(feature = "full")]
pub fn visit_local<'ast, V>(v: &mut V, node: &'ast Local)
where
diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
--- a/third_party/rust/syn/src/gen/visit_mut.rs
+++ b/third_party/rust/syn/src/gen/visit_mut.rs
@@ -15,28 +15,27 @@ macro_rules! full {
};
}
#[cfg(all(feature = "derive", not(feature = "full")))]
macro_rules! full {
($e:expr) => {
unreachable!()
};
}
-#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! skip {
($($tt:tt)*) => {};
}
/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
/// place.
///
/// See the [module documentation] for details.
///
/// [module documentation]: self
///
-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
+/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
pub trait VisitMut {
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_abi_mut(&mut self, i: &mut Abi) {
visit_abi_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_angle_bracketed_generic_arguments_mut(
&mut self,
@@ -433,45 +432,37 @@ pub trait VisitMut {
}
fn visit_lifetime_mut(&mut self, i: &mut Lifetime) {
visit_lifetime_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
visit_lifetime_def_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_mut(&mut self, i: &mut Lit) {
visit_lit_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
visit_lit_bool_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
visit_lit_byte_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
visit_lit_byte_str_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
visit_lit_char_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
visit_lit_float_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
visit_lit_int_mut(self, i)
}
- #[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
visit_lit_str_mut(self, i)
}
#[cfg(feature = "full")]
fn visit_local_mut(&mut self, i: &mut Local) {
visit_local_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
@@ -2538,17 +2529,16 @@ where
for el in Punctuated::pairs_mut(&mut node.bounds) {
let (it, p) = el.into_tuple();
v.visit_lifetime_mut(it);
if let Some(p) = p {
tokens_helper(v, &mut p.spans);
}
}
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
where
V: VisitMut + ?Sized,
{
match node {
Lit::Str(_binding_0) => {
v.visit_lit_str_mut(_binding_0);
}
@@ -2570,55 +2560,48 @@ where
Lit::Bool(_binding_0) => {
v.visit_lit_bool_mut(_binding_0);
}
Lit::Verbatim(_binding_0) => {
skip!(_binding_0);
}
}
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
where
V: VisitMut + ?Sized,
{
skip!(node.value);
v.visit_span_mut(&mut node.span);
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
where
V: VisitMut + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
where
V: VisitMut + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
where
V: VisitMut + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
where
V: VisitMut + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
where
V: VisitMut + ?Sized,
{
}
-#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
where
V: VisitMut + ?Sized,
{
}
#[cfg(feature = "full")]
pub fn visit_local_mut<V>(v: &mut V, node: &mut Local)
where
diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
|
|
--- a/third_party/rust/syn/src/generics.rs
|
|
+++ b/third_party/rust/syn/src/generics.rs
|
|
@@ -1,97 +1,108 @@
|
|
use super::*;
|
|
use crate::punctuated::{Iter, IterMut, Punctuated};
|
|
+#[cfg(all(feature = "printing", feature = "extra-traits"))]
|
|
+use std::fmt::{self, Debug};
|
|
+#[cfg(all(feature = "printing", feature = "extra-traits"))]
|
|
+use std::hash::{Hash, Hasher};
|
|
|
|
ast_struct! {
|
|
/// Lifetimes and type parameters attached to a declaration of a function,
|
|
/// enum, trait, etc.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- #[derive(Default)]
|
|
pub struct Generics {
|
|
pub lt_token: Option<Token![<]>,
|
|
pub params: Punctuated<GenericParam, Token![,]>,
|
|
pub gt_token: Option<Token![>]>,
|
|
pub where_clause: Option<WhereClause>,
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
|
|
/// `'a: 'b`, `const LEN: usize`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
- //
|
|
- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
- // blocked on https://github.com/rust-lang/rust/issues/62833
|
|
pub enum GenericParam {
|
|
/// A generic type parameter: `T: Into<String>`.
|
|
Type(TypeParam),
|
|
|
|
/// A lifetime definition: `'a: 'b + 'c + 'd`.
|
|
Lifetime(LifetimeDef),
|
|
|
|
/// A const generic parameter: `const LENGTH: usize`.
|
|
Const(ConstParam),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A generic type parameter: `T: Into<String>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeParam {
|
|
pub attrs: Vec<Attribute>,
|
|
pub ident: Ident,
|
|
pub colon_token: Option<Token![:]>,
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
pub eq_token: Option<Token![=]>,
|
|
pub default: Option<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A lifetime definition: `'a: 'b + 'c + 'd`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct LifetimeDef {
|
|
pub attrs: Vec<Attribute>,
|
|
pub lifetime: Lifetime,
|
|
pub colon_token: Option<Token![:]>,
|
|
pub bounds: Punctuated<Lifetime, Token![+]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A const generic parameter: `const LENGTH: usize`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct ConstParam {
|
|
pub attrs: Vec<Attribute>,
|
|
pub const_token: Token![const],
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub ty: Type,
|
|
pub eq_token: Option<Token![=]>,
|
|
pub default: Option<Expr>,
|
|
}
|
|
}
|
|
|
|
+impl Default for Generics {
|
|
+ fn default() -> Self {
|
|
+ Generics {
|
|
+ lt_token: None,
|
|
+ params: Punctuated::new(),
|
|
+ gt_token: None,
|
|
+ where_clause: None,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
impl Generics {
|
|
/// Returns an
|
|
/// <code
|
|
/// style="padding-right:0;">Iterator<Item = &</code><a
|
|
/// href="struct.TypeParam.html"><code
|
|
/// style="padding-left:0;padding-right:0;">TypeParam</code></a><code
|
|
/// style="padding-left:0;">></code>
|
|
/// over the type parameters in `self.params`.
|
|
@@ -275,100 +286,148 @@ impl<'a> Iterator for ConstParamsMut<'a>
|
|
} else {
|
|
self.next()
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Returned by `Generics::split_for_impl`.
|
|
///
|
|
-/// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature and the `"printing"` feature.*
|
|
#[cfg(feature = "printing")]
|
|
-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
pub struct ImplGenerics<'a>(&'a Generics);
|
|
|
|
/// Returned by `Generics::split_for_impl`.
|
|
///
|
|
-/// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature and the `"printing"` feature.*
|
|
#[cfg(feature = "printing")]
|
|
-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
pub struct TypeGenerics<'a>(&'a Generics);
|
|
|
|
/// Returned by `TypeGenerics::as_turbofish`.
|
|
///
|
|
-/// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature and the `"printing"` feature.*
|
|
#[cfg(feature = "printing")]
|
|
-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
pub struct Turbofish<'a>(&'a Generics);
|
|
|
|
#[cfg(feature = "printing")]
|
|
impl Generics {
|
|
/// Split a type's generics into the pieces required for impl'ing a trait
|
|
/// for that type.
|
|
///
|
|
/// ```
|
|
/// # use proc_macro2::{Span, Ident};
|
|
/// # use quote::quote;
|
|
/// #
|
|
- /// # fn main() {
|
|
- /// # let generics: syn::Generics = Default::default();
|
|
- /// # let name = Ident::new("MyType", Span::call_site());
|
|
+ /// # let generics: syn::Generics = Default::default();
|
|
+ /// # let name = Ident::new("MyType", Span::call_site());
|
|
/// #
|
|
/// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
|
|
/// quote! {
|
|
/// impl #impl_generics MyTrait for #name #ty_generics #where_clause {
|
|
/// // ...
|
|
/// }
|
|
/// }
|
|
- /// # ;
|
|
- /// # }
|
|
+ /// # ;
|
|
/// ```
|
|
///
|
|
- /// *This method is available if Syn is built with the `"derive"` or
|
|
+ /// *This method is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature and the `"printing"` feature.*
|
|
pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
|
|
(
|
|
ImplGenerics(self),
|
|
TypeGenerics(self),
|
|
self.where_clause.as_ref(),
|
|
)
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "printing")]
|
|
+macro_rules! generics_wrapper_impls {
|
|
+ ($ty:ident) => {
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl<'a> Clone for $ty<'a> {
|
|
+ fn clone(&self) -> Self {
|
|
+ $ty(self.0)
|
|
+ }
|
|
+ }
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ impl<'a> Debug for $ty<'a> {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ formatter
|
|
+ .debug_tuple(stringify!($ty))
|
|
+ .field(self.0)
|
|
+ .finish()
|
|
+ }
|
|
+ }
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ impl<'a> Eq for $ty<'a> {}
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ impl<'a> PartialEq for $ty<'a> {
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ self.0 == other.0
|
|
+ }
|
|
+ }
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ impl<'a> Hash for $ty<'a> {
|
|
+ fn hash<H: Hasher>(&self, state: &mut H) {
|
|
+ self.0.hash(state);
|
|
+ }
|
|
+ }
|
|
+ };
|
|
+}
|
|
+
|
|
+#[cfg(feature = "printing")]
|
|
+generics_wrapper_impls!(ImplGenerics);
|
|
+#[cfg(feature = "printing")]
|
|
+generics_wrapper_impls!(TypeGenerics);
|
|
+#[cfg(feature = "printing")]
|
|
+generics_wrapper_impls!(Turbofish);
|
|
+
|
|
+#[cfg(feature = "printing")]
|
|
impl<'a> TypeGenerics<'a> {
|
|
/// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
|
|
///
|
|
- /// *This method is available if Syn is built with the `"derive"` or
|
|
+ /// *This method is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature and the `"printing"` feature.*
|
|
pub fn as_turbofish(&self) -> Turbofish {
|
|
Turbofish(self.0)
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A set of bound lifetimes: `for<'a, 'b, 'c>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- #[derive(Default)]
|
|
pub struct BoundLifetimes {
|
|
pub for_token: Token![for],
|
|
pub lt_token: Token![<],
|
|
pub lifetimes: Punctuated<LifetimeDef, Token![,]>,
|
|
pub gt_token: Token![>],
|
|
}
|
|
}
|
|
|
|
+impl Default for BoundLifetimes {
|
|
+ fn default() -> Self {
|
|
+ BoundLifetimes {
|
|
+ for_token: Default::default(),
|
|
+ lt_token: Default::default(),
|
|
+ lifetimes: Punctuated::new(),
|
|
+ gt_token: Default::default(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
impl LifetimeDef {
|
|
pub fn new(lifetime: Lifetime) -> Self {
|
|
LifetimeDef {
|
|
attrs: Vec::new(),
|
|
lifetime,
|
|
colon_token: None,
|
|
bounds: Punctuated::new(),
|
|
}
|
|
@@ -386,122 +445,118 @@ impl From<Ident> for TypeParam {
|
|
default: None,
|
|
}
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// A trait or lifetime used as a bound on a type parameter.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub enum TypeParamBound {
|
|
Trait(TraitBound),
|
|
Lifetime(Lifetime),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A trait used as a bound on a type parameter.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct TraitBound {
|
|
pub paren_token: Option<token::Paren>,
|
|
pub modifier: TraitBoundModifier,
|
|
/// The `for<'a>` in `for<'a> Foo<&'a T>`
|
|
pub lifetimes: Option<BoundLifetimes>,
|
|
/// The `Foo<&'a T>` in `for<'a> Foo<&'a T>`
|
|
pub path: Path,
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// A modifier on a trait bound, currently only used for the `?` in
|
|
/// `?Sized`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy))]
|
|
pub enum TraitBoundModifier {
|
|
None,
|
|
Maybe(Token![?]),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A `where` clause in a definition: `where T: Deserialize<'de>, D:
|
|
/// 'static`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct WhereClause {
|
|
pub where_token: Token![where],
|
|
pub predicates: Punctuated<WherePredicate, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// A single predicate in a `where` clause: `T: Deserialize<'de>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
- //
|
|
- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
- // blocked on https://github.com/rust-lang/rust/issues/62833
|
|
pub enum WherePredicate {
|
|
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
|
|
Type(PredicateType),
|
|
|
|
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
|
|
Lifetime(PredicateLifetime),
|
|
|
|
/// An equality predicate in a `where` clause (unsupported).
|
|
Eq(PredicateEq),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct PredicateType {
|
|
/// Any lifetimes from a `for` binding
|
|
pub lifetimes: Option<BoundLifetimes>,
|
|
/// The type being bounded
|
|
pub bounded_ty: Type,
|
|
pub colon_token: Token![:],
|
|
/// Trait and lifetime bounds (`Clone+Send+'static`)
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct PredicateLifetime {
|
|
pub lifetime: Lifetime,
|
|
pub colon_token: Token![:],
|
|
pub bounds: Punctuated<Lifetime, Token![+]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An equality predicate in a `where` clause (unsupported).
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct PredicateEq {
|
|
pub lhs_ty: Type,
|
|
pub eq_token: Token![=],
|
|
pub rhs_ty: Type,
|
|
}
|
|
}
|
|
|
|
@@ -516,38 +571,36 @@ pub mod parsing {
|
|
if !input.peek(Token![<]) {
|
|
return Ok(Generics::default());
|
|
}
|
|
|
|
let lt_token: Token![<] = input.parse()?;
|
|
|
|
let mut params = Punctuated::new();
|
|
let mut allow_lifetime_param = true;
|
|
- let mut allow_type_param = true;
|
|
loop {
|
|
if input.peek(Token![>]) {
|
|
break;
|
|
}
|
|
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
let lookahead = input.lookahead1();
|
|
if allow_lifetime_param && lookahead.peek(Lifetime) {
|
|
params.push_value(GenericParam::Lifetime(LifetimeDef {
|
|
attrs,
|
|
..input.parse()?
|
|
}));
|
|
- } else if allow_type_param && lookahead.peek(Ident) {
|
|
+ } else if lookahead.peek(Ident) {
|
|
allow_lifetime_param = false;
|
|
params.push_value(GenericParam::Type(TypeParam {
|
|
attrs,
|
|
..input.parse()?
|
|
}));
|
|
} else if lookahead.peek(Token![const]) {
|
|
allow_lifetime_param = false;
|
|
- allow_type_param = false;
|
|
params.push_value(GenericParam::Const(ConstParam {
|
|
attrs,
|
|
..input.parse()?
|
|
}));
|
|
} else {
|
|
return Err(lookahead.error());
|
|
}
|
|
|
|
@@ -660,67 +713,63 @@ pub mod parsing {
|
|
} else {
|
|
Ok(None)
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeParam {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- let has_colon;
|
|
- let has_default;
|
|
- Ok(TypeParam {
|
|
- attrs: input.call(Attribute::parse_outer)?,
|
|
- ident: input.parse()?,
|
|
- colon_token: {
|
|
- if input.peek(Token![:]) {
|
|
- has_colon = true;
|
|
- Some(input.parse()?)
|
|
- } else {
|
|
- has_colon = false;
|
|
- None
|
|
+ let attrs = input.call(Attribute::parse_outer)?;
|
|
+ let ident: Ident = input.parse()?;
|
|
+ let colon_token: Option<Token![:]> = input.parse()?;
|
|
+
|
|
+ let begin_bound = input.fork();
|
|
+ let mut is_maybe_const = false;
|
|
+ let mut bounds = Punctuated::new();
|
|
+ if colon_token.is_some() {
|
|
+ loop {
|
|
+ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
|
|
+ break;
|
|
+ }
|
|
+ if input.peek(Token![?]) && input.peek2(Token![const]) {
|
|
+ input.parse::<Token![?]>()?;
|
|
+ input.parse::<Token![const]>()?;
|
|
+ is_maybe_const = true;
|
|
+ }
|
|
+ let value: TypeParamBound = input.parse()?;
|
|
+ bounds.push_value(value);
|
|
+ if !input.peek(Token![+]) {
|
|
+ break;
|
|
}
|
|
- },
|
|
- bounds: {
|
|
- let mut bounds = Punctuated::new();
|
|
- if has_colon {
|
|
- loop {
|
|
- if input.peek(Token![,])
|
|
- || input.peek(Token![>])
|
|
- || input.peek(Token![=])
|
|
- {
|
|
- break;
|
|
- }
|
|
- let value = input.parse()?;
|
|
- bounds.push_value(value);
|
|
- if !input.peek(Token![+]) {
|
|
- break;
|
|
- }
|
|
- let punct = input.parse()?;
|
|
- bounds.push_punct(punct);
|
|
- }
|
|
- }
|
|
- bounds
|
|
- },
|
|
- eq_token: {
|
|
- if input.peek(Token![=]) {
|
|
- has_default = true;
|
|
- Some(input.parse()?)
|
|
- } else {
|
|
- has_default = false;
|
|
- None
|
|
- }
|
|
- },
|
|
- default: {
|
|
- if has_default {
|
|
- Some(input.parse()?)
|
|
- } else {
|
|
- None
|
|
- }
|
|
- },
|
|
+ let punct: Token![+] = input.parse()?;
|
|
+ bounds.push_punct(punct);
|
|
+ }
|
|
+ }
|
|
+
|
|
+ let mut eq_token: Option<Token![=]> = input.parse()?;
|
|
+ let mut default = if eq_token.is_some() {
|
|
+ Some(input.parse::<Type>()?)
|
|
+ } else {
|
|
+ None
|
|
+ };
|
|
+
|
|
+ if is_maybe_const {
|
|
+ bounds.clear();
|
|
+ eq_token = None;
|
|
+ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
|
|
+ }
|
|
+
|
|
+ Ok(TypeParam {
|
|
+ attrs,
|
|
+ ident,
|
|
+ colon_token,
|
|
+ bounds,
|
|
+ eq_token,
|
|
+ default,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeParamBound {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
if input.peek(Lifetime) {
|
|
return input.parse().map(TypeParamBound::Lifetime);
|
|
@@ -893,16 +942,18 @@ pub mod parsing {
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "printing")]
|
|
mod printing {
|
|
use super::*;
|
|
|
|
use proc_macro2::TokenStream;
|
|
+ #[cfg(feature = "full")]
|
|
+ use proc_macro2::TokenTree;
|
|
use quote::{ToTokens, TokenStreamExt};
|
|
|
|
use crate::attr::FilterAttrs;
|
|
use crate::print::TokensOrDefault;
|
|
|
|
impl ToTokens for Generics {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
if self.params.is_empty() {
|
|
@@ -1075,19 +1126,35 @@ mod printing {
|
|
impl ToTokens for TypeParam {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
tokens.append_all(self.attrs.outer());
|
|
self.ident.to_tokens(tokens);
|
|
if !self.bounds.is_empty() {
|
|
TokensOrDefault(&self.colon_token).to_tokens(tokens);
|
|
self.bounds.to_tokens(tokens);
|
|
}
|
|
- if self.default.is_some() {
|
|
+ if let Some(default) = &self.default {
|
|
+ #[cfg(feature = "full")]
|
|
+ {
|
|
+ if self.eq_token.is_none() {
|
|
+ if let Type::Verbatim(default) = default {
|
|
+ let mut iter = default.clone().into_iter();
|
|
+ match (iter.next(), iter.next()) {
|
|
+ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
|
|
+ if q.as_char() == '?' && c == "const" =>
|
|
+ {
|
|
+ return default.to_tokens(tokens);
|
|
+ }
|
|
+ _ => {}
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+ }
|
|
TokensOrDefault(&self.eq_token).to_tokens(tokens);
|
|
- self.default.to_tokens(tokens);
|
|
+ default.to_tokens(tokens);
|
|
}
|
|
}
|
|
}
|
|
|
|
impl ToTokens for TraitBound {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
let to_tokens = |tokens: &mut TokenStream| {
|
|
self.modifier.to_tokens(tokens);
|
|
@@ -1112,19 +1179,19 @@ mod printing {
|
|
|
|
impl ToTokens for ConstParam {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
tokens.append_all(self.attrs.outer());
|
|
self.const_token.to_tokens(tokens);
|
|
self.ident.to_tokens(tokens);
|
|
self.colon_token.to_tokens(tokens);
|
|
self.ty.to_tokens(tokens);
|
|
- if self.default.is_some() {
|
|
+ if let Some(default) = &self.default {
|
|
TokensOrDefault(&self.eq_token).to_tokens(tokens);
|
|
- self.default.to_tokens(tokens);
|
|
+ default.to_tokens(tokens);
|
|
}
|
|
}
|
|
}
|
|
|
|
impl ToTokens for WhereClause {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
if !self.predicates.is_empty() {
|
|
self.where_token.to_tokens(tokens);
|
|
diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
|
|
--- a/third_party/rust/syn/src/item.rs
|
|
+++ b/third_party/rust/syn/src/item.rs
|
|
@@ -1,32 +1,30 @@
|
|
use super::*;
|
|
-use crate::derive::{Data, DeriveInput};
|
|
+use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
|
|
use crate::punctuated::Punctuated;
|
|
use proc_macro2::TokenStream;
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use crate::tt::TokenStreamHelper;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use std::hash::{Hash, Hasher};
|
|
+#[cfg(feature = "parsing")]
|
|
+use std::mem;
|
|
|
|
ast_enum_of_structs! {
|
|
/// Things that can appear directly inside of a module or scope.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum Item #manual_extra_traits {
|
|
+ pub enum Item {
|
|
/// A constant item: `const MAX: u16 = 65535`.
|
|
Const(ItemConst),
|
|
|
|
/// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
|
|
Enum(ItemEnum),
|
|
|
|
/// An `extern crate` item: `extern crate serde`.
|
|
ExternCrate(ItemExternCrate),
|
|
@@ -78,90 +76,90 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A constant item: `const MAX: u16 = 65535`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemConst {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub const_token: Token![const],
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub ty: Box<Type>,
|
|
pub eq_token: Token![=],
|
|
pub expr: Box<Expr>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemEnum {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub enum_token: Token![enum],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub brace_token: token::Brace,
|
|
pub variants: Punctuated<Variant, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An `extern crate` item: `extern crate serde`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemExternCrate {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub extern_token: Token![extern],
|
|
pub crate_token: Token![crate],
|
|
pub ident: Ident,
|
|
pub rename: Option<(Token![as], Ident)>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A free-standing function: `fn process(n: usize) -> Result<()> { ...
|
|
/// }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemFn {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub sig: Signature,
|
|
pub block: Box<Block>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A block of foreign items: `extern "C" { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemForeignMod {
|
|
pub attrs: Vec<Attribute>,
|
|
pub abi: Abi,
|
|
pub brace_token: token::Brace,
|
|
pub items: Vec<ForeignItem>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An impl block providing trait or associated items: `impl<A> Trait
|
|
/// for Data<A> { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemImpl {
|
|
pub attrs: Vec<Attribute>,
|
|
pub defaultness: Option<Token![default]>,
|
|
pub unsafety: Option<Token![unsafe]>,
|
|
pub impl_token: Token![impl],
|
|
pub generics: Generics,
|
|
/// Trait this impl implements.
|
|
pub trait_: Option<(Option<Token![!]>, Path, Token![for])>,
|
|
@@ -170,57 +168,57 @@ ast_struct! {
|
|
pub brace_token: token::Brace,
|
|
pub items: Vec<ImplItem>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro invocation, which includes `macro_rules!` definitions.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemMacro {
|
|
pub attrs: Vec<Attribute>,
|
|
/// The `example` in `macro_rules! example { ... }`.
|
|
pub ident: Option<Ident>,
|
|
pub mac: Macro,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A 2.0-style declarative macro introduced by the `macro` keyword.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
- pub struct ItemMacro2 #manual_extra_traits {
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
+ pub struct ItemMacro2 {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub macro_token: Token![macro],
|
|
pub ident: Ident,
|
|
pub rules: TokenStream,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A module or module declaration: `mod m` or `mod m { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemMod {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub mod_token: Token![mod],
|
|
pub ident: Ident,
|
|
pub content: Option<(token::Brace, Vec<Item>)>,
|
|
pub semi: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A static item: `static BIKE: Shed = Shed(42)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemStatic {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub static_token: Token![static],
|
|
pub mutability: Option<Token![mut]>,
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub ty: Box<Type>,
|
|
@@ -228,32 +226,32 @@ ast_struct! {
|
|
pub expr: Box<Expr>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A struct definition: `struct Foo<A> { x: A }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemStruct {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub struct_token: Token![struct],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub fields: Fields,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A trait definition: `pub trait Iterator { ... }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemTrait {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub unsafety: Option<Token![unsafe]>,
|
|
pub auto_token: Option<Token![auto]>,
|
|
pub trait_token: Token![trait],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
@@ -262,212 +260,99 @@ ast_struct! {
|
|
pub brace_token: token::Brace,
|
|
pub items: Vec<TraitItem>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemTraitAlias {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub trait_token: Token![trait],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub eq_token: Token![=],
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemType {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub type_token: Token![type],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub eq_token: Token![=],
|
|
pub ty: Box<Type>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A union definition: `union Foo<A, B> { x: A, y: B }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemUnion {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub union_token: Token![union],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub fields: FieldsNamed,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A use declaration: `use std::collections::HashMap`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ItemUse {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub use_token: Token![use],
|
|
pub leading_colon: Option<Token![::]>,
|
|
pub tree: UseTree,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Item {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Item {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (Item::Const(this), Item::Const(other)) => this == other,
|
|
- (Item::Enum(this), Item::Enum(other)) => this == other,
|
|
- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
|
|
- (Item::Fn(this), Item::Fn(other)) => this == other,
|
|
- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
|
|
- (Item::Impl(this), Item::Impl(other)) => this == other,
|
|
- (Item::Macro(this), Item::Macro(other)) => this == other,
|
|
- (Item::Macro2(this), Item::Macro2(other)) => this == other,
|
|
- (Item::Mod(this), Item::Mod(other)) => this == other,
|
|
- (Item::Static(this), Item::Static(other)) => this == other,
|
|
- (Item::Struct(this), Item::Struct(other)) => this == other,
|
|
- (Item::Trait(this), Item::Trait(other)) => this == other,
|
|
- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
|
|
- (Item::Type(this), Item::Type(other)) => this == other,
|
|
- (Item::Union(this), Item::Union(other)) => this == other,
|
|
- (Item::Use(this), Item::Use(other)) => this == other,
|
|
- (Item::Verbatim(this), Item::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Item {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
+impl Item {
|
|
+ #[cfg(feature = "parsing")]
|
|
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
|
|
match self {
|
|
- Item::Const(item) => {
|
|
- state.write_u8(0);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Enum(item) => {
|
|
- state.write_u8(1);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::ExternCrate(item) => {
|
|
- state.write_u8(2);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Fn(item) => {
|
|
- state.write_u8(3);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::ForeignMod(item) => {
|
|
- state.write_u8(4);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Impl(item) => {
|
|
- state.write_u8(5);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Macro(item) => {
|
|
- state.write_u8(6);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Macro2(item) => {
|
|
- state.write_u8(7);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Mod(item) => {
|
|
- state.write_u8(8);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Static(item) => {
|
|
- state.write_u8(9);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Struct(item) => {
|
|
- state.write_u8(10);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Trait(item) => {
|
|
- state.write_u8(11);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::TraitAlias(item) => {
|
|
- state.write_u8(12);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Type(item) => {
|
|
- state.write_u8(13);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Union(item) => {
|
|
- state.write_u8(14);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Use(item) => {
|
|
- state.write_u8(15);
|
|
- item.hash(state);
|
|
- }
|
|
- Item::Verbatim(item) => {
|
|
- state.write_u8(16);
|
|
- TokenStreamHelper(item).hash(state);
|
|
- }
|
|
+ Item::ExternCrate(ItemExternCrate { attrs, .. })
|
|
+ | Item::Use(ItemUse { attrs, .. })
|
|
+ | Item::Static(ItemStatic { attrs, .. })
|
|
+ | Item::Const(ItemConst { attrs, .. })
|
|
+ | Item::Fn(ItemFn { attrs, .. })
|
|
+ | Item::Mod(ItemMod { attrs, .. })
|
|
+ | Item::ForeignMod(ItemForeignMod { attrs, .. })
|
|
+ | Item::Type(ItemType { attrs, .. })
|
|
+ | Item::Struct(ItemStruct { attrs, .. })
|
|
+ | Item::Enum(ItemEnum { attrs, .. })
|
|
+ | Item::Union(ItemUnion { attrs, .. })
|
|
+ | Item::Trait(ItemTrait { attrs, .. })
|
|
+ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
|
|
+ | Item::Impl(ItemImpl { attrs, .. })
|
|
+ | Item::Macro(ItemMacro { attrs, .. })
|
|
+ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
|
|
+ Item::Verbatim(_) => Vec::new(),
|
|
Item::__Nonexhaustive => unreachable!(),
|
|
}
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for ItemMacro2 {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for ItemMacro2 {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- self.attrs == other.attrs
|
|
- && self.vis == other.vis
|
|
- && self.macro_token == other.macro_token
|
|
- && self.ident == other.ident
|
|
- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for ItemMacro2 {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- self.attrs.hash(state);
|
|
- self.vis.hash(state);
|
|
- self.macro_token.hash(state);
|
|
- self.ident.hash(state);
|
|
- TokenStreamHelper(&self.rules).hash(state);
|
|
- }
|
|
-}
|
|
-
|
|
impl From<DeriveInput> for Item {
|
|
fn from(input: DeriveInput) -> Item {
|
|
match input.data {
|
|
Data::Struct(data) => Item::Struct(ItemStruct {
|
|
attrs: input.attrs,
|
|
vis: input.vis,
|
|
struct_token: data.struct_token,
|
|
ident: input.ident,
|
|
@@ -491,20 +376,67 @@ impl From<DeriveInput> for Item {
|
|
ident: input.ident,
|
|
generics: input.generics,
|
|
fields: data.fields,
|
|
}),
|
|
}
|
|
}
|
|
}
|
|
|
|
+impl From<ItemStruct> for DeriveInput {
|
|
+ fn from(input: ItemStruct) -> DeriveInput {
|
|
+ DeriveInput {
|
|
+ attrs: input.attrs,
|
|
+ vis: input.vis,
|
|
+ ident: input.ident,
|
|
+ generics: input.generics,
|
|
+ data: Data::Struct(DataStruct {
|
|
+ struct_token: input.struct_token,
|
|
+ fields: input.fields,
|
|
+ semi_token: input.semi_token,
|
|
+ }),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+impl From<ItemEnum> for DeriveInput {
|
|
+ fn from(input: ItemEnum) -> DeriveInput {
|
|
+ DeriveInput {
|
|
+ attrs: input.attrs,
|
|
+ vis: input.vis,
|
|
+ ident: input.ident,
|
|
+ generics: input.generics,
|
|
+ data: Data::Enum(DataEnum {
|
|
+ enum_token: input.enum_token,
|
|
+ brace_token: input.brace_token,
|
|
+ variants: input.variants,
|
|
+ }),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+impl From<ItemUnion> for DeriveInput {
|
|
+ fn from(input: ItemUnion) -> DeriveInput {
|
|
+ DeriveInput {
|
|
+ attrs: input.attrs,
|
|
+ vis: input.vis,
|
|
+ ident: input.ident,
|
|
+ generics: input.generics,
|
|
+ data: Data::Union(DataUnion {
|
|
+ union_token: input.union_token,
|
|
+ fields: input.fields,
|
|
+ }),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
ast_enum_of_structs! {
|
|
/// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
@@ -525,77 +457,77 @@ ast_enum_of_structs! {
|
|
/// A braced group of imports in a `use` item: `{A, B, C}`.
|
|
Group(UseGroup),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A path prefix of imports in a `use` item: `std::...`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct UsePath {
|
|
pub ident: Ident,
|
|
pub colon2_token: Token![::],
|
|
pub tree: Box<UseTree>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An identifier imported by a `use` item: `HashMap`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct UseName {
|
|
pub ident: Ident,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An renamed identifier imported by a `use` item: `HashMap as Map`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct UseRename {
|
|
pub ident: Ident,
|
|
pub as_token: Token![as],
|
|
pub rename: Ident,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A glob import in a `use` item: `*`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct UseGlob {
|
|
pub star_token: Token![*],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A braced group of imports in a `use` item: `{A, B, C}`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct UseGroup {
|
|
pub brace_token: token::Brace,
|
|
pub items: Punctuated<UseTree, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_enum_of_structs! {
|
|
/// An item within an `extern` block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum ForeignItem #manual_extra_traits {
|
|
+ pub enum ForeignItem {
|
|
/// A foreign function in an `extern` block.
|
|
Fn(ForeignItemFn),
|
|
|
|
/// A foreign static item in an `extern` block: `static ext: u8`.
|
|
Static(ForeignItemStatic),
|
|
|
|
/// A foreign type in an `extern` block: `type void`.
|
|
Type(ForeignItemType),
|
|
@@ -609,130 +541,79 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A foreign function in an `extern` block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ForeignItemFn {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub sig: Signature,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A foreign static item in an `extern` block: `static ext: u8`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ForeignItemStatic {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub static_token: Token![static],
|
|
pub mutability: Option<Token![mut]>,
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub ty: Box<Type>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A foreign type in an `extern` block: `type void`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ForeignItemType {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub type_token: Token![type],
|
|
pub ident: Ident,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro invocation within an extern block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ForeignItemMacro {
|
|
pub attrs: Vec<Attribute>,
|
|
pub mac: Macro,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for ForeignItem {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for ForeignItem {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
|
|
- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
|
|
- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
|
|
- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
|
|
- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for ForeignItem {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- ForeignItem::Fn(item) => {
|
|
- state.write_u8(0);
|
|
- item.hash(state);
|
|
- }
|
|
- ForeignItem::Static(item) => {
|
|
- state.write_u8(1);
|
|
- item.hash(state);
|
|
- }
|
|
- ForeignItem::Type(item) => {
|
|
- state.write_u8(2);
|
|
- item.hash(state);
|
|
- }
|
|
- ForeignItem::Macro(item) => {
|
|
- state.write_u8(3);
|
|
- item.hash(state);
|
|
- }
|
|
- ForeignItem::Verbatim(item) => {
|
|
- state.write_u8(4);
|
|
- TokenStreamHelper(item).hash(state);
|
|
- }
|
|
- ForeignItem::__Nonexhaustive => unreachable!(),
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
ast_enum_of_structs! {
|
|
/// An item declaration within the definition of a trait.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum TraitItem #manual_extra_traits {
|
|
+ pub enum TraitItem {
|
|
/// An associated constant within the definition of a trait.
|
|
Const(TraitItemConst),
|
|
|
|
/// A trait method within the definition of a trait.
|
|
Method(TraitItemMethod),
|
|
|
|
/// An associated type within the definition of a trait.
|
|
Type(TraitItemType),
|
|
@@ -746,132 +627,81 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An associated constant within the definition of a trait.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct TraitItemConst {
|
|
pub attrs: Vec<Attribute>,
|
|
pub const_token: Token![const],
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub ty: Type,
|
|
pub default: Option<(Token![=], Expr)>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A trait method within the definition of a trait.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct TraitItemMethod {
|
|
pub attrs: Vec<Attribute>,
|
|
pub sig: Signature,
|
|
pub default: Option<Block>,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An associated type within the definition of a trait.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct TraitItemType {
|
|
pub attrs: Vec<Attribute>,
|
|
pub type_token: Token![type],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub colon_token: Option<Token![:]>,
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
pub default: Option<(Token![=], Type)>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro invocation within the definition of a trait.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct TraitItemMacro {
|
|
pub attrs: Vec<Attribute>,
|
|
pub mac: Macro,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for TraitItem {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for TraitItem {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
|
|
- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
|
|
- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
|
|
- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
|
|
- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for TraitItem {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- TraitItem::Const(item) => {
|
|
- state.write_u8(0);
|
|
- item.hash(state);
|
|
- }
|
|
- TraitItem::Method(item) => {
|
|
- state.write_u8(1);
|
|
- item.hash(state);
|
|
- }
|
|
- TraitItem::Type(item) => {
|
|
- state.write_u8(2);
|
|
- item.hash(state);
|
|
- }
|
|
- TraitItem::Macro(item) => {
|
|
- state.write_u8(3);
|
|
- item.hash(state);
|
|
- }
|
|
- TraitItem::Verbatim(item) => {
|
|
- state.write_u8(4);
|
|
- TokenStreamHelper(item).hash(state);
|
|
- }
|
|
- TraitItem::__Nonexhaustive => unreachable!(),
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
ast_enum_of_structs! {
|
|
/// An item within an impl block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum ImplItem #manual_extra_traits {
|
|
+ pub enum ImplItem {
|
|
/// An associated constant within an impl block.
|
|
Const(ImplItemConst),
|
|
|
|
/// A method within an impl block.
|
|
Method(ImplItemMethod),
|
|
|
|
/// An associated type within an impl block.
|
|
Type(ImplItemType),
|
|
@@ -885,17 +715,17 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An associated constant within an impl block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ImplItemConst {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub defaultness: Option<Token![default]>,
|
|
pub const_token: Token![const],
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub ty: Type,
|
|
@@ -903,144 +733,117 @@ ast_struct! {
|
|
pub expr: Expr,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A method within an impl block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ImplItemMethod {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub defaultness: Option<Token![default]>,
|
|
pub sig: Signature,
|
|
pub block: Block,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An associated type within an impl block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ImplItemType {
|
|
pub attrs: Vec<Attribute>,
|
|
pub vis: Visibility,
|
|
pub defaultness: Option<Token![default]>,
|
|
pub type_token: Token![type],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub eq_token: Token![=],
|
|
pub ty: Type,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro invocation within an impl block.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct ImplItemMacro {
|
|
pub attrs: Vec<Attribute>,
|
|
pub mac: Macro,
|
|
pub semi_token: Option<Token![;]>,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for ImplItem {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for ImplItem {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
|
|
- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
|
|
- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
|
|
- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
|
|
- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for ImplItem {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- ImplItem::Const(item) => {
|
|
- state.write_u8(0);
|
|
- item.hash(state);
|
|
- }
|
|
- ImplItem::Method(item) => {
|
|
- state.write_u8(1);
|
|
- item.hash(state);
|
|
- }
|
|
- ImplItem::Type(item) => {
|
|
- state.write_u8(2);
|
|
- item.hash(state);
|
|
- }
|
|
- ImplItem::Macro(item) => {
|
|
- state.write_u8(3);
|
|
- item.hash(state);
|
|
- }
|
|
- ImplItem::Verbatim(item) => {
|
|
- state.write_u8(4);
|
|
- TokenStreamHelper(item).hash(state);
|
|
- }
|
|
- ImplItem::__Nonexhaustive => unreachable!(),
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
ast_struct! {
|
|
/// A function signature in a trait or implementation: `unsafe fn
|
|
/// initialize(&self)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct Signature {
|
|
pub constness: Option<Token![const]>,
|
|
pub asyncness: Option<Token![async]>,
|
|
pub unsafety: Option<Token![unsafe]>,
|
|
pub abi: Option<Abi>,
|
|
pub fn_token: Token![fn],
|
|
pub ident: Ident,
|
|
pub generics: Generics,
|
|
pub paren_token: token::Paren,
|
|
pub inputs: Punctuated<FnArg, Token![,]>,
|
|
pub variadic: Option<Variadic>,
|
|
pub output: ReturnType,
|
|
}
|
|
}
|
|
|
|
+impl Signature {
|
|
+ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
|
|
+ pub fn receiver(&self) -> Option<&FnArg> {
|
|
+ let arg = self.inputs.first()?;
|
|
+ match arg {
|
|
+ FnArg::Receiver(_) => Some(arg),
|
|
+ FnArg::Typed(PatType { pat, .. }) => {
|
|
+ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
|
|
+ if ident == "self" {
|
|
+ return Some(arg);
|
|
+ }
|
|
+ }
|
|
+ None
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+}
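A minimal usage sketch of the `Signature::receiver` helper introduced above, assuming syn 1.0.40 with the "full" and "parsing" features enabled; the parsed method snippet is invented for illustration and is not part of this patch.

use syn::{FnArg, ImplItemMethod};

fn main() {
    // Parse an impl-block method and inspect its `self` receiver.
    let method: ImplItemMethod = syn::parse_str("fn get(&self) -> u32 { self.0 }").unwrap();
    match method.sig.receiver() {
        // `&self`, `&mut self`, `self` and `mut self` come back as FnArg::Receiver.
        Some(FnArg::Receiver(receiver)) => assert!(receiver.reference.is_some()),
        // A typed receiver such as `self: Box<Self>` would come back as FnArg::Typed.
        Some(FnArg::Typed(_)) => unreachable!("`&self` is not a typed receiver"),
        None => unreachable!("this method does take `self`"),
    }
}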
|
|
+
|
|
ast_enum_of_structs! {
|
|
/// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub enum FnArg {
|
|
/// The `self` argument of an associated method, whether taken by value
|
|
/// or by reference.
|
|
+ ///
|
|
+ /// Note that `self` receivers with a specified type, such as `self:
|
|
+ /// Box<Self>`, are parsed as a `FnArg::Typed`.
|
|
Receiver(Receiver),
|
|
|
|
/// A function argument accepted by pattern and type.
|
|
Typed(PatType),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// The `self` argument of an associated method, whether taken by value
|
|
/// or by reference.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// Note that `self` receivers with a specified type, such as `self:
|
|
+ /// Box<Self>`, are parsed as a `FnArg::Typed`.
|
|
+ ///
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct Receiver {
|
|
pub attrs: Vec<Attribute>,
|
|
pub reference: Option<(Token![&], Option<Lifetime>)>,
|
|
pub mutability: Option<Token![mut]>,
|
|
pub self_token: Token![self],
|
|
}
|
|
}
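A short sketch of the note above, assuming syn 1.0.40 with the "full" and "parsing" features; the two trait-method snippets are illustrative only: a plain `&self` parses as `FnArg::Receiver`, while `self: Box<Self>` parses as `FnArg::Typed`.

use syn::{FnArg, TraitItemMethod};

fn main() {
    let by_ref: TraitItemMethod = syn::parse_str("fn by_ref(&self);").unwrap();
    assert!(matches!(by_ref.sig.inputs.first(), Some(FnArg::Receiver(_))));

    let boxed: TraitItemMethod = syn::parse_str("fn boxed(self: Box<Self>);").unwrap();
    assert!(matches!(boxed.sig.inputs.first(), Some(FnArg::Typed(_))));
}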
|
|
|
|
@@ -1051,149 +854,233 @@ impl Receiver {
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parsing {
|
|
use super::*;
|
|
|
|
use crate::ext::IdentExt;
|
|
use crate::parse::discouraged::Speculative;
|
|
- use crate::parse::{Parse, ParseStream, Result};
|
|
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
|
|
+ use crate::token::Brace;
|
|
use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
|
|
use std::iter::{self, FromIterator};
|
|
|
|
crate::custom_keyword!(existential);
|
|
|
|
impl Parse for Item {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let begin = input.fork();
|
|
let mut attrs = input.call(Attribute::parse_outer)?;
|
|
let ahead = input.fork();
|
|
let vis: Visibility = ahead.parse()?;
|
|
|
|
let lookahead = ahead.lookahead1();
|
|
- let mut item = if lookahead.peek(Token![extern]) {
|
|
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
|
|
+ let vis: Visibility = input.parse()?;
|
|
+ let sig = parse_signature(input)?;
|
|
+ if input.peek(Token![;]) {
|
|
+ input.parse::<Token![;]>()?;
|
|
+ Ok(Item::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
|
|
+ }
|
|
+ } else if lookahead.peek(Token![extern]) {
|
|
ahead.parse::<Token![extern]>()?;
|
|
let lookahead = ahead.lookahead1();
|
|
if lookahead.peek(Token![crate]) {
|
|
input.parse().map(Item::ExternCrate)
|
|
- } else if lookahead.peek(Token![fn]) {
|
|
- input.parse().map(Item::Fn)
|
|
} else if lookahead.peek(token::Brace) {
|
|
input.parse().map(Item::ForeignMod)
|
|
} else if lookahead.peek(LitStr) {
|
|
ahead.parse::<LitStr>()?;
|
|
let lookahead = ahead.lookahead1();
|
|
if lookahead.peek(token::Brace) {
|
|
input.parse().map(Item::ForeignMod)
|
|
- } else if lookahead.peek(Token![fn]) {
|
|
- input.parse().map(Item::Fn)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
} else if lookahead.peek(Token![use]) {
|
|
input.parse().map(Item::Use)
|
|
} else if lookahead.peek(Token![static]) {
|
|
- input.parse().map(Item::Static)
|
|
+ let vis = input.parse()?;
|
|
+ let static_token = input.parse()?;
|
|
+ let mutability = input.parse()?;
|
|
+ let ident = input.parse()?;
|
|
+ let colon_token = input.parse()?;
|
|
+ let ty = input.parse()?;
|
|
+ if input.peek(Token![;]) {
|
|
+ input.parse::<Token![;]>()?;
|
|
+ Ok(Item::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ Ok(Item::Static(ItemStatic {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ static_token,
|
|
+ mutability,
|
|
+ ident,
|
|
+ colon_token,
|
|
+ ty,
|
|
+ eq_token: input.parse()?,
|
|
+ expr: input.parse()?,
|
|
+ semi_token: input.parse()?,
|
|
+ }))
|
|
+ }
|
|
} else if lookahead.peek(Token![const]) {
|
|
ahead.parse::<Token![const]>()?;
|
|
let lookahead = ahead.lookahead1();
|
|
if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
|
|
- input.parse().map(Item::Const)
|
|
- } else if lookahead.peek(Token![unsafe])
|
|
- || lookahead.peek(Token![async])
|
|
- || lookahead.peek(Token![extern])
|
|
- || lookahead.peek(Token![fn])
|
|
- {
|
|
- input.parse().map(Item::Fn)
|
|
+ let vis = input.parse()?;
|
|
+ let const_token = input.parse()?;
|
|
+ let ident = {
|
|
+ let lookahead = input.lookahead1();
|
|
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
|
|
+ input.call(Ident::parse_any)?
|
|
+ } else {
|
|
+ return Err(lookahead.error());
|
|
+ }
|
|
+ };
|
|
+ let colon_token = input.parse()?;
|
|
+ let ty = input.parse()?;
|
|
+ if input.peek(Token![;]) {
|
|
+ input.parse::<Token![;]>()?;
|
|
+ Ok(Item::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ Ok(Item::Const(ItemConst {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ const_token,
|
|
+ ident,
|
|
+ colon_token,
|
|
+ ty,
|
|
+ eq_token: input.parse()?,
|
|
+ expr: input.parse()?,
|
|
+ semi_token: input.parse()?,
|
|
+ }))
|
|
+ }
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
} else if lookahead.peek(Token![unsafe]) {
|
|
ahead.parse::<Token![unsafe]>()?;
|
|
let lookahead = ahead.lookahead1();
|
|
if lookahead.peek(Token![trait])
|
|
|| lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
|
|
{
|
|
input.parse().map(Item::Trait)
|
|
} else if lookahead.peek(Token![impl]) {
|
|
- input.parse().map(Item::Impl)
|
|
- } else if lookahead.peek(Token![async])
|
|
- || lookahead.peek(Token![extern])
|
|
- || lookahead.peek(Token![fn])
|
|
- {
|
|
- input.parse().map(Item::Fn)
|
|
+ let allow_const_impl = true;
|
|
+ if let Some(item) = parse_impl(input, allow_const_impl)? {
|
|
+ Ok(Item::Impl(item))
|
|
+ } else {
|
|
+ Ok(Item::Verbatim(verbatim::between(begin, input)))
|
|
+ }
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
|
|
- input.parse().map(Item::Fn)
|
|
} else if lookahead.peek(Token![mod]) {
|
|
input.parse().map(Item::Mod)
|
|
} else if lookahead.peek(Token![type]) {
|
|
- input.parse().map(Item::Type)
|
|
+ parse_item_type(begin, input)
|
|
} else if lookahead.peek(existential) {
|
|
input.call(item_existential).map(Item::Verbatim)
|
|
} else if lookahead.peek(Token![struct]) {
|
|
input.parse().map(Item::Struct)
|
|
} else if lookahead.peek(Token![enum]) {
|
|
input.parse().map(Item::Enum)
|
|
} else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
|
|
input.parse().map(Item::Union)
|
|
} else if lookahead.peek(Token![trait]) {
|
|
input.call(parse_trait_or_trait_alias)
|
|
} else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
|
|
input.parse().map(Item::Trait)
|
|
} else if lookahead.peek(Token![impl])
|
|
|| lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
|
|
{
|
|
- input.parse().map(Item::Impl)
|
|
+ let allow_const_impl = true;
|
|
+ if let Some(item) = parse_impl(input, allow_const_impl)? {
|
|
+ Ok(Item::Impl(item))
|
|
+ } else {
|
|
+ Ok(Item::Verbatim(verbatim::between(begin, input)))
|
|
+ }
|
|
} else if lookahead.peek(Token![macro]) {
|
|
input.parse().map(Item::Macro2)
|
|
} else if vis.is_inherited()
|
|
&& (lookahead.peek(Ident)
|
|
|| lookahead.peek(Token![self])
|
|
|| lookahead.peek(Token![super])
|
|
- || lookahead.peek(Token![extern])
|
|
|| lookahead.peek(Token![crate])
|
|
|| lookahead.peek(Token![::]))
|
|
{
|
|
input.parse().map(Item::Macro)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}?;
|
|
|
|
- {
|
|
- let item_attrs = match &mut item {
|
|
- Item::ExternCrate(item) => &mut item.attrs,
|
|
- Item::Use(item) => &mut item.attrs,
|
|
- Item::Static(item) => &mut item.attrs,
|
|
- Item::Const(item) => &mut item.attrs,
|
|
- Item::Fn(item) => &mut item.attrs,
|
|
- Item::Mod(item) => &mut item.attrs,
|
|
- Item::ForeignMod(item) => &mut item.attrs,
|
|
- Item::Type(item) => &mut item.attrs,
|
|
- Item::Struct(item) => &mut item.attrs,
|
|
- Item::Enum(item) => &mut item.attrs,
|
|
- Item::Union(item) => &mut item.attrs,
|
|
- Item::Trait(item) => &mut item.attrs,
|
|
- Item::TraitAlias(item) => &mut item.attrs,
|
|
- Item::Impl(item) => &mut item.attrs,
|
|
- Item::Macro(item) => &mut item.attrs,
|
|
- Item::Macro2(item) => &mut item.attrs,
|
|
- Item::Verbatim(_) => return Ok(item),
|
|
- Item::__Nonexhaustive => unreachable!(),
|
|
- };
|
|
- attrs.extend(item_attrs.drain(..));
|
|
- *item_attrs = attrs;
|
|
+ attrs.extend(item.replace_attrs(Vec::new()));
|
|
+ item.replace_attrs(attrs);
|
|
+ Ok(item)
|
|
+ }
|
|
+ }
|
|
+
|
|
+ struct FlexibleItemType {
|
|
+ vis: Visibility,
|
|
+ defaultness: Option<Token![default]>,
|
|
+ type_token: Token![type],
|
|
+ ident: Ident,
|
|
+ generics: Generics,
|
|
+ colon_token: Option<Token![:]>,
|
|
+ bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
+ ty: Option<(Token![=], Type)>,
|
|
+ semi_token: Token![;],
|
|
+ }
|
|
+
|
|
+ impl Parse for FlexibleItemType {
|
|
+ fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let vis: Visibility = input.parse()?;
|
|
+ let defaultness: Option<Token![default]> = input.parse()?;
|
|
+ let type_token: Token![type] = input.parse()?;
|
|
+ let ident: Ident = input.parse()?;
|
|
+ let mut generics: Generics = input.parse()?;
|
|
+ let colon_token: Option<Token![:]> = input.parse()?;
|
|
+ let mut bounds = Punctuated::new();
|
|
+ if colon_token.is_some() {
|
|
+ loop {
|
|
+ bounds.push_value(input.parse::<TypeParamBound>()?);
|
|
+ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
|
|
+ break;
|
|
+ }
|
|
+ bounds.push_punct(input.parse::<Token![+]>()?);
|
|
+ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
}
|
|
+ generics.where_clause = input.parse()?;
|
|
+ let ty = if let Some(eq_token) = input.parse()? {
|
|
+ Some((eq_token, input.parse::<Type>()?))
|
|
+ } else {
|
|
+ None
|
|
+ };
|
|
+ let semi_token: Token![;] = input.parse()?;
|
|
|
|
- Ok(item)
|
|
+ Ok(FlexibleItemType {
|
|
+ vis,
|
|
+ defaultness,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ colon_token,
|
|
+ bounds,
|
|
+ ty,
|
|
+ semi_token,
|
|
+ })
|
|
}
|
|
}
|
|
|
|
impl Parse for ItemMacro {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
let path = input.call(Path::parse_mod_style)?;
|
|
let bang_token: Token![!] = input.parse()?;
|
|
@@ -1305,17 +1192,16 @@ pub mod parsing {
|
|
|
|
impl Parse for UseTree {
|
|
fn parse(input: ParseStream) -> Result<UseTree> {
|
|
let lookahead = input.lookahead1();
|
|
if lookahead.peek(Ident)
|
|
|| lookahead.peek(Token![self])
|
|
|| lookahead.peek(Token![super])
|
|
|| lookahead.peek(Token![crate])
|
|
- || lookahead.peek(Token![extern])
|
|
{
|
|
let ident = input.call(Ident::parse_any)?;
|
|
if input.peek(Token![::]) {
|
|
Ok(UseTree::Path(UsePath {
|
|
ident,
|
|
colon2_token: input.parse()?,
|
|
tree: Box::new(input.parse()?),
|
|
}))
|
|
@@ -1387,77 +1273,134 @@ pub mod parsing {
|
|
ty: input.parse()?,
|
|
eq_token: input.parse()?,
|
|
expr: input.parse()?,
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
|
|
+ let trailing_punct = args.trailing_punct();
|
|
+
|
|
+ let last = match args.last_mut()? {
|
|
+ FnArg::Typed(last) => last,
|
|
+ _ => return None,
|
|
+ };
|
|
+
|
|
+ let ty = match last.ty.as_ref() {
|
|
+ Type::Verbatim(ty) => ty,
|
|
+ _ => return None,
|
|
+ };
|
|
+
|
|
+ let mut variadic = Variadic {
|
|
+ attrs: Vec::new(),
|
|
+ dots: parse2(ty.clone()).ok()?,
|
|
+ };
|
|
+
|
|
+ if let Pat::Verbatim(pat) = last.pat.as_ref() {
|
|
+ if pat.to_string() == "..." && !trailing_punct {
|
|
+ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
|
|
+ args.pop();
|
|
+ }
|
|
+ }
|
|
+
|
|
+ Some(variadic)
|
|
+ }
|
|
+
|
|
+ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
|
|
+ TokenStream::from_iter(vec![
|
|
+ TokenTree::Punct({
|
|
+ let mut dot = Punct::new('.', Spacing::Joint);
|
|
+ dot.set_span(dots.spans[0]);
|
|
+ dot
|
|
+ }),
|
|
+ TokenTree::Punct({
|
|
+ let mut dot = Punct::new('.', Spacing::Joint);
|
|
+ dot.set_span(dots.spans[1]);
|
|
+ dot
|
|
+ }),
|
|
+ TokenTree::Punct({
|
|
+ let mut dot = Punct::new('.', Spacing::Alone);
|
|
+ dot.set_span(dots.spans[2]);
|
|
+ dot
|
|
+ }),
|
|
+ ])
|
|
+ }
|
|
+
|
|
+ fn peek_signature(input: ParseStream) -> bool {
|
|
+ let fork = input.fork();
|
|
+ fork.parse::<Option<Token![const]>>().is_ok()
|
|
+ && fork.parse::<Option<Token![async]>>().is_ok()
|
|
+ && fork.parse::<Option<Token![unsafe]>>().is_ok()
|
|
+ && fork.parse::<Option<Abi>>().is_ok()
|
|
+ && fork.peek(Token![fn])
|
|
+ }
|
|
+
|
|
+ fn parse_signature(input: ParseStream) -> Result<Signature> {
|
|
+ let constness: Option<Token![const]> = input.parse()?;
|
|
+ let asyncness: Option<Token![async]> = input.parse()?;
|
|
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
+ let abi: Option<Abi> = input.parse()?;
|
|
+ let fn_token: Token![fn] = input.parse()?;
|
|
+ let ident: Ident = input.parse()?;
|
|
+ let generics: Generics = input.parse()?;
|
|
+
|
|
+ let content;
|
|
+ let paren_token = parenthesized!(content in input);
|
|
+ let mut inputs = parse_fn_args(&content)?;
|
|
+ let variadic = pop_variadic(&mut inputs);
|
|
+
|
|
+ let output: ReturnType = input.parse()?;
|
|
+ let where_clause: Option<WhereClause> = input.parse()?;
|
|
+
|
|
+ Ok(Signature {
|
|
+ constness,
|
|
+ asyncness,
|
|
+ unsafety,
|
|
+ abi,
|
|
+ fn_token,
|
|
+ ident,
|
|
+ paren_token,
|
|
+ inputs,
|
|
+ output,
|
|
+ variadic,
|
|
+ generics: Generics {
|
|
+ where_clause,
|
|
+ ..generics
|
|
+ },
|
|
+ })
|
|
+ }
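A rough sketch of the effect of the `pop_variadic`/`parse_signature` helpers above, assuming syn 1.0.40 with the "full" and "parsing" features; the extern block is invented for illustration: the trailing `...` lands in `Signature::variadic` rather than in `inputs`.

use syn::{ForeignItem, ItemForeignMod};

fn main() {
    let block: ItemForeignMod =
        syn::parse_str(r#"extern "C" { fn printf(fmt: *const u8, ...); }"#).unwrap();
    if let ForeignItem::Fn(f) = &block.items[0] {
        assert!(f.sig.variadic.is_some()); // the `...` was split out by pop_variadic
        assert_eq!(f.sig.inputs.len(), 1); // only `fmt` remains in `inputs`
    }
}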
|
|
+
|
|
impl Parse for ItemFn {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
let vis: Visibility = input.parse()?;
|
|
- let constness: Option<Token![const]> = input.parse()?;
|
|
- let asyncness: Option<Token![async]> = input.parse()?;
|
|
- let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
- let abi: Option<Abi> = input.parse()?;
|
|
- let fn_token: Token![fn] = input.parse()?;
|
|
- let ident: Ident = input.parse()?;
|
|
- let generics: Generics = input.parse()?;
|
|
-
|
|
- let content;
|
|
- let paren_token = parenthesized!(content in input);
|
|
- let inputs = content.parse_terminated(FnArg::parse)?;
|
|
- let variadic = inputs.last().as_ref().and_then(get_variadic);
|
|
-
|
|
- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
|
|
- if let FnArg::Typed(PatType { ty, .. }) = input {
|
|
- if let Type::Verbatim(tokens) = &**ty {
|
|
- if let Ok(dots) = parse2(tokens.clone()) {
|
|
- return Some(Variadic {
|
|
- attrs: Vec::new(),
|
|
- dots,
|
|
- });
|
|
- }
|
|
- }
|
|
- }
|
|
- None
|
|
- }
|
|
+ let sig = parse_signature(input)?;
|
|
+ parse_rest_of_fn(input, outer_attrs, vis, sig)
|
|
+ }
|
|
+ }
|
|
|
|
- let output: ReturnType = input.parse()?;
|
|
- let where_clause: Option<WhereClause> = input.parse()?;
|
|
-
|
|
- let content;
|
|
- let brace_token = braced!(content in input);
|
|
- let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
- let stmts = content.call(Block::parse_within)?;
|
|
+ fn parse_rest_of_fn(
|
|
+ input: ParseStream,
|
|
+ outer_attrs: Vec<Attribute>,
|
|
+ vis: Visibility,
|
|
+ sig: Signature,
|
|
+ ) -> Result<ItemFn> {
|
|
+ let content;
|
|
+ let brace_token = braced!(content in input);
|
|
+ let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
+ let stmts = content.call(Block::parse_within)?;
|
|
|
|
- Ok(ItemFn {
|
|
- attrs: private::attrs(outer_attrs, inner_attrs),
|
|
- vis,
|
|
- sig: Signature {
|
|
- constness,
|
|
- asyncness,
|
|
- unsafety,
|
|
- abi,
|
|
- fn_token,
|
|
- ident,
|
|
- paren_token,
|
|
- inputs,
|
|
- output,
|
|
- variadic,
|
|
- generics: Generics {
|
|
- where_clause,
|
|
- ..generics
|
|
- },
|
|
- },
|
|
- block: Box::new(Block { brace_token, stmts }),
|
|
- })
|
|
- }
|
|
+ Ok(ItemFn {
|
|
+ attrs: private::attrs(outer_attrs, inner_attrs),
|
|
+ vis,
|
|
+ sig,
|
|
+ block: Box::new(Block { brace_token, stmts }),
|
|
+ })
|
|
}
|
|
|
|
impl Parse for FnArg {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
|
|
let ahead = input.fork();
|
|
if let Ok(mut receiver) = ahead.parse::<Receiver>() {
|
|
@@ -1486,36 +1429,89 @@ pub mod parsing {
|
|
}
|
|
},
|
|
mutability: input.parse()?,
|
|
self_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
|
|
+ let mut args = Punctuated::new();
|
|
+ let mut has_receiver = false;
|
|
+
|
|
+ while !input.is_empty() {
|
|
+ let attrs = input.call(Attribute::parse_outer)?;
|
|
+
|
|
+ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
|
|
+ FnArg::Typed(PatType {
|
|
+ attrs,
|
|
+ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
|
|
+ colon_token: Token![:](dots.spans[0]),
|
|
+ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
|
|
+ })
|
|
+ } else {
|
|
+ let mut arg: FnArg = input.parse()?;
|
|
+ match &mut arg {
|
|
+ FnArg::Receiver(receiver) if has_receiver => {
|
|
+ return Err(Error::new(
|
|
+ receiver.self_token.span,
|
|
+ "unexpected second method receiver",
|
|
+ ));
|
|
+ }
|
|
+ FnArg::Receiver(receiver) if !args.is_empty() => {
|
|
+ return Err(Error::new(
|
|
+ receiver.self_token.span,
|
|
+ "unexpected method receiver",
|
|
+ ));
|
|
+ }
|
|
+ FnArg::Receiver(receiver) => {
|
|
+ has_receiver = true;
|
|
+ receiver.attrs = attrs;
|
|
+ }
|
|
+ FnArg::Typed(arg) => arg.attrs = attrs,
|
|
+ }
|
|
+ arg
|
|
+ };
|
|
+ args.push_value(arg);
|
|
+
|
|
+ if input.is_empty() {
|
|
+ break;
|
|
+ }
|
|
+
|
|
+ let comma: Token![,] = input.parse()?;
|
|
+ args.push_punct(comma);
|
|
+ }
|
|
+
|
|
+ Ok(args)
|
|
+ }
|
|
+
|
|
fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
|
|
+ // Hack to parse pre-2018 syntax in
|
|
+ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
|
|
+ // because the rest of the test case is valuable.
|
|
+ if input.peek(Ident) && input.peek2(Token![<]) {
|
|
+ let span = input.fork().parse::<Ident>()?.span();
|
|
+ return Ok(PatType {
|
|
+ attrs: Vec::new(),
|
|
+ pat: Box::new(Pat::Wild(PatWild {
|
|
+ attrs: Vec::new(),
|
|
+ underscore_token: Token![_](span),
|
|
+ })),
|
|
+ colon_token: Token![:](span),
|
|
+ ty: input.parse()?,
|
|
+ });
|
|
+ }
|
|
+
|
|
Ok(PatType {
|
|
attrs: Vec::new(),
|
|
- pat: input.parse()?,
|
|
+ pat: Box::new(pat::parsing::multi_pat(input)?),
|
|
colon_token: input.parse()?,
|
|
ty: Box::new(match input.parse::<Option<Token![...]>>()? {
|
|
- Some(dot3) => {
|
|
- let args = vec![
|
|
- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
|
|
- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
|
|
- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
|
|
- ];
|
|
- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
|
|
- |(mut arg, span)| {
|
|
- arg.set_span(*span);
|
|
- arg
|
|
- },
|
|
- ));
|
|
- Type::Verbatim(tokens)
|
|
- }
|
|
+ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
|
|
None => input.parse()?,
|
|
}),
|
|
})
|
|
}
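A sketch of the pre-2018 fallback described in the comment above, assuming syn 1.0.40 with the "full" and "parsing" features; the trait-method snippet is invented for illustration: a bare `Vec<u8>` argument is accepted and given a wildcard pattern.

use syn::{FnArg, Pat, TraitItemMethod};

fn main() {
    let method: TraitItemMethod = syn::parse_str("fn f(Vec<u8>);").unwrap();
    if let Some(FnArg::Typed(arg)) = method.sig.inputs.first() {
        // The type-only argument is represented as `_: Vec<u8>`.
        assert!(matches!(&*arg.pat, Pat::Wild(_)));
    }
}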
|
|
|
|
impl Parse for ItemMod {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
@@ -1576,109 +1572,103 @@ pub mod parsing {
|
|
brace_token,
|
|
items,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for ForeignItem {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let begin = input.fork();
|
|
let mut attrs = input.call(Attribute::parse_outer)?;
|
|
let ahead = input.fork();
|
|
let vis: Visibility = ahead.parse()?;
|
|
|
|
let lookahead = ahead.lookahead1();
|
|
- let mut item = if lookahead.peek(Token![fn]) {
|
|
- input.parse().map(ForeignItem::Fn)
|
|
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
|
|
+ let vis: Visibility = input.parse()?;
|
|
+ let sig = parse_signature(input)?;
|
|
+ if input.peek(token::Brace) {
|
|
+ let content;
|
|
+ braced!(content in input);
|
|
+ content.call(Attribute::parse_inner)?;
|
|
+ content.call(Block::parse_within)?;
|
|
+
|
|
+ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ Ok(ForeignItem::Fn(ForeignItemFn {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ sig,
|
|
+ semi_token: input.parse()?,
|
|
+ }))
|
|
+ }
|
|
} else if lookahead.peek(Token![static]) {
|
|
- input.parse().map(ForeignItem::Static)
|
|
+ let vis = input.parse()?;
|
|
+ let static_token = input.parse()?;
|
|
+ let mutability = input.parse()?;
|
|
+ let ident = input.parse()?;
|
|
+ let colon_token = input.parse()?;
|
|
+ let ty = input.parse()?;
|
|
+ if input.peek(Token![=]) {
|
|
+ input.parse::<Token![=]>()?;
|
|
+ input.parse::<Expr>()?;
|
|
+ input.parse::<Token![;]>()?;
|
|
+ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ Ok(ForeignItem::Static(ForeignItemStatic {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ static_token,
|
|
+ mutability,
|
|
+ ident,
|
|
+ colon_token,
|
|
+ ty,
|
|
+ semi_token: input.parse()?,
|
|
+ }))
|
|
+ }
|
|
} else if lookahead.peek(Token![type]) {
|
|
- input.parse().map(ForeignItem::Type)
|
|
+ parse_foreign_item_type(begin, input)
|
|
} else if vis.is_inherited()
|
|
&& (lookahead.peek(Ident)
|
|
|| lookahead.peek(Token![self])
|
|
|| lookahead.peek(Token![super])
|
|
- || lookahead.peek(Token![extern])
|
|
|| lookahead.peek(Token![crate])
|
|
|| lookahead.peek(Token![::]))
|
|
{
|
|
input.parse().map(ForeignItem::Macro)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}?;
|
|
|
|
- {
|
|
- let item_attrs = match &mut item {
|
|
- ForeignItem::Fn(item) => &mut item.attrs,
|
|
- ForeignItem::Static(item) => &mut item.attrs,
|
|
- ForeignItem::Type(item) => &mut item.attrs,
|
|
- ForeignItem::Macro(item) => &mut item.attrs,
|
|
- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
|
|
- };
|
|
- attrs.extend(item_attrs.drain(..));
|
|
- *item_attrs = attrs;
|
|
- }
|
|
+ let item_attrs = match &mut item {
|
|
+ ForeignItem::Fn(item) => &mut item.attrs,
|
|
+ ForeignItem::Static(item) => &mut item.attrs,
|
|
+ ForeignItem::Type(item) => &mut item.attrs,
|
|
+ ForeignItem::Macro(item) => &mut item.attrs,
|
|
+ ForeignItem::Verbatim(_) => return Ok(item),
|
|
+ ForeignItem::__Nonexhaustive => unreachable!(),
|
|
+ };
|
|
+ attrs.extend(item_attrs.drain(..));
|
|
+ *item_attrs = attrs;
|
|
|
|
Ok(item)
|
|
}
|
|
}
|
|
|
|
impl Parse for ForeignItemFn {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
let vis: Visibility = input.parse()?;
|
|
- let fn_token: Token![fn] = input.parse()?;
|
|
- let ident: Ident = input.parse()?;
|
|
- let generics: Generics = input.parse()?;
|
|
-
|
|
- let content;
|
|
- let paren_token = parenthesized!(content in input);
|
|
- let mut inputs = Punctuated::new();
|
|
- let mut variadic = None;
|
|
- while !content.is_empty() {
|
|
- let attrs = content.call(Attribute::parse_outer)?;
|
|
-
|
|
- if let Some(dots) = content.parse()? {
|
|
- variadic = Some(Variadic { attrs, dots });
|
|
- break;
|
|
- }
|
|
-
|
|
- let mut arg = content.call(fn_arg_typed)?;
|
|
- arg.attrs = attrs;
|
|
- inputs.push_value(FnArg::Typed(arg));
|
|
- if content.is_empty() {
|
|
- break;
|
|
- }
|
|
-
|
|
- inputs.push_punct(content.parse()?);
|
|
- }
|
|
-
|
|
- let output: ReturnType = input.parse()?;
|
|
- let where_clause: Option<WhereClause> = input.parse()?;
|
|
+ let sig = parse_signature(input)?;
|
|
let semi_token: Token![;] = input.parse()?;
|
|
-
|
|
Ok(ForeignItemFn {
|
|
attrs,
|
|
vis,
|
|
- sig: Signature {
|
|
- constness: None,
|
|
- asyncness: None,
|
|
- unsafety: None,
|
|
- abi: None,
|
|
- fn_token,
|
|
- ident,
|
|
- paren_token,
|
|
- inputs,
|
|
- output,
|
|
- variadic,
|
|
- generics: Generics {
|
|
- where_clause,
|
|
- ..generics
|
|
- },
|
|
- },
|
|
+ sig,
|
|
semi_token,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for ForeignItemStatic {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(ForeignItemStatic {
|
|
@@ -1701,16 +1691,47 @@ pub mod parsing {
|
|
vis: input.parse()?,
|
|
type_token: input.parse()?,
|
|
ident: input.parse()?,
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
|
|
+ let FlexibleItemType {
|
|
+ vis,
|
|
+ defaultness,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ colon_token,
|
|
+ bounds: _,
|
|
+ ty,
|
|
+ semi_token,
|
|
+ } = input.parse()?;
|
|
+
|
|
+ if defaultness.is_some()
|
|
+ || generics.lt_token.is_some()
|
|
+ || generics.where_clause.is_some()
|
|
+ || colon_token.is_some()
|
|
+ || ty.is_some()
|
|
+ {
|
|
+ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ Ok(ForeignItem::Type(ForeignItemType {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ type_token,
|
|
+ ident,
|
|
+ semi_token,
|
|
+ }))
|
|
+ }
|
|
+ }
|
|
+
|
|
impl Parse for ForeignItemMacro {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
let mac: Macro = input.parse()?;
|
|
let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
|
|
None
|
|
} else {
|
|
Some(input.parse()?)
|
|
@@ -1737,16 +1758,46 @@ pub mod parsing {
|
|
},
|
|
eq_token: input.parse()?,
|
|
ty: input.parse()?,
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
|
|
+ let FlexibleItemType {
|
|
+ vis,
|
|
+ defaultness,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ colon_token,
|
|
+ bounds: _,
|
|
+ ty,
|
|
+ semi_token,
|
|
+ } = input.parse()?;
|
|
+
|
|
+ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
|
|
+ Ok(Item::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ let (eq_token, ty) = ty.unwrap();
|
|
+ Ok(Item::Type(ItemType {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ eq_token,
|
|
+ ty: Box::new(ty),
|
|
+ semi_token,
|
|
+ }))
|
|
+ }
|
|
+ }
|
|
+
|
|
#[cfg(not(feature = "printing"))]
|
|
fn item_existential(input: ParseStream) -> Result<TokenStream> {
|
|
Err(input.error("existential type is not supported"))
|
|
}
|
|
|
|
#[cfg(feature = "printing")]
|
|
fn item_existential(input: ParseStream) -> Result<TokenStream> {
|
|
use crate::attr::FilterAttrs;
|
|
@@ -1882,39 +1933,39 @@ pub mod parsing {
|
|
.map(Item::TraitAlias)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
}
|
|
|
|
impl Parse for ItemTrait {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- let attrs = input.call(Attribute::parse_outer)?;
|
|
+ let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
let vis: Visibility = input.parse()?;
|
|
let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
let auto_token: Option<Token![auto]> = input.parse()?;
|
|
let trait_token: Token![trait] = input.parse()?;
|
|
let ident: Ident = input.parse()?;
|
|
let generics: Generics = input.parse()?;
|
|
parse_rest_of_trait(
|
|
input,
|
|
- attrs,
|
|
+ outer_attrs,
|
|
vis,
|
|
unsafety,
|
|
auto_token,
|
|
trait_token,
|
|
ident,
|
|
generics,
|
|
)
|
|
}
|
|
}
|
|
|
|
fn parse_rest_of_trait(
|
|
input: ParseStream,
|
|
- attrs: Vec<Attribute>,
|
|
+ outer_attrs: Vec<Attribute>,
|
|
vis: Visibility,
|
|
unsafety: Option<Token![unsafe]>,
|
|
auto_token: Option<Token![auto]>,
|
|
trait_token: Token![trait],
|
|
ident: Ident,
|
|
mut generics: Generics,
|
|
) -> Result<ItemTrait> {
|
|
let colon_token: Option<Token![:]> = input.parse()?;
|
|
@@ -1932,23 +1983,24 @@ pub mod parsing {
|
|
}
|
|
}
|
|
}
|
|
|
|
generics.where_clause = input.parse()?;
|
|
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
+ let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
let mut items = Vec::new();
|
|
while !content.is_empty() {
|
|
items.push(content.parse()?);
|
|
}
|
|
|
|
Ok(ItemTrait {
|
|
- attrs,
|
|
+ attrs: private::attrs(outer_attrs, inner_attrs),
|
|
vis,
|
|
unsafety,
|
|
auto_token,
|
|
trait_token,
|
|
ident,
|
|
generics,
|
|
colon_token,
|
|
supertraits,
|
|
@@ -2009,76 +2061,83 @@ pub mod parsing {
|
|
eq_token,
|
|
bounds,
|
|
semi_token,
|
|
})
|
|
}
|
|
|
|
impl Parse for TraitItem {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let begin = input.fork();
|
|
let mut attrs = input.call(Attribute::parse_outer)?;
|
|
+ let vis: Visibility = input.parse()?;
|
|
+ let defaultness: Option<Token![default]> = input.parse()?;
|
|
let ahead = input.fork();
|
|
|
|
let lookahead = ahead.lookahead1();
|
|
- let mut item = if lookahead.peek(Token![const]) {
|
|
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
|
|
+ input.parse().map(TraitItem::Method)
|
|
+ } else if lookahead.peek(Token![const]) {
|
|
ahead.parse::<Token![const]>()?;
|
|
let lookahead = ahead.lookahead1();
|
|
- if lookahead.peek(Ident) {
|
|
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
|
|
input.parse().map(TraitItem::Const)
|
|
} else if lookahead.peek(Token![async])
|
|
|| lookahead.peek(Token![unsafe])
|
|
|| lookahead.peek(Token![extern])
|
|
|| lookahead.peek(Token![fn])
|
|
{
|
|
input.parse().map(TraitItem::Method)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
- } else if lookahead.peek(Token![async])
|
|
- || lookahead.peek(Token![unsafe])
|
|
- || lookahead.peek(Token![extern])
|
|
- || lookahead.peek(Token![fn])
|
|
- {
|
|
- input.parse().map(TraitItem::Method)
|
|
} else if lookahead.peek(Token![type]) {
|
|
- input.parse().map(TraitItem::Type)
|
|
+ parse_trait_item_type(begin.fork(), input)
|
|
} else if lookahead.peek(Ident)
|
|
|| lookahead.peek(Token![self])
|
|
|| lookahead.peek(Token![super])
|
|
- || lookahead.peek(Token![extern])
|
|
|| lookahead.peek(Token![crate])
|
|
|| lookahead.peek(Token![::])
|
|
{
|
|
input.parse().map(TraitItem::Macro)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}?;
|
|
|
|
- {
|
|
- let item_attrs = match &mut item {
|
|
- TraitItem::Const(item) => &mut item.attrs,
|
|
- TraitItem::Method(item) => &mut item.attrs,
|
|
- TraitItem::Type(item) => &mut item.attrs,
|
|
- TraitItem::Macro(item) => &mut item.attrs,
|
|
- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
|
|
- };
|
|
- attrs.extend(item_attrs.drain(..));
|
|
- *item_attrs = attrs;
|
|
+ match (vis, defaultness) {
|
|
+ (Visibility::Inherited, None) => {}
|
|
+ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
|
|
}
|
|
|
|
+ let item_attrs = match &mut item {
|
|
+ TraitItem::Const(item) => &mut item.attrs,
|
|
+ TraitItem::Method(item) => &mut item.attrs,
|
|
+ TraitItem::Type(item) => &mut item.attrs,
|
|
+ TraitItem::Macro(item) => &mut item.attrs,
|
|
+ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
|
|
+ };
|
|
+ attrs.extend(item_attrs.drain(..));
|
|
+ *item_attrs = attrs;
|
|
Ok(item)
|
|
}
|
|
}
|
|
|
|
impl Parse for TraitItemConst {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(TraitItemConst {
|
|
attrs: input.call(Attribute::parse_outer)?,
|
|
const_token: input.parse()?,
|
|
- ident: input.parse()?,
|
|
+ ident: {
|
|
+ let lookahead = input.lookahead1();
|
|
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
|
|
+ input.call(Ident::parse_any)?
|
|
+ } else {
|
|
+ return Err(lookahead.error());
|
|
+ }
|
|
+ },
|
|
colon_token: input.parse()?,
|
|
ty: input.parse()?,
|
|
default: {
|
|
if input.peek(Token![=]) {
|
|
let eq_token: Token![=] = input.parse()?;
|
|
let default: Expr = input.parse()?;
|
|
Some((eq_token, default))
|
|
} else {
|
|
@@ -2088,30 +2147,17 @@ pub mod parsing {
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TraitItemMethod {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
- let constness: Option<Token![const]> = input.parse()?;
|
|
- let asyncness: Option<Token![async]> = input.parse()?;
|
|
- let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
- let abi: Option<Abi> = input.parse()?;
|
|
- let fn_token: Token![fn] = input.parse()?;
|
|
- let ident: Ident = input.parse()?;
|
|
- let generics: Generics = input.parse()?;
|
|
-
|
|
- let content;
|
|
- let paren_token = parenthesized!(content in input);
|
|
- let inputs = content.parse_terminated(FnArg::parse)?;
|
|
-
|
|
- let output: ReturnType = input.parse()?;
|
|
- let where_clause: Option<WhereClause> = input.parse()?;
|
|
+ let sig = parse_signature(input)?;
|
|
|
|
let lookahead = input.lookahead1();
|
|
let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
let stmts = content.call(Block::parse_within)?;
|
|
(Some(brace_token), inner_attrs, stmts, None)
|
|
@@ -2119,32 +2165,17 @@ pub mod parsing {
|
|
let semi_token: Token![;] = input.parse()?;
|
|
(None, Vec::new(), Vec::new(), Some(semi_token))
|
|
} else {
|
|
return Err(lookahead.error());
|
|
};
|
|
|
|
Ok(TraitItemMethod {
|
|
attrs: private::attrs(outer_attrs, inner_attrs),
|
|
- sig: Signature {
|
|
- constness,
|
|
- asyncness,
|
|
- unsafety,
|
|
- abi,
|
|
- fn_token,
|
|
- ident,
|
|
- paren_token,
|
|
- inputs,
|
|
- output,
|
|
- variadic: None,
|
|
- generics: Generics {
|
|
- where_clause,
|
|
- ..generics
|
|
- },
|
|
- },
|
|
+ sig,
|
|
default: brace_token.map(|brace_token| Block { brace_token, stmts }),
|
|
semi_token,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TraitItemType {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
@@ -2183,16 +2214,45 @@ pub mod parsing {
|
|
colon_token,
|
|
bounds,
|
|
default,
|
|
semi_token,
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
|
|
+ let FlexibleItemType {
|
|
+ vis,
|
|
+ defaultness,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ colon_token,
|
|
+ bounds,
|
|
+ ty,
|
|
+ semi_token,
|
|
+ } = input.parse()?;
|
|
+
|
|
+ if defaultness.is_some() || vis.is_some() {
|
|
+ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ Ok(TraitItem::Type(TraitItemType {
|
|
+ attrs: Vec::new(),
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ colon_token,
|
|
+ bounds,
|
|
+ default: ty,
|
|
+ semi_token,
|
|
+ }))
|
|
+ }
|
|
+ }
|
|
+
|
|
impl Parse for TraitItemMacro {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
let mac: Macro = input.parse()?;
|
|
let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
|
|
None
|
|
} else {
|
|
Some(input.parse()?)
|
|
@@ -2202,123 +2262,148 @@ pub mod parsing {
|
|
mac,
|
|
semi_token,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for ItemImpl {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
- let defaultness: Option<Token![default]> = input.parse()?;
|
|
- let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
- let impl_token: Token![impl] = input.parse()?;
|
|
+ let allow_const_impl = false;
|
|
+ parse_impl(input, allow_const_impl).map(Option::unwrap)
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
|
|
+ let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
+ let defaultness: Option<Token![default]> = input.parse()?;
|
|
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
+ let impl_token: Token![impl] = input.parse()?;
|
|
|
|
- let has_generics = input.peek(Token![<])
|
|
- && (input.peek2(Token![>])
|
|
- || input.peek2(Token![#])
|
|
- || (input.peek2(Ident) || input.peek2(Lifetime))
|
|
- && (input.peek3(Token![:])
|
|
- || input.peek3(Token![,])
|
|
- || input.peek3(Token![>])));
|
|
- let generics: Generics = if has_generics {
|
|
- input.parse()?
|
|
- } else {
|
|
- Generics::default()
|
|
- };
|
|
+ let has_generics = input.peek(Token![<])
|
|
+ && (input.peek2(Token![>])
|
|
+ || input.peek2(Token![#])
|
|
+ || (input.peek2(Ident) || input.peek2(Lifetime))
|
|
+ && (input.peek3(Token![:])
|
|
+ || input.peek3(Token![,])
|
|
+ || input.peek3(Token![>]))
|
|
+ || input.peek2(Token![const]));
|
|
+ let generics: Generics = if has_generics {
|
|
+ input.parse()?
|
|
+ } else {
|
|
+ Generics::default()
|
|
+ };
|
|
|
|
- let trait_ = {
|
|
- // TODO: optimize using advance_to
|
|
- let ahead = input.fork();
|
|
- if ahead.parse::<Option<Token![!]>>().is_ok()
|
|
- && ahead.parse::<Path>().is_ok()
|
|
- && ahead.parse::<Token![for]>().is_ok()
|
|
- {
|
|
- let polarity: Option<Token![!]> = input.parse()?;
|
|
- let path: Path = input.parse()?;
|
|
- let for_token: Token![for] = input.parse()?;
|
|
- Some((polarity, path, for_token))
|
|
- } else {
|
|
- None
|
|
- }
|
|
- };
|
|
- let self_ty: Type = input.parse()?;
|
|
- let where_clause: Option<WhereClause> = input.parse()?;
|
|
+ let is_const_impl = allow_const_impl
|
|
+ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
|
|
+ if is_const_impl {
|
|
+ input.parse::<Option<Token![?]>>()?;
|
|
+ input.parse::<Token![const]>()?;
|
|
+ }
|
|
|
|
- let content;
|
|
- let brace_token = braced!(content in input);
|
|
- let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
+ let trait_ = (|| -> Option<_> {
|
|
+ let ahead = input.fork();
|
|
+ let polarity: Option<Token![!]> = ahead.parse().ok()?;
|
|
+ let mut path: Path = ahead.parse().ok()?;
|
|
+ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
|
|
+ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
|
|
+ path.segments.last_mut().unwrap().arguments = parenthesized;
|
|
+ }
|
|
+ let for_token: Token![for] = ahead.parse().ok()?;
|
|
+ input.advance_to(&ahead);
|
|
+ Some((polarity, path, for_token))
|
|
+ })();
|
|
|
|
- let mut items = Vec::new();
|
|
- while !content.is_empty() {
|
|
- items.push(content.parse()?);
|
|
- }
|
|
+ let self_ty: Type = input.parse()?;
|
|
+ let where_clause: Option<WhereClause> = input.parse()?;
|
|
+
|
|
+ let content;
|
|
+ let brace_token = braced!(content in input);
|
|
+ let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
|
|
- Ok(ItemImpl {
|
|
+ let mut items = Vec::new();
|
|
+ while !content.is_empty() {
|
|
+ items.push(content.parse()?);
|
|
+ }
|
|
+
|
|
+ if is_const_impl {
|
|
+ Ok(None)
|
|
+ } else {
|
|
+ Ok(Some(ItemImpl {
|
|
attrs: private::attrs(outer_attrs, inner_attrs),
|
|
defaultness,
|
|
unsafety,
|
|
impl_token,
|
|
generics: Generics {
|
|
where_clause,
|
|
..generics
|
|
},
|
|
trait_,
|
|
self_ty: Box::new(self_ty),
|
|
brace_token,
|
|
items,
|
|
- })
|
|
+ }))
|
|
}
|
|
}
|
|
|
|
impl Parse for ImplItem {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let begin = input.fork();
|
|
let mut attrs = input.call(Attribute::parse_outer)?;
|
|
let ahead = input.fork();
|
|
let vis: Visibility = ahead.parse()?;
|
|
|
|
let mut lookahead = ahead.lookahead1();
|
|
let defaultness = if lookahead.peek(Token![default]) && !ahead.peek2(Token![!]) {
|
|
let defaultness: Token![default] = ahead.parse()?;
|
|
lookahead = ahead.lookahead1();
|
|
Some(defaultness)
|
|
} else {
|
|
None
|
|
};
|
|
|
|
- let mut item = if lookahead.peek(Token![const]) {
|
|
- ahead.parse::<Token![const]>()?;
|
|
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
|
|
+ input.parse().map(ImplItem::Method)
|
|
+ } else if lookahead.peek(Token![const]) {
|
|
+ let const_token: Token![const] = ahead.parse()?;
|
|
let lookahead = ahead.lookahead1();
|
|
- if lookahead.peek(Ident) {
|
|
- input.parse().map(ImplItem::Const)
|
|
- } else if lookahead.peek(Token![unsafe])
|
|
- || lookahead.peek(Token![async])
|
|
- || lookahead.peek(Token![extern])
|
|
- || lookahead.peek(Token![fn])
|
|
- {
|
|
- input.parse().map(ImplItem::Method)
|
|
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
|
|
+ input.advance_to(&ahead);
|
|
+ let ident: Ident = input.call(Ident::parse_any)?;
|
|
+ let colon_token: Token![:] = input.parse()?;
|
|
+ let ty: Type = input.parse()?;
|
|
+ if let Some(eq_token) = input.parse()? {
|
|
+ return Ok(ImplItem::Const(ImplItemConst {
|
|
+ attrs,
|
|
+ vis,
|
|
+ defaultness,
|
|
+ const_token,
|
|
+ ident,
|
|
+ colon_token,
|
|
+ ty,
|
|
+ eq_token,
|
|
+ expr: input.parse()?,
|
|
+ semi_token: input.parse()?,
|
|
+ }));
|
|
+ } else {
|
|
+ input.parse::<Token![;]>()?;
|
|
+ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
|
|
+ }
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
- } else if lookahead.peek(Token![unsafe])
|
|
- || lookahead.peek(Token![async])
|
|
- || lookahead.peek(Token![extern])
|
|
- || lookahead.peek(Token![fn])
|
|
- {
|
|
- input.parse().map(ImplItem::Method)
|
|
} else if lookahead.peek(Token![type]) {
|
|
- input.parse().map(ImplItem::Type)
|
|
+ parse_impl_item_type(begin, input)
|
|
} else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
|
|
input.call(item_existential).map(ImplItem::Verbatim)
|
|
} else if vis.is_inherited()
|
|
&& defaultness.is_none()
|
|
&& (lookahead.peek(Ident)
|
|
|| lookahead.peek(Token![self])
|
|
|| lookahead.peek(Token![super])
|
|
- || lookahead.peek(Token![extern])
|
|
|| lookahead.peek(Token![crate])
|
|
|| lookahead.peek(Token![::]))
|
|
{
|
|
input.parse().map(ImplItem::Macro)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}?;
|
|
|
|
@@ -2341,72 +2426,68 @@ pub mod parsing {
|
|
|
|
impl Parse for ImplItemConst {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(ImplItemConst {
|
|
attrs: input.call(Attribute::parse_outer)?,
|
|
vis: input.parse()?,
|
|
defaultness: input.parse()?,
|
|
const_token: input.parse()?,
|
|
- ident: input.parse()?,
|
|
+ ident: {
|
|
+ let lookahead = input.lookahead1();
|
|
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
|
|
+ input.call(Ident::parse_any)?
|
|
+ } else {
|
|
+ return Err(lookahead.error());
|
|
+ }
|
|
+ },
|
|
colon_token: input.parse()?,
|
|
ty: input.parse()?,
|
|
eq_token: input.parse()?,
|
|
expr: input.parse()?,
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for ImplItemMethod {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- let outer_attrs = input.call(Attribute::parse_outer)?;
|
|
+ let mut attrs = input.call(Attribute::parse_outer)?;
|
|
let vis: Visibility = input.parse()?;
|
|
let defaultness: Option<Token![default]> = input.parse()?;
|
|
- let constness: Option<Token![const]> = input.parse()?;
|
|
- let asyncness: Option<Token![async]> = input.parse()?;
|
|
- let unsafety: Option<Token![unsafe]> = input.parse()?;
|
|
- let abi: Option<Abi> = input.parse()?;
|
|
- let fn_token: Token![fn] = input.parse()?;
|
|
- let ident: Ident = input.parse()?;
|
|
- let generics: Generics = input.parse()?;
|
|
+ let sig = parse_signature(input)?;
|
|
|
|
- let content;
|
|
- let paren_token = parenthesized!(content in input);
|
|
- let inputs = content.parse_terminated(FnArg::parse)?;
|
|
-
|
|
- let output: ReturnType = input.parse()?;
|
|
- let where_clause: Option<WhereClause> = input.parse()?;
|
|
-
|
|
- let content;
|
|
- let brace_token = braced!(content in input);
|
|
- let inner_attrs = content.call(Attribute::parse_inner)?;
|
|
- let stmts = content.call(Block::parse_within)?;
|
|
+ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
|
|
+ // Accept methods without a body in an impl block because
|
|
+ // rustc's *parser* does not reject them (the compilation error
|
|
+ // is emitted later than parsing) and it can be useful for macro
|
|
+ // DSLs.
|
|
+ let mut punct = Punct::new(';', Spacing::Alone);
|
|
+ punct.set_span(semi.span);
|
|
+ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
|
|
+ Block {
|
|
+ brace_token: Brace::default(),
|
|
+ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
|
|
+ }
|
|
+ } else {
|
|
+ let content;
|
|
+ let brace_token = braced!(content in input);
|
|
+ attrs.extend(content.call(Attribute::parse_inner)?);
|
|
+ Block {
|
|
+ brace_token,
|
|
+ stmts: content.call(Block::parse_within)?,
|
|
+ }
|
|
+ };
|
|
|
|
Ok(ImplItemMethod {
|
|
- attrs: private::attrs(outer_attrs, inner_attrs),
|
|
+ attrs,
|
|
vis,
|
|
defaultness,
|
|
- sig: Signature {
|
|
- constness,
|
|
- asyncness,
|
|
- unsafety,
|
|
- abi,
|
|
- fn_token,
|
|
- ident,
|
|
- paren_token,
|
|
- inputs,
|
|
- output,
|
|
- variadic: None,
|
|
- generics: Generics {
|
|
- where_clause,
|
|
- ..generics
|
|
- },
|
|
- },
|
|
- block: Block { brace_token, stmts },
|
|
+ sig,
|
|
+ block,
|
|
})
|
|
}
|
|
}
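A sketch of the behaviour described in the comment above, assuming syn 1.0.40 with the "full" and "parsing" features; the method snippet is invented for illustration: a bodiless method inside an impl block now parses, with a verbatim `;` standing in for the missing block.

use syn::ImplItemMethod;

fn main() {
    let method: ImplItemMethod = syn::parse_str("fn placeholder(&self);").unwrap();
    // The stand-in block holds a single verbatim `;` statement.
    assert_eq!(method.block.stmts.len(), 1);
}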
|
|
|
|
impl Parse for ImplItemType {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(ImplItemType {
|
|
attrs: input.call(Attribute::parse_outer)?,
|
|
@@ -2421,16 +2502,47 @@ pub mod parsing {
|
|
},
|
|
eq_token: input.parse()?,
|
|
ty: input.parse()?,
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
|
|
+ let FlexibleItemType {
|
|
+ vis,
|
|
+ defaultness,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ colon_token,
|
|
+ bounds: _,
|
|
+ ty,
|
|
+ semi_token,
|
|
+ } = input.parse()?;
|
|
+
|
|
+ if colon_token.is_some() || ty.is_none() {
|
|
+ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ let (eq_token, ty) = ty.unwrap();
|
|
+ Ok(ImplItem::Type(ImplItemType {
|
|
+ attrs: Vec::new(),
|
|
+ vis,
|
|
+ defaultness,
|
|
+ type_token,
|
|
+ ident,
|
|
+ generics,
|
|
+ eq_token,
|
|
+ ty,
|
|
+ semi_token,
|
|
+ }))
|
|
+ }
|
|
+ }
|
|
+
|
|
impl Parse for ImplItemMacro {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let attrs = input.call(Attribute::parse_outer)?;
|
|
let mac: Macro = input.parse()?;
|
|
let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
|
|
None
|
|
} else {
|
|
Some(input.parse()?)
|
|
@@ -2466,16 +2578,17 @@ pub mod parsing {
|
|
mod printing {
|
|
use super::*;
|
|
|
|
use proc_macro2::TokenStream;
|
|
use quote::{ToTokens, TokenStreamExt};
|
|
|
|
use crate::attr::FilterAttrs;
|
|
use crate::print::TokensOrDefault;
|
|
+ use crate::punctuated::Pair;
|
|
|
|
impl ToTokens for ItemExternCrate {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
tokens.append_all(self.attrs.outer());
|
|
self.vis.to_tokens(tokens);
|
|
self.extern_token.to_tokens(tokens);
|
|
self.crate_token.to_tokens(tokens);
|
|
self.ident.to_tokens(tokens);
|
|
@@ -2830,16 +2943,24 @@ mod printing {
|
|
}
|
|
|
|
impl ToTokens for ImplItemMethod {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
tokens.append_all(self.attrs.outer());
|
|
self.vis.to_tokens(tokens);
|
|
self.defaultness.to_tokens(tokens);
|
|
self.sig.to_tokens(tokens);
|
|
+ if self.block.stmts.len() == 1 {
|
|
+ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
|
|
+ if verbatim.to_string() == ";" {
|
|
+ verbatim.to_tokens(tokens);
|
|
+ return;
|
|
+ }
|
|
+ }
|
|
+ }
|
|
self.block.brace_token.surround(tokens, |tokens| {
|
|
tokens.append_all(self.attrs.inner());
|
|
tokens.append_all(&self.block.stmts);
|
|
});
|
|
}
|
|
}
|
|
|
|
impl ToTokens for ImplItemType {
|
|
@@ -2900,31 +3021,71 @@ mod printing {
|
|
impl ToTokens for ForeignItemMacro {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
tokens.append_all(self.attrs.outer());
|
|
self.mac.to_tokens(tokens);
|
|
self.semi_token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
+ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
|
|
+ let arg = match arg {
|
|
+ FnArg::Typed(arg) => arg,
|
|
+ FnArg::Receiver(receiver) => {
|
|
+ receiver.to_tokens(tokens);
|
|
+ return false;
|
|
+ }
|
|
+ };
|
|
+
|
|
+ match arg.ty.as_ref() {
|
|
+ Type::Verbatim(ty) if ty.to_string() == "..." => {
|
|
+ match arg.pat.as_ref() {
|
|
+ Pat::Verbatim(pat) if pat.to_string() == "..." => {
|
|
+ tokens.append_all(arg.attrs.outer());
|
|
+ pat.to_tokens(tokens);
|
|
+ }
|
|
+ _ => arg.to_tokens(tokens),
|
|
+ }
|
|
+ true
|
|
+ }
|
|
+ _ => {
|
|
+ arg.to_tokens(tokens);
|
|
+ false
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+
|
|
impl ToTokens for Signature {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
self.constness.to_tokens(tokens);
|
|
self.asyncness.to_tokens(tokens);
|
|
self.unsafety.to_tokens(tokens);
|
|
self.abi.to_tokens(tokens);
|
|
self.fn_token.to_tokens(tokens);
|
|
self.ident.to_tokens(tokens);
|
|
self.generics.to_tokens(tokens);
|
|
self.paren_token.surround(tokens, |tokens| {
|
|
- self.inputs.to_tokens(tokens);
|
|
- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
|
|
- <Token![,]>::default().to_tokens(tokens);
|
|
+ let mut last_is_variadic = false;
|
|
+ for input in self.inputs.pairs() {
|
|
+ match input {
|
|
+ Pair::Punctuated(input, comma) => {
|
|
+ maybe_variadic_to_tokens(input, tokens);
|
|
+ comma.to_tokens(tokens);
|
|
+ }
|
|
+ Pair::End(input) => {
|
|
+ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
|
|
+ }
|
|
+ }
|
|
}
|
|
- self.variadic.to_tokens(tokens);
|
|
+ if self.variadic.is_some() && !last_is_variadic {
|
|
+ if !self.inputs.empty_or_trailing() {
|
|
+ <Token![,]>::default().to_tokens(tokens);
|
|
+ }
|
|
+ self.variadic.to_tokens(tokens);
|
|
+ }
|
|
});
|
|
self.output.to_tokens(tokens);
|
|
self.generics.where_clause.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for Receiver {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
|
|
--- a/third_party/rust/syn/src/lib.rs
|
|
+++ b/third_party/rust/syn/src/lib.rs
|
|
@@ -1,8 +1,16 @@
|
|
+//! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](https://docs.rs/syn)
|
|
+//!
|
|
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
|
|
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
|
|
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
|
|
+//!
|
|
+//! <br>
|
|
+//!
|
|
//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
|
|
//! tree of Rust source code.
|
|
//!
|
|
//! Currently this library is geared toward use in Rust procedural macros, but
|
|
//! contains some APIs that may be useful more generally.
|
|
//!
|
|
//! - **Data structures** — Syn provides a complete syntax tree that can
|
|
//! represent any valid Rust source code. The syntax tree is rooted at
|
|
@@ -57,18 +65,18 @@
|
|
//! syn = "1.0"
|
|
//! quote = "1.0"
|
|
//!
|
|
//! [lib]
|
|
//! proc-macro = true
|
|
//! ```
|
|
//!
|
|
//! ```
|
|
-//! extern crate proc_macro;
|
|
-//!
|
|
+//! # extern crate proc_macro;
|
|
+//! #
|
|
//! use proc_macro::TokenStream;
|
|
//! use quote::quote;
|
|
//! use syn::{parse_macro_input, DeriveInput};
|
|
//!
|
|
//! # const IGNORE_TOKENS: &str = stringify! {
|
|
//! #[proc_macro_derive(MyMacro)]
|
|
//! # };
|
|
//! pub fn my_macro(input: TokenStream) -> TokenStream {
|
|
@@ -237,59 +245,71 @@
|
|
//! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
|
|
//! types.
|
|
//! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
|
|
//! types.
|
|
//! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
|
|
//! dynamic library libproc_macro from rustc toolchain.
|
|
|
|
// Syn types in rustdoc of other crates get linked to here.
|
|
-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
|
|
+#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
|
|
#![deny(clippy::all, clippy::pedantic)]
|
|
// Ignored clippy lints.
|
|
#![allow(
|
|
- clippy::block_in_if_condition_stmt,
|
|
+ clippy::blocks_in_if_conditions,
|
|
clippy::cognitive_complexity,
|
|
clippy::doc_markdown,
|
|
clippy::eval_order_dependence,
|
|
clippy::inherent_to_string,
|
|
clippy::large_enum_variant,
|
|
+ clippy::manual_non_exhaustive,
|
|
+ clippy::match_like_matches_macro,
|
|
+ clippy::match_on_vec_items,
|
|
+ clippy::needless_doctest_main,
|
|
clippy::needless_pass_by_value,
|
|
clippy::never_loop,
|
|
clippy::suspicious_op_assign_impl,
|
|
clippy::too_many_arguments,
|
|
- clippy::trivially_copy_pass_by_ref
|
|
+ clippy::trivially_copy_pass_by_ref,
|
|
+ clippy::unnecessary_unwrap
|
|
)]
|
|
// Ignored clippy_pedantic lints.
|
|
#![allow(
|
|
clippy::cast_possible_truncation,
|
|
+ clippy::default_trait_access,
|
|
clippy::empty_enum,
|
|
+ clippy::expl_impl_clone_on_copy,
|
|
clippy::if_not_else,
|
|
clippy::items_after_statements,
|
|
+ clippy::match_same_arms,
|
|
+ clippy::missing_errors_doc,
|
|
clippy::module_name_repetitions,
|
|
+ clippy::must_use_candidate,
|
|
+ clippy::option_if_let_else,
|
|
clippy::shadow_unrelated,
|
|
clippy::similar_names,
|
|
clippy::single_match_else,
|
|
+ clippy::too_many_lines,
|
|
clippy::unseparated_literal_suffix,
|
|
clippy::use_self,
|
|
- clippy::used_underscore_binding
|
|
+ clippy::used_underscore_binding,
|
|
+ clippy::wildcard_imports
|
|
)]
|
|
|
|
#[cfg(all(
|
|
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
|
feature = "proc-macro"
|
|
))]
|
|
extern crate proc_macro;
|
|
extern crate proc_macro2;
|
|
extern crate unicode_xid;
|
|
|
|
#[cfg(feature = "printing")]
|
|
extern crate quote;
|
|
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
// Not public API.
|
|
#[cfg(feature = "parsing")]
|
|
#[doc(hidden)]
|
|
#[macro_use]
|
|
pub mod group;
|
|
@@ -302,17 +322,16 @@ pub use crate::ident::Ident;
|
|
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
mod attr;
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
pub use crate::attr::{
|
|
AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
|
|
};
|
|
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
mod bigint;
|
|
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
mod data;
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
pub use crate::data::{
|
|
Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
|
|
Visibility,
|
|
@@ -359,19 +378,17 @@ pub use crate::item::{
|
|
#[cfg(feature = "full")]
|
|
mod file;
|
|
#[cfg(feature = "full")]
|
|
pub use crate::file::File;
|
|
|
|
mod lifetime;
|
|
pub use crate::lifetime::Lifetime;
|
|
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
mod lit;
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
pub use crate::lit::{
|
|
Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
|
|
};
|
|
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
mod mac;
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
pub use crate::mac::{Macro, MacroDelimiter};
|
|
@@ -436,16 +453,19 @@ pub mod parse_quote;
|
|
feature = "proc-macro"
|
|
))]
|
|
#[doc(hidden)]
|
|
pub mod parse_macro_input;
|
|
|
|
#[cfg(all(feature = "parsing", feature = "printing"))]
|
|
pub mod spanned;
|
|
|
|
+#[cfg(all(feature = "parsing", feature = "full"))]
|
|
+mod whitespace;
|
|
+
|
|
mod gen {
|
|
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
|
|
///
|
|
/// Each method of the [`Visit`] trait is a hook that can be overridden to
|
|
/// customize the behavior when visiting the corresponding type of node. By
|
|
/// default, every method recursively visits the substructure of the input
|
|
/// by invoking the right visitor method of each of its fields.
|
|
///
|
|
@@ -477,17 +497,17 @@ mod gen {
|
|
/// v.visit_expr(&*node.left);
|
|
/// v.visit_bin_op(&node.op);
|
|
/// v.visit_expr(&*node.right);
|
|
/// }
|
|
///
|
|
/// /* ... */
|
|
/// ```
|
|
///
|
|
- /// *This module is available if Syn is built with the `"visit"` feature.*
|
|
+ /// *This module is available only if Syn is built with the `"visit"` feature.*
|
|
///
|
|
/// <br>
|
|
///
|
|
/// # Example
|
|
///
|
|
/// This visitor will print the name of every freestanding function in the
|
|
/// syntax tree, including nested functions.
|
|
///
|
|
@@ -598,17 +618,17 @@ mod gen {
|
|
/// v.visit_expr_mut(&mut *node.left);
|
|
/// v.visit_bin_op_mut(&mut node.op);
|
|
/// v.visit_expr_mut(&mut *node.right);
|
|
/// }
|
|
///
|
|
/// /* ... */
|
|
/// ```
|
|
///
|
|
- /// *This module is available if Syn is built with the `"visit-mut"`
|
|
+ /// *This module is available only if Syn is built with the `"visit-mut"`
|
|
/// feature.*
|
|
///
|
|
/// <br>
|
|
///
|
|
/// # Example
|
|
///
|
|
/// This mut visitor replace occurrences of u256 suffixed integer literals
|
|
/// like `999u256` with a macro invocation `bigint::u256!(999)`.
|
|
@@ -697,17 +717,17 @@ mod gen {
|
|
/// op: v.fold_bin_op(node.op),
|
|
/// right: Box::new(v.fold_expr(*node.right)),
|
|
/// }
|
|
/// }
|
|
///
|
|
/// /* ... */
|
|
/// ```
|
|
///
|
|
- /// *This module is available if Syn is built with the `"fold"` feature.*
|
|
+ /// *This module is available only if Syn is built with the `"fold"` feature.*
|
|
///
|
|
/// <br>
|
|
///
|
|
/// # Example
|
|
///
|
|
/// This fold inserts parentheses to fully parenthesizes any expression.
|
|
///
|
|
/// ```
|
|
@@ -739,43 +759,63 @@ mod gen {
|
|
///
|
|
/// // Output: (((a)()) + (((b)((1))) * ((c).d)))
|
|
/// }
|
|
/// ```
|
|
#[cfg(feature = "fold")]
|
|
#[rustfmt::skip]
|
|
pub mod fold;
|
|
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ #[rustfmt::skip]
|
|
+ mod clone;
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ #[rustfmt::skip]
|
|
+ mod eq;
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ #[rustfmt::skip]
|
|
+ mod hash;
|
|
+
|
|
+ #[cfg(feature = "extra-traits")]
|
|
+ #[rustfmt::skip]
|
|
+ mod debug;
|
|
+
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
#[path = "../gen_helper.rs"]
|
|
mod helper;
|
|
}
|
|
pub use crate::gen::*;
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
pub mod export;
|
|
|
|
mod custom_keyword;
|
|
mod custom_punctuation;
|
|
mod sealed;
|
|
+mod span;
|
|
+mod thread;
|
|
|
|
#[cfg(feature = "parsing")]
|
|
mod lookahead;
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parse;
|
|
|
|
-mod span;
|
|
+#[cfg(feature = "full")]
|
|
+mod reserved;
|
|
+
|
|
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
|
|
+mod verbatim;
|
|
|
|
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
|
|
mod print;
|
|
|
|
-mod thread;
|
|
-
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
#[allow(dead_code, non_camel_case_types)]
|
|
struct private;
|
|
|
|
// https://github.com/rust-lang/rust/issues/62830
|
|
#[cfg(feature = "parsing")]
|
|
mod rustdoc_workaround {
|
|
@@ -795,24 +835,24 @@ pub use crate::error::{Error, Result};
|
|
/// messages.
|
|
///
|
|
/// This function parses a `proc_macro::TokenStream` which is the type used for
|
|
/// interop with the compiler in a procedural macro. To parse a
|
|
/// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
|
|
///
|
|
/// [`syn::parse2`]: parse2
|
|
///
|
|
-/// *This function is available if Syn is built with both the `"parsing"` and
|
|
+/// *This function is available only if Syn is built with both the `"parsing"` and
|
|
/// `"proc-macro"` features.*
|
|
///
|
|
/// # Examples
|
|
///
|
|
/// ```
|
|
-/// extern crate proc_macro;
|
|
-///
|
|
+/// # extern crate proc_macro;
|
|
+/// #
|
|
/// use proc_macro::TokenStream;
|
|
/// use quote::quote;
|
|
/// use syn::DeriveInput;
|
|
///
|
|
/// # const IGNORE_TOKENS: &str = stringify! {
|
|
/// #[proc_macro_derive(MyMacro)]
|
|
/// # };
|
|
/// pub fn my_macro(input: TokenStream) -> TokenStream {
|
|
@@ -842,25 +882,25 @@ pub fn parse<T: parse::Parse>(tokens: pr
|
|
/// This function parses a `proc_macro2::TokenStream` which is commonly useful
|
|
/// when the input comes from a node of the Syn syntax tree, for example the
|
|
/// body tokens of a [`Macro`] node. When in a procedural macro parsing the
|
|
/// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
|
|
/// instead.
|
|
///
|
|
/// [`syn::parse`]: parse()
|
|
///
|
|
-/// *This function is available if Syn is built with the `"parsing"` feature.*
|
|
+/// *This function is available only if Syn is built with the `"parsing"` feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
|
|
parse::Parser::parse2(T::parse, tokens)
|
|
}
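As a rough illustration of the `parse2` entry point shown above (a sketch, not part of the diff, assuming the vendored syn 1.0.40 plus quote 1.x with the default "derive", "parsing" and "proc-macro" features):

    use quote::quote;
    use syn::Type;

    fn main() {
        // parse2 consumes a proc_macro2::TokenStream, e.g. one built with quote!,
        // rather than the compiler-provided proc_macro::TokenStream that parse() takes.
        let tokens = quote! { Vec<(String, usize)> };
        let ty: Type = syn::parse2(tokens).unwrap();
        drop(ty);
    }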
/// Parse a string of Rust code into the chosen syntax tree node.
|
|
///
|
|
-/// *This function is available if Syn is built with the `"parsing"` feature.*
|
|
+/// *This function is available only if Syn is built with the `"parsing"` feature.*
|
|
///
|
|
/// # Hygiene
|
|
///
|
|
/// Every span in the resulting syntax tree will be set to resolve at the macro
|
|
/// call site.
|
|
///
|
|
/// # Examples
|
|
///
|
|
@@ -869,19 +909,17 @@ pub fn parse2<T: parse::Parse>(tokens: p
|
|
///
|
|
/// fn run() -> Result<()> {
|
|
/// let code = "assert_eq!(u8::max_value(), 255)";
|
|
/// let expr = syn::parse_str::<Expr>(code)?;
|
|
/// println!("{:#?}", expr);
|
|
/// Ok(())
|
|
/// }
|
|
/// #
|
|
-/// # fn main() {
|
|
-/// # run().unwrap();
|
|
-/// # }
|
|
+/// # run().unwrap();
|
|
/// ```
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
|
|
parse::Parser::parse_str(T::parse, s)
|
|
}
|
|
|
|
// FIXME the name parse_file makes it sound like you might pass in a path to a
|
|
// file, rather than the content.
|
|
@@ -889,17 +927,17 @@ pub fn parse_str<T: parse::Parse>(s: &st
|
|
///
|
|
/// This is different from `syn::parse_str::<File>(content)` in two ways:
|
|
///
|
|
/// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
|
|
/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
|
|
///
|
|
/// If present, either of these would be an error using `from_str`.
|
|
///
|
|
-/// *This function is available if Syn is built with the `"parsing"` and
|
|
+/// *This function is available only if Syn is built with the `"parsing"` and
|
|
/// `"full"` features.*
|
|
///
|
|
/// # Examples
|
|
///
|
|
/// ```no_run
|
|
/// use std::error::Error;
|
|
/// use std::fs::File;
|
|
/// use std::io::Read;
|
|
@@ -913,35 +951,36 @@ pub fn parse_str<T: parse::Parse>(s: &st
|
|
/// if let Some(shebang) = ast.shebang {
|
|
/// println!("{}", shebang);
|
|
/// }
|
|
/// println!("{} items", ast.items.len());
|
|
///
|
|
/// Ok(())
|
|
/// }
|
|
/// #
|
|
-/// # fn main() {
|
|
-/// # run().unwrap();
|
|
-/// # }
|
|
+/// # run().unwrap();
|
|
/// ```
|
|
#[cfg(all(feature = "parsing", feature = "full"))]
|
|
pub fn parse_file(mut content: &str) -> Result<File> {
|
|
// Strip the BOM if it is present
|
|
const BOM: &str = "\u{feff}";
|
|
if content.starts_with(BOM) {
|
|
content = &content[BOM.len()..];
|
|
}
|
|
|
|
let mut shebang = None;
|
|
- if content.starts_with("#!") && !content.starts_with("#![") {
|
|
- if let Some(idx) = content.find('\n') {
|
|
- shebang = Some(content[..idx].to_string());
|
|
- content = &content[idx..];
|
|
- } else {
|
|
- shebang = Some(content.to_string());
|
|
- content = "";
|
|
+ if content.starts_with("#!") {
|
|
+ let rest = whitespace::skip(&content[2..]);
|
|
+ if !rest.starts_with('[') {
|
|
+ if let Some(idx) = content.find('\n') {
|
|
+ shebang = Some(content[..idx].to_string());
|
|
+ content = &content[idx..];
|
|
+ } else {
|
|
+ shebang = Some(content.to_string());
|
|
+ content = "";
|
|
+ }
|
|
}
|
|
}
|
|
|
|
let mut file: File = parse_str(content)?;
|
|
file.shebang = shebang;
|
|
Ok(file)
|
|
}
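A short sketch of the shebang behaviour the hunk above changes (illustration only, not part of the diff, assuming the vendored syn 1.0.40 built with the "parsing" and "full" features): a real shebang is still split off into `File::shebang`, while `#!` followed by optional whitespace and `[` is now left in place and parsed as an inner attribute.

    fn main() {
        let file = syn::parse_file("#!/usr/bin/env rustx\nfn main() {}").unwrap();
        assert_eq!(file.shebang.as_deref(), Some("#!/usr/bin/env rustx"));

        // Previously only the exact prefix "#![" was exempt; the whitespace::skip
        // call makes "#! [" count as an inner attribute too instead of a shebang.
        let file = syn::parse_file("#! [allow(dead_code)]\nfn main() {}").unwrap();
        assert!(file.shebang.is_none());
        assert_eq!(file.attrs.len(), 1);
    }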
diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
|
|
--- a/third_party/rust/syn/src/lifetime.rs
|
|
+++ b/third_party/rust/syn/src/lifetime.rs
|
|
@@ -13,20 +13,18 @@ use crate::lookahead;
|
|
///
|
|
/// - Must start with an apostrophe.
|
|
/// - Must not consist of just an apostrophe: `'`.
|
|
/// - Character after the apostrophe must be `_` or a Unicode code point with
|
|
/// the XID_Start property.
|
|
/// - All following characters must be Unicode code points with the XID_Continue
|
|
/// property.
|
|
///
|
|
-/// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
-#[cfg_attr(feature = "extra-traits", derive(Debug))]
|
|
-#[derive(Clone)]
|
|
pub struct Lifetime {
|
|
pub apostrophe: Span,
|
|
pub ident: Ident,
|
|
}
|
|
|
|
impl Lifetime {
|
|
/// # Panics
|
|
///
|
|
@@ -67,16 +65,25 @@ impl Lifetime {
|
|
|
|
impl Display for Lifetime {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
"'".fmt(formatter)?;
|
|
self.ident.fmt(formatter)
|
|
}
|
|
}
|
|
|
|
+impl Clone for Lifetime {
|
|
+ fn clone(&self) -> Self {
|
|
+ Lifetime {
|
|
+ apostrophe: self.apostrophe,
|
|
+ ident: self.ident.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
impl PartialEq for Lifetime {
|
|
fn eq(&self, other: &Lifetime) -> bool {
|
|
self.ident.eq(&other.ident)
|
|
}
|
|
}
|
|
|
|
impl Eq for Lifetime {}
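The derives removed above are replaced by hand-written impls (Clone here, Debug/Eq/Hash in the generated gen/ modules), so behaviour for callers should be unchanged. A minimal sketch, assuming the vendored syn 1.0.40 with its default "clone-impls" feature:

    use proc_macro2::Span;
    use syn::Lifetime;

    fn main() {
        let a = Lifetime::new("'a", Span::call_site());
        let b = a.clone();            // now a manual Clone impl, not a derive
        assert_eq!(a, b);             // PartialEq still compares only the ident
        assert_eq!(b.to_string(), "'a");
    }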
diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
|
|
--- a/third_party/rust/syn/src/lit.rs
|
|
+++ b/third_party/rust/syn/src/lit.rs
|
|
@@ -17,28 +17,25 @@ use std::hash::{Hash, Hasher};
|
|
use crate::lookahead;
|
|
#[cfg(feature = "parsing")]
|
|
use crate::parse::{Parse, Parser};
|
|
use crate::{Error, Result};
|
|
|
|
ast_enum_of_structs! {
|
|
/// A Rust literal such as a string or integer or boolean.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
- /// feature.*
|
|
- ///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum Lit #manual_extra_traits {
|
|
+ pub enum Lit {
|
|
/// A UTF-8 string literal: `"foo"`.
|
|
Str(LitStr),
|
|
|
|
/// A byte string literal: `b"foo"`.
|
|
ByteStr(LitByteStr),
|
|
|
|
/// A byte literal: `b'f'`.
|
|
Byte(LitByte),
|
|
@@ -59,184 +56,98 @@ ast_enum_of_structs! {
|
|
|
|
/// A raw token literal not interpreted by Syn.
|
|
Verbatim(Literal),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A UTF-8 string literal: `"foo"`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitStr #manual_extra_traits_debug {
|
|
- repr: Box<LitStrRepr>,
|
|
+ pub struct LitStr {
|
|
+ repr: Box<LitRepr>,
|
|
}
|
|
}
|
|
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
-struct LitStrRepr {
|
|
- token: Literal,
|
|
- suffix: Box<str>,
|
|
-}
|
|
-
|
|
ast_struct! {
|
|
/// A byte string literal: `b"foo"`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitByteStr #manual_extra_traits_debug {
|
|
- token: Literal,
|
|
+ pub struct LitByteStr {
|
|
+ repr: Box<LitRepr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A byte literal: `b'f'`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitByte #manual_extra_traits_debug {
|
|
- token: Literal,
|
|
+ pub struct LitByte {
|
|
+ repr: Box<LitRepr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A character literal: `'a'`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitChar #manual_extra_traits_debug {
|
|
- token: Literal,
|
|
+ pub struct LitChar {
|
|
+ repr: Box<LitRepr>,
|
|
}
|
|
}
|
|
|
|
+struct LitRepr {
|
|
+ token: Literal,
|
|
+ suffix: Box<str>,
|
|
+}
|
|
+
|
|
ast_struct! {
|
|
/// An integer literal: `1` or `1u16`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitInt #manual_extra_traits_debug {
|
|
+ pub struct LitInt {
|
|
repr: Box<LitIntRepr>,
|
|
}
|
|
}
|
|
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
struct LitIntRepr {
|
|
token: Literal,
|
|
digits: Box<str>,
|
|
suffix: Box<str>,
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A floating point literal: `1f64` or `1.0e10f64`.
|
|
///
|
|
/// Must be finite. May not be infinte or NaN.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitFloat #manual_extra_traits_debug {
|
|
+ pub struct LitFloat {
|
|
repr: Box<LitFloatRepr>,
|
|
}
|
|
}
|
|
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
struct LitFloatRepr {
|
|
token: Literal,
|
|
digits: Box<str>,
|
|
suffix: Box<str>,
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A boolean literal: `true` or `false`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
- /// `"full"` feature.*
|
|
- pub struct LitBool #manual_extra_traits_debug {
|
|
+ pub struct LitBool {
|
|
pub value: bool,
|
|
pub span: Span,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Lit {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Lit {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (Lit::Str(this), Lit::Str(other)) => this == other,
|
|
- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
|
|
- (Lit::Byte(this), Lit::Byte(other)) => this == other,
|
|
- (Lit::Char(this), Lit::Char(other)) => this == other,
|
|
- (Lit::Int(this), Lit::Int(other)) => this == other,
|
|
- (Lit::Float(this), Lit::Float(other)) => this == other,
|
|
- (Lit::Bool(this), Lit::Bool(other)) => this == other,
|
|
- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Lit {
|
|
- fn hash<H>(&self, hash: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- Lit::Str(lit) => {
|
|
- hash.write_u8(0);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::ByteStr(lit) => {
|
|
- hash.write_u8(1);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::Byte(lit) => {
|
|
- hash.write_u8(2);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::Char(lit) => {
|
|
- hash.write_u8(3);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::Int(lit) => {
|
|
- hash.write_u8(4);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::Float(lit) => {
|
|
- hash.write_u8(5);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::Bool(lit) => {
|
|
- hash.write_u8(6);
|
|
- lit.hash(hash);
|
|
- }
|
|
- Lit::Verbatim(lit) => {
|
|
- hash.write_u8(7);
|
|
- lit.to_string().hash(hash);
|
|
- }
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
impl LitStr {
|
|
pub fn new(value: &str, span: Span) -> Self {
|
|
- let mut lit = Literal::string(value);
|
|
- lit.set_span(span);
|
|
+ let mut token = Literal::string(value);
|
|
+ token.set_span(span);
|
|
LitStr {
|
|
- repr: Box::new(LitStrRepr {
|
|
- token: lit,
|
|
+ repr: Box::new(LitRepr {
|
|
+ token,
|
|
suffix: Box::<str>::default(),
|
|
}),
|
|
}
|
|
}
|
|
|
|
pub fn value(&self) -> String {
|
|
- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
|
|
+ let repr = self.repr.token.to_string();
|
|
+ let (value, _suffix) = value::parse_lit_str(&repr);
|
|
String::from(value)
|
|
}
|
|
|
|
/// Parse a syntax tree node from the content of this string literal.
|
|
///
|
|
/// All spans in the syntax tree will point to the span of this `LitStr`.
|
|
///
|
|
/// # Example
|
|
@@ -306,17 +217,17 @@ impl LitStr {
|
|
.map(|token| respan_token_tree(token, span))
|
|
.collect()
|
|
}
|
|
|
|
// Token tree with every span replaced by the given one.
|
|
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
|
|
match &mut token {
|
|
TokenTree::Group(g) => {
|
|
- let stream = respan_token_stream(g.stream().clone(), span);
|
|
+ let stream = respan_token_stream(g.stream(), span);
|
|
*g = Group::new(g.delimiter(), stream);
|
|
g.set_span(span);
|
|
}
|
|
other => other.set_span(span),
|
|
}
|
|
token
|
|
}
|
|
|
|
@@ -340,86 +251,124 @@ impl LitStr {
|
|
&self.repr.suffix
|
|
}
|
|
}
|
|
|
|
impl LitByteStr {
|
|
pub fn new(value: &[u8], span: Span) -> Self {
|
|
let mut token = Literal::byte_string(value);
|
|
token.set_span(span);
|
|
- LitByteStr { token }
|
|
+ LitByteStr {
|
|
+ repr: Box::new(LitRepr {
|
|
+ token,
|
|
+ suffix: Box::<str>::default(),
|
|
+ }),
|
|
+ }
|
|
}
|
|
|
|
pub fn value(&self) -> Vec<u8> {
|
|
- value::parse_lit_byte_str(&self.token.to_string())
|
|
+ let repr = self.repr.token.to_string();
|
|
+ let (value, _suffix) = value::parse_lit_byte_str(&repr);
|
|
+ value
|
|
}
|
|
|
|
pub fn span(&self) -> Span {
|
|
- self.token.span()
|
|
+ self.repr.token.span()
|
|
}
|
|
|
|
pub fn set_span(&mut self, span: Span) {
|
|
- self.token.set_span(span)
|
|
+ self.repr.token.set_span(span)
|
|
+ }
|
|
+
|
|
+ pub fn suffix(&self) -> &str {
|
|
+ &self.repr.suffix
|
|
}
|
|
}
|
|
|
|
impl LitByte {
|
|
pub fn new(value: u8, span: Span) -> Self {
|
|
let mut token = Literal::u8_suffixed(value);
|
|
token.set_span(span);
|
|
- LitByte { token }
|
|
+ LitByte {
|
|
+ repr: Box::new(LitRepr {
|
|
+ token,
|
|
+ suffix: Box::<str>::default(),
|
|
+ }),
|
|
+ }
|
|
}
|
|
|
|
pub fn value(&self) -> u8 {
|
|
- value::parse_lit_byte(&self.token.to_string())
|
|
+ let repr = self.repr.token.to_string();
|
|
+ let (value, _suffix) = value::parse_lit_byte(&repr);
|
|
+ value
|
|
}
|
|
|
|
pub fn span(&self) -> Span {
|
|
- self.token.span()
|
|
+ self.repr.token.span()
|
|
}
|
|
|
|
pub fn set_span(&mut self, span: Span) {
|
|
- self.token.set_span(span)
|
|
+ self.repr.token.set_span(span)
|
|
+ }
|
|
+
|
|
+ pub fn suffix(&self) -> &str {
|
|
+ &self.repr.suffix
|
|
}
|
|
}
|
|
|
|
impl LitChar {
|
|
pub fn new(value: char, span: Span) -> Self {
|
|
let mut token = Literal::character(value);
|
|
token.set_span(span);
|
|
- LitChar { token }
|
|
+ LitChar {
|
|
+ repr: Box::new(LitRepr {
|
|
+ token,
|
|
+ suffix: Box::<str>::default(),
|
|
+ }),
|
|
+ }
|
|
}
|
|
|
|
pub fn value(&self) -> char {
|
|
- value::parse_lit_char(&self.token.to_string())
|
|
+ let repr = self.repr.token.to_string();
|
|
+ let (value, _suffix) = value::parse_lit_char(&repr);
|
|
+ value
|
|
}
|
|
|
|
pub fn span(&self) -> Span {
|
|
- self.token.span()
|
|
+ self.repr.token.span()
|
|
}
|
|
|
|
pub fn set_span(&mut self, span: Span) {
|
|
- self.token.set_span(span)
|
|
+ self.repr.token.set_span(span)
|
|
+ }
|
|
+
|
|
+ pub fn suffix(&self) -> &str {
|
|
+ &self.repr.suffix
|
|
}
|
|
}
|
|
|
|
impl LitInt {
|
|
pub fn new(repr: &str, span: Span) -> Self {
|
|
- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
|
|
- let mut token = value::to_literal(repr);
|
|
- token.set_span(span);
|
|
- LitInt {
|
|
- repr: Box::new(LitIntRepr {
|
|
- token,
|
|
- digits,
|
|
- suffix,
|
|
- }),
|
|
- }
|
|
- } else {
|
|
- panic!("Not an integer literal: `{}`", repr);
|
|
+ let (digits, suffix) = match value::parse_lit_int(repr) {
|
|
+ Some(parse) => parse,
|
|
+ None => panic!("Not an integer literal: `{}`", repr),
|
|
+ };
|
|
+
|
|
+ let mut token = match value::to_literal(repr, &digits, &suffix) {
|
|
+ Some(token) => token,
|
|
+ None => panic!("Unsupported integer literal: `{}`", repr),
|
|
+ };
|
|
+
|
|
+ token.set_span(span);
|
|
+ LitInt {
|
|
+ repr: Box::new(LitIntRepr {
|
|
+ token,
|
|
+ digits,
|
|
+ suffix,
|
|
+ }),
|
|
}
|
|
}
|
|
|
|
pub fn base10_digits(&self) -> &str {
|
|
&self.repr.digits
|
|
}
|
|
|
|
/// Parses the literal into a selected number type.
|
|
@@ -487,28 +436,33 @@ impl From<Literal> for LitInt {
|
|
impl Display for LitInt {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
self.repr.token.fmt(formatter)
|
|
}
|
|
}
|
|
|
|
impl LitFloat {
|
|
pub fn new(repr: &str, span: Span) -> Self {
|
|
- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
|
|
- let mut token = value::to_literal(repr);
|
|
- token.set_span(span);
|
|
- LitFloat {
|
|
- repr: Box::new(LitFloatRepr {
|
|
- token,
|
|
- digits,
|
|
- suffix,
|
|
- }),
|
|
- }
|
|
- } else {
|
|
- panic!("Not a float literal: `{}`", repr);
|
|
+ let (digits, suffix) = match value::parse_lit_float(repr) {
|
|
+ Some(parse) => parse,
|
|
+ None => panic!("Not a float literal: `{}`", repr),
|
|
+ };
|
|
+
|
|
+ let mut token = match value::to_literal(repr, &digits, &suffix) {
|
|
+ Some(token) => token,
|
|
+ None => panic!("Unsupported float literal: `{}`", repr),
|
|
+ };
|
|
+
|
|
+ token.set_span(span);
|
|
+ LitFloat {
|
|
+ repr: Box::new(LitFloatRepr {
|
|
+ token,
|
|
+ digits,
|
|
+ suffix,
|
|
+ }),
|
|
}
|
|
}
|
|
|
|
pub fn base10_digits(&self) -> &str {
|
|
&self.repr.digits
|
|
}
|
|
|
|
pub fn base10_parse<N>(&self) -> Result<N>
|
|
@@ -570,35 +524,35 @@ mod debug_impls {
|
|
.finish()
|
|
}
|
|
}
|
|
|
|
impl Debug for LitByteStr {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
formatter
|
|
.debug_struct("LitByteStr")
|
|
- .field("token", &format_args!("{}", self.token))
|
|
+ .field("token", &format_args!("{}", self.repr.token))
|
|
.finish()
|
|
}
|
|
}
|
|
|
|
impl Debug for LitByte {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
formatter
|
|
.debug_struct("LitByte")
|
|
- .field("token", &format_args!("{}", self.token))
|
|
+ .field("token", &format_args!("{}", self.repr.token))
|
|
.finish()
|
|
}
|
|
}
|
|
|
|
impl Debug for LitChar {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
formatter
|
|
.debug_struct("LitChar")
|
|
- .field("token", &format_args!("{}", self.token))
|
|
+ .field("token", &format_args!("{}", self.repr.token))
|
|
.finish()
|
|
}
|
|
}
|
|
|
|
impl Debug for LitInt {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
formatter
|
|
.debug_struct("LitInt")
|
|
@@ -621,61 +575,102 @@ mod debug_impls {
|
|
formatter
|
|
.debug_struct("LitBool")
|
|
.field("value", &self.value)
|
|
.finish()
|
|
}
|
|
}
|
|
}
|
|
|
|
+#[cfg(feature = "clone-impls")]
|
|
+impl Clone for LitRepr {
|
|
+ fn clone(&self) -> Self {
|
|
+ LitRepr {
|
|
+ token: self.token.clone(),
|
|
+ suffix: self.suffix.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "clone-impls")]
|
|
+impl Clone for LitIntRepr {
|
|
+ fn clone(&self) -> Self {
|
|
+ LitIntRepr {
|
|
+ token: self.token.clone(),
|
|
+ digits: self.digits.clone(),
|
|
+ suffix: self.suffix.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "clone-impls")]
|
|
+impl Clone for LitFloatRepr {
|
|
+ fn clone(&self) -> Self {
|
|
+ LitFloatRepr {
|
|
+ token: self.token.clone(),
|
|
+ digits: self.digits.clone(),
|
|
+ suffix: self.suffix.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
macro_rules! lit_extra_traits {
|
|
- ($ty:ident, $($field:ident).+) => {
|
|
- #[cfg(feature = "extra-traits")]
|
|
- impl Eq for $ty {}
|
|
+ ($ty:ident) => {
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Clone for $ty {
|
|
+ fn clone(&self) -> Self {
|
|
+ $ty {
|
|
+ repr: self.repr.clone(),
|
|
+ }
|
|
+ }
|
|
+ }
|
|
|
|
#[cfg(feature = "extra-traits")]
|
|
impl PartialEq for $ty {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
- self.$($field).+.to_string() == other.$($field).+.to_string()
|
|
+ self.repr.token.to_string() == other.repr.token.to_string()
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "extra-traits")]
|
|
impl Hash for $ty {
|
|
fn hash<H>(&self, state: &mut H)
|
|
where
|
|
H: Hasher,
|
|
{
|
|
- self.$($field).+.to_string().hash(state);
|
|
+ self.repr.token.to_string().hash(state);
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
#[doc(hidden)]
|
|
#[allow(non_snake_case)]
|
|
pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
|
|
match marker {}
|
|
}
|
|
};
|
|
}
|
|
|
|
-lit_extra_traits!(LitStr, repr.token);
|
|
-lit_extra_traits!(LitByteStr, token);
|
|
-lit_extra_traits!(LitByte, token);
|
|
-lit_extra_traits!(LitChar, token);
|
|
-lit_extra_traits!(LitInt, repr.token);
|
|
-lit_extra_traits!(LitFloat, repr.token);
|
|
-lit_extra_traits!(LitBool, value);
|
|
+lit_extra_traits!(LitStr);
|
|
+lit_extra_traits!(LitByteStr);
|
|
+lit_extra_traits!(LitByte);
|
|
+lit_extra_traits!(LitChar);
|
|
+lit_extra_traits!(LitInt);
|
|
+lit_extra_traits!(LitFloat);
|
|
+
|
|
+#[cfg(feature = "parsing")]
|
|
+#[doc(hidden)]
|
|
+#[allow(non_snake_case)]
|
|
+pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
|
|
+ match marker {}
|
|
+}
|
|
|
|
ast_enum! {
|
|
/// The style of a string literal, either plain quoted or a raw string like
|
|
/// `r##"data"##`.
|
|
- ///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
- /// feature.*
|
|
pub enum StrStyle #no_visit {
|
|
/// An ordinary string like `"data"`.
|
|
Cooked,
|
|
/// A raw string like `r##"data"##`.
|
|
///
|
|
/// The unsigned integer is the number of `#` symbols used.
|
|
Raw(usize),
|
|
}
|
|
@@ -686,43 +681,93 @@ ast_enum! {
|
|
#[allow(non_snake_case)]
|
|
pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
|
|
match marker {}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parsing {
|
|
use super::*;
|
|
+ use crate::buffer::Cursor;
|
|
use crate::parse::{Parse, ParseStream, Result};
|
|
+ use proc_macro2::Punct;
|
|
|
|
impl Parse for Lit {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
input.step(|cursor| {
|
|
if let Some((lit, rest)) = cursor.literal() {
|
|
return Ok((Lit::new(lit), rest));
|
|
}
|
|
- while let Some((ident, rest)) = cursor.ident() {
|
|
- let value = if ident == "true" {
|
|
- true
|
|
- } else if ident == "false" {
|
|
- false
|
|
- } else {
|
|
- break;
|
|
- };
|
|
- let lit_bool = LitBool {
|
|
- value,
|
|
- span: ident.span(),
|
|
- };
|
|
- return Ok((Lit::Bool(lit_bool), rest));
|
|
+
|
|
+ if let Some((ident, rest)) = cursor.ident() {
|
|
+ let value = ident == "true";
|
|
+ if value || ident == "false" {
|
|
+ let lit_bool = LitBool {
|
|
+ value,
|
|
+ span: ident.span(),
|
|
+ };
|
|
+ return Ok((Lit::Bool(lit_bool), rest));
|
|
+ }
|
|
}
|
|
+
|
|
+ if let Some((punct, rest)) = cursor.punct() {
|
|
+ if punct.as_char() == '-' {
|
|
+ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
|
|
+ return Ok((lit, rest));
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+
|
|
Err(cursor.error("expected literal"))
|
|
})
|
|
}
|
|
}
|
|
|
|
+ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
|
|
+ let (lit, rest) = cursor.literal()?;
|
|
+
|
|
+ let mut span = neg.span();
|
|
+ span = span.join(lit.span()).unwrap_or(span);
|
|
+
|
|
+ let mut repr = lit.to_string();
|
|
+ repr.insert(0, '-');
|
|
+
|
|
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
|
|
+ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
|
|
+ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
|
|
+ token.set_span(span);
|
|
+ return Some((
|
|
+ Lit::Int(LitInt {
|
|
+ repr: Box::new(LitIntRepr {
|
|
+ token,
|
|
+ digits,
|
|
+ suffix,
|
|
+ }),
|
|
+ }),
|
|
+ rest,
|
|
+ ));
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+
|
|
+ let (digits, suffix) = value::parse_lit_float(&repr)?;
|
|
+ let mut token = value::to_literal(&repr, &digits, &suffix)?;
|
|
+ token.set_span(span);
|
|
+ Some((
|
|
+ Lit::Float(LitFloat {
|
|
+ repr: Box::new(LitFloatRepr {
|
|
+ token,
|
|
+ digits,
|
|
+ suffix,
|
|
+ }),
|
|
+ }),
|
|
+ rest,
|
|
+ ))
|
|
+ }
|
|
+
|
|
impl Parse for LitStr {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let head = input.fork();
|
|
match input.parse()? {
|
|
Lit::Str(lit) => Ok(lit),
|
|
_ => Err(head.error("expected string literal")),
|
|
}
|
|
}
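The new `parse_negative_lit` helper above means a leading `-` punct followed by a numeric literal now parses as a single `Lit`. A hedged sketch, not part of the diff, assuming the vendored syn 1.0.40 with default features:

    use syn::Lit;

    fn main() {
        match syn::parse_str::<Lit>("-7i8").unwrap() {
            Lit::Int(int) => {
                assert_eq!(int.suffix(), "i8");
                assert_eq!(int.base10_parse::<i8>().unwrap(), -7);
            }
            _ => unreachable!("expected an integer literal"),
        }
    }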
@@ -798,29 +843,29 @@ mod printing {
|
|
impl ToTokens for LitStr {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
self.repr.token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for LitByteStr {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
- self.token.to_tokens(tokens);
|
|
+ self.repr.token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for LitByte {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
- self.token.to_tokens(tokens);
|
|
+ self.repr.token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for LitChar {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
- self.token.to_tokens(tokens);
|
|
+ self.repr.token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for LitInt {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
self.repr.token.to_tokens(tokens);
|
|
}
|
|
}
|
|
@@ -850,30 +895,39 @@ mod value {
|
|
/// Interpret a Syn literal from a proc-macro2 literal.
|
|
pub fn new(token: Literal) -> Self {
|
|
let repr = token.to_string();
|
|
|
|
match byte(&repr, 0) {
|
|
b'"' | b'r' => {
|
|
let (_, suffix) = parse_lit_str(&repr);
|
|
return Lit::Str(LitStr {
|
|
- repr: Box::new(LitStrRepr { token, suffix }),
|
|
+ repr: Box::new(LitRepr { token, suffix }),
|
|
});
|
|
}
|
|
b'b' => match byte(&repr, 1) {
|
|
b'"' | b'r' => {
|
|
- return Lit::ByteStr(LitByteStr { token });
|
|
+ let (_, suffix) = parse_lit_byte_str(&repr);
|
|
+ return Lit::ByteStr(LitByteStr {
|
|
+ repr: Box::new(LitRepr { token, suffix }),
|
|
+ });
|
|
}
|
|
b'\'' => {
|
|
- return Lit::Byte(LitByte { token });
|
|
+ let (_, suffix) = parse_lit_byte(&repr);
|
|
+ return Lit::Byte(LitByte {
|
|
+ repr: Box::new(LitRepr { token, suffix }),
|
|
+ });
|
|
}
|
|
_ => {}
|
|
},
|
|
b'\'' => {
|
|
- return Lit::Char(LitChar { token });
|
|
+ let (_, suffix) = parse_lit_char(&repr);
|
|
+ return Lit::Char(LitChar {
|
|
+ repr: Box::new(LitRepr { token, suffix }),
|
|
+ });
|
|
}
|
|
b'0'..=b'9' | b'-' => {
|
|
if !(repr.ends_with("f32") || repr.ends_with("f64")) {
|
|
if let Some((digits, suffix)) = parse_lit_int(&repr) {
|
|
return Lit::Int(LitInt {
|
|
repr: Box::new(LitIntRepr {
|
|
token,
|
|
digits,
|
|
@@ -900,16 +954,54 @@ mod value {
|
|
});
|
|
}
|
|
}
|
|
_ => {}
|
|
}
|
|
|
|
panic!("Unrecognized literal: `{}`", repr);
|
|
}
|
|
+
|
|
+ pub fn suffix(&self) -> &str {
|
|
+ match self {
|
|
+ Lit::Str(lit) => lit.suffix(),
|
|
+ Lit::ByteStr(lit) => lit.suffix(),
|
|
+ Lit::Byte(lit) => lit.suffix(),
|
|
+ Lit::Char(lit) => lit.suffix(),
|
|
+ Lit::Int(lit) => lit.suffix(),
|
|
+ Lit::Float(lit) => lit.suffix(),
|
|
+ Lit::Bool(_) | Lit::Verbatim(_) => "",
|
|
+ }
|
|
+ }
|
|
+
|
|
+ pub fn span(&self) -> Span {
|
|
+ match self {
|
|
+ Lit::Str(lit) => lit.span(),
|
|
+ Lit::ByteStr(lit) => lit.span(),
|
|
+ Lit::Byte(lit) => lit.span(),
|
|
+ Lit::Char(lit) => lit.span(),
|
|
+ Lit::Int(lit) => lit.span(),
|
|
+ Lit::Float(lit) => lit.span(),
|
|
+ Lit::Bool(lit) => lit.span,
|
|
+ Lit::Verbatim(lit) => lit.span(),
|
|
+ }
|
|
+ }
|
|
+
|
|
+ pub fn set_span(&mut self, span: Span) {
|
|
+ match self {
|
|
+ Lit::Str(lit) => lit.set_span(span),
|
|
+ Lit::ByteStr(lit) => lit.set_span(span),
|
|
+ Lit::Byte(lit) => lit.set_span(span),
|
|
+ Lit::Char(lit) => lit.set_span(span),
|
|
+ Lit::Int(lit) => lit.set_span(span),
|
|
+ Lit::Float(lit) => lit.set_span(span),
|
|
+ Lit::Bool(lit) => lit.span = span,
|
|
+ Lit::Verbatim(lit) => lit.set_span(span),
|
|
+ }
|
|
+ }
|
|
}
|
|
|
|
/// Get the byte at offset idx, or a default of `b'\0'` if we're looking
|
|
/// past the end of the input buffer.
|
|
pub fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 {
|
|
let s = s.as_ref();
|
|
if idx < s.len() {
|
|
s[idx]
|
|
@@ -999,140 +1091,144 @@ mod value {
|
|
assert_eq!(byte(s, 0), b'r');
|
|
s = &s[1..];
|
|
|
|
let mut pounds = 0;
|
|
while byte(s, pounds) == b'#' {
|
|
pounds += 1;
|
|
}
|
|
assert_eq!(byte(s, pounds), b'"');
|
|
- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
|
|
- for end in s[s.len() - pounds..].bytes() {
|
|
+ let close = s.rfind('"').unwrap();
|
|
+ for end in s[close + 1..close + 1 + pounds].bytes() {
|
|
assert_eq!(end, b'#');
|
|
}
|
|
|
|
- let content = s[pounds + 1..s.len() - pounds - 1]
|
|
- .to_owned()
|
|
- .into_boxed_str();
|
|
- let suffix = Box::<str>::default(); // todo
|
|
+ let content = s[pounds + 1..close].to_owned().into_boxed_str();
|
|
+ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
|
|
(content, suffix)
|
|
}
|
|
|
|
- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
|
|
+ // Returns (content, suffix).
|
|
+ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
|
|
assert_eq!(byte(s, 0), b'b');
|
|
match byte(s, 1) {
|
|
b'"' => parse_lit_byte_str_cooked(s),
|
|
b'r' => parse_lit_byte_str_raw(s),
|
|
_ => unreachable!(),
|
|
}
|
|
}
|
|
|
|
// Clippy false positive
|
|
// https://github.com/rust-lang-nursery/rust-clippy/issues/2329
|
|
#[allow(clippy::needless_continue)]
|
|
- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
|
|
+ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
|
|
assert_eq!(byte(s, 0), b'b');
|
|
assert_eq!(byte(s, 1), b'"');
|
|
s = &s[2..];
|
|
|
|
// We're going to want to have slices which don't respect codepoint boundaries.
|
|
- let mut s = s.as_bytes();
|
|
+ let mut v = s.as_bytes();
|
|
|
|
let mut out = Vec::new();
|
|
'outer: loop {
|
|
- let byte = match byte(s, 0) {
|
|
+ let byte = match byte(v, 0) {
|
|
b'"' => break,
|
|
b'\\' => {
|
|
- let b = byte(s, 1);
|
|
- s = &s[2..];
|
|
+ let b = byte(v, 1);
|
|
+ v = &v[2..];
|
|
match b {
|
|
b'x' => {
|
|
- let (b, rest) = backslash_x(s);
|
|
- s = rest;
|
|
+ let (b, rest) = backslash_x(v);
|
|
+ v = rest;
|
|
b
|
|
}
|
|
b'n' => b'\n',
|
|
b'r' => b'\r',
|
|
b't' => b'\t',
|
|
b'\\' => b'\\',
|
|
b'0' => b'\0',
|
|
b'\'' => b'\'',
|
|
b'"' => b'"',
|
|
b'\r' | b'\n' => loop {
|
|
- let byte = byte(s, 0);
|
|
+ let byte = byte(v, 0);
|
|
let ch = char::from_u32(u32::from(byte)).unwrap();
|
|
if ch.is_whitespace() {
|
|
- s = &s[1..];
|
|
+ v = &v[1..];
|
|
} else {
|
|
continue 'outer;
|
|
}
|
|
},
|
|
b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
|
|
}
|
|
}
|
|
b'\r' => {
|
|
- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
|
|
- s = &s[2..];
|
|
+ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
|
|
+ v = &v[2..];
|
|
b'\n'
|
|
}
|
|
b => {
|
|
- s = &s[1..];
|
|
+ v = &v[1..];
|
|
b
|
|
}
|
|
};
|
|
out.push(byte);
|
|
}
|
|
|
|
- assert_eq!(s, b"\"");
|
|
- out
|
|
+ assert_eq!(byte(v, 0), b'"');
|
|
+ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
|
|
+ (out, suffix)
|
|
}
|
|
|
|
- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
|
|
+ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
|
|
assert_eq!(byte(s, 0), b'b');
|
|
- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
|
|
+ let (value, suffix) = parse_lit_str_raw(&s[1..]);
|
|
+ (String::from(value).into_bytes(), suffix)
|
|
}
|
|
|
|
- pub fn parse_lit_byte(s: &str) -> u8 {
|
|
+ // Returns (value, suffix).
|
|
+ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
|
|
assert_eq!(byte(s, 0), b'b');
|
|
assert_eq!(byte(s, 1), b'\'');
|
|
|
|
// We're going to want to have slices which don't respect codepoint boundaries.
|
|
- let mut s = s[2..].as_bytes();
|
|
+ let mut v = s[2..].as_bytes();
|
|
|
|
- let b = match byte(s, 0) {
|
|
+ let b = match byte(v, 0) {
|
|
b'\\' => {
|
|
- let b = byte(s, 1);
|
|
- s = &s[2..];
|
|
+ let b = byte(v, 1);
|
|
+ v = &v[2..];
|
|
match b {
|
|
b'x' => {
|
|
- let (b, rest) = backslash_x(s);
|
|
- s = rest;
|
|
+ let (b, rest) = backslash_x(v);
|
|
+ v = rest;
|
|
b
|
|
}
|
|
b'n' => b'\n',
|
|
b'r' => b'\r',
|
|
b't' => b'\t',
|
|
b'\\' => b'\\',
|
|
b'0' => b'\0',
|
|
b'\'' => b'\'',
|
|
b'"' => b'"',
|
|
b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
|
|
}
|
|
}
|
|
b => {
|
|
- s = &s[1..];
|
|
+ v = &v[1..];
|
|
b
|
|
}
|
|
};
|
|
|
|
- assert_eq!(byte(s, 0), b'\'');
|
|
- b
|
|
+ assert_eq!(byte(v, 0), b'\'');
|
|
+ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
|
|
+ (b, suffix)
|
|
}
|
|
|
|
- pub fn parse_lit_char(mut s: &str) -> char {
|
|
+ // Returns (value, suffix).
|
|
+ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
|
|
assert_eq!(byte(s, 0), b'\'');
|
|
s = &s[1..];
|
|
|
|
let ch = match byte(s, 0) {
|
|
b'\\' => {
|
|
let b = byte(s, 1);
|
|
s = &s[2..];
|
|
match b {
|
|
@@ -1158,18 +1254,19 @@ mod value {
|
|
}
|
|
}
|
|
_ => {
|
|
let ch = next_chr(s);
|
|
s = &s[ch.len_utf8()..];
|
|
ch
|
|
}
|
|
};
|
|
- assert_eq!(s, "\'", "Expected end of char literal");
|
|
- ch
|
|
+ assert_eq!(byte(s, 0), b'\'');
|
|
+ let suffix = s[1..].to_owned().into_boxed_str();
|
|
+ (ch, suffix)
|
|
}
|
|
|
|
fn backslash_x<S>(s: &S) -> (u8, &S)
|
|
where
|
|
S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
|
|
{
|
|
let mut ch = 0;
|
|
let b0 = byte(s, 0);
|
|
@@ -1329,17 +1426,21 @@ mod value {
|
|
if has_e || has_dot {
|
|
return None;
|
|
}
|
|
has_dot = true;
|
|
bytes[write] = b'.';
|
|
}
|
|
b'e' | b'E' => {
|
|
if has_e {
|
|
- return None;
|
|
+ if has_exponent {
|
|
+ break;
|
|
+ } else {
|
|
+ return None;
|
|
+ }
|
|
}
|
|
has_e = true;
|
|
bytes[write] = b'e';
|
|
}
|
|
b'-' | b'+' => {
|
|
if has_sign || has_exponent || !has_e {
|
|
return None;
|
|
}
|
|
@@ -1367,16 +1468,38 @@ mod value {
|
|
digits.truncate(write);
|
|
if suffix.is_empty() || crate::ident::xid_ok(&suffix) {
|
|
Some((digits.into_boxed_str(), suffix.into_boxed_str()))
|
|
} else {
|
|
None
|
|
}
|
|
}
|
|
|
|
- pub fn to_literal(s: &str) -> Literal {
|
|
- let stream = s.parse::<TokenStream>().unwrap();
|
|
- match stream.into_iter().next().unwrap() {
|
|
- TokenTree::Literal(l) => l,
|
|
- _ => unreachable!(),
|
|
+ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
|
|
+ if repr.starts_with('-') {
|
|
+ if suffix == "f64" {
|
|
+ digits.parse().ok().map(Literal::f64_suffixed)
|
|
+ } else if suffix == "f32" {
|
|
+ digits.parse().ok().map(Literal::f32_suffixed)
|
|
+ } else if suffix == "i64" {
|
|
+ digits.parse().ok().map(Literal::i64_suffixed)
|
|
+ } else if suffix == "i32" {
|
|
+ digits.parse().ok().map(Literal::i32_suffixed)
|
|
+ } else if suffix == "i16" {
|
|
+ digits.parse().ok().map(Literal::i16_suffixed)
|
|
+ } else if suffix == "i8" {
|
|
+ digits.parse().ok().map(Literal::i8_suffixed)
|
|
+ } else if !suffix.is_empty() {
|
|
+ None
|
|
+ } else if digits.contains('.') {
|
|
+ digits.parse().ok().map(Literal::f64_unsuffixed)
|
|
+ } else {
|
|
+ digits.parse().ok().map(Literal::i64_unsuffixed)
|
|
+ }
|
|
+ } else {
|
|
+ let stream = repr.parse::<TokenStream>().unwrap();
|
|
+ match stream.into_iter().next().unwrap() {
|
|
+ TokenTree::Literal(l) => Some(l),
|
|
+ _ => unreachable!(),
|
|
+ }
|
|
}
|
|
}
|
|
}
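Taken together, the lit.rs changes above route every literal through a `repr` that keeps its token and suffix, and expose `suffix()`, `span()` and `set_span()` on the `Lit` enum itself. A small sketch, assuming the vendored syn 1.0.40 with default features:

    use proc_macro2::Span;
    use syn::Lit;

    fn main() {
        let mut lit: Lit = syn::parse_str("9.5f32").unwrap();
        assert_eq!(lit.suffix(), "f32"); // dispatches to LitFloat::suffix()
        lit.set_span(Span::call_site()); // span()/set_span() likewise dispatch
        let _ = lit.span();
    }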
diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
|
|
--- a/third_party/rust/syn/src/mac.rs
|
|
+++ b/third_party/rust/syn/src/mac.rs
|
|
@@ -1,79 +1,56 @@
|
|
use super::*;
|
|
use crate::token::{Brace, Bracket, Paren};
|
|
use proc_macro2::TokenStream;
|
|
#[cfg(feature = "parsing")]
|
|
-use proc_macro2::{Delimiter, Span, TokenTree};
|
|
+use proc_macro2::{Delimiter, Group, Span, TokenTree};
|
|
|
|
#[cfg(feature = "parsing")]
|
|
use crate::parse::{Parse, ParseStream, Parser, Result};
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use crate::tt::TokenStreamHelper;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use std::hash::{Hash, Hasher};
|
|
|
|
ast_struct! {
|
|
/// A macro invocation: `println!("{}", mac)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- pub struct Macro #manual_extra_traits {
|
|
+ pub struct Macro {
|
|
pub path: Path,
|
|
pub bang_token: Token![!],
|
|
pub delimiter: MacroDelimiter,
|
|
pub tokens: TokenStream,
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub enum MacroDelimiter {
|
|
Paren(Paren),
|
|
Brace(Brace),
|
|
Bracket(Bracket),
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Macro {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Macro {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- self.path == other.path
|
|
- && self.bang_token == other.bang_token
|
|
- && self.delimiter == other.delimiter
|
|
- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Macro {
|
|
- fn hash<H>(&self, state: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- self.path.hash(state);
|
|
- self.bang_token.hash(state);
|
|
- self.delimiter.hash(state);
|
|
- TokenStreamHelper(&self.tokens).hash(state);
|
|
- }
|
|
-}
|
|
-
|
|
#[cfg(feature = "parsing")]
|
|
-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
|
|
- match delimiter {
|
|
+fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
|
|
+ let delimiter = match macro_delimiter {
|
|
+ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
|
|
+ MacroDelimiter::Brace(_) => Delimiter::Brace,
|
|
+ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
|
|
+ };
|
|
+ let mut group = Group::new(delimiter, TokenStream::new());
|
|
+ group.set_span(match macro_delimiter {
|
|
MacroDelimiter::Paren(token) => token.span,
|
|
MacroDelimiter::Brace(token) => token.span,
|
|
MacroDelimiter::Bracket(token) => token.span,
|
|
- }
|
|
+ });
|
|
+ group.span_close()
|
|
}
|
|
|
|
impl Macro {
|
|
/// Parse the tokens within the macro invocation's delimiters into a syntax
|
|
/// tree.
|
|
///
|
|
/// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it
|
|
/// produces a more useful span when `tokens` is empty.
|
|
@@ -158,19 +135,17 @@ impl Macro {
|
|
pub fn parse_body<T: Parse>(&self) -> Result<T> {
|
|
self.parse_body_with(T::parse)
|
|
}
|
|
|
|
/// Parse the tokens within the macro invocation's delimiters using the
|
|
/// given parser.
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
|
|
- // TODO: see if we can get a group.span_close() span in here as the
|
|
- // scope, rather than the span of the whole group.
|
|
- let scope = delimiter_span(&self.delimiter);
|
|
+ let scope = delimiter_span_close(&self.delimiter);
|
|
crate::parse::parse_scoped(parser, scope, self.tokens.clone())
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
|
|
input.step(|cursor| {
|
|
if let Some((TokenTree::Group(g), rest)) = cursor.token_tree() {
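The `delimiter_span_close` change above makes `Macro::parse_body` report errors against the macro's closing delimiter rather than the span of the whole delimited group. Roughly, for callers (a sketch, not part of the diff, assuming syn 1.0.40 with the "full" and "parsing" features):

    use syn::{Expr, Macro};

    fn main() {
        let mac: Macro = syn::parse_str("assert!(1 + 1 == 2)").unwrap();
        let body: Expr = mac.parse_body().unwrap();
        drop(body);
        // Had the body failed to parse, the error span would now point at the
        // closing `)` of the invocation instead of the entire group.
    }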
diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
|
|
--- a/third_party/rust/syn/src/macros.rs
|
|
+++ b/third_party/rust/syn/src/macros.rs
|
|
@@ -1,56 +1,33 @@
|
|
macro_rules! ast_struct {
|
|
(
|
|
[$($attrs_pub:tt)*]
|
|
struct $name:ident #full $($rest:tt)*
|
|
) => {
|
|
#[cfg(feature = "full")]
|
|
- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
$($attrs_pub)* struct $name $($rest)*
|
|
|
|
#[cfg(not(feature = "full"))]
|
|
- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
$($attrs_pub)* struct $name {
|
|
- _noconstruct: (),
|
|
+ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
|
|
}
|
|
|
|
#[cfg(all(not(feature = "full"), feature = "printing"))]
|
|
impl ::quote::ToTokens for $name {
|
|
fn to_tokens(&self, _: &mut ::proc_macro2::TokenStream) {
|
|
unreachable!()
|
|
}
|
|
}
|
|
};
|
|
|
|
(
|
|
[$($attrs_pub:tt)*]
|
|
- struct $name:ident #manual_extra_traits $($rest:tt)*
|
|
- ) => {
|
|
- #[cfg_attr(feature = "extra-traits", derive(Debug))]
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
- $($attrs_pub)* struct $name $($rest)*
|
|
- };
|
|
-
|
|
- (
|
|
- [$($attrs_pub:tt)*]
|
|
- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
|
|
- ) => {
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
- $($attrs_pub)* struct $name $($rest)*
|
|
- };
|
|
-
|
|
- (
|
|
- [$($attrs_pub:tt)*]
|
|
struct $name:ident $($rest:tt)*
|
|
) => {
|
|
- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
$($attrs_pub)* struct $name $($rest)*
|
|
};
|
|
|
|
($($t:tt)*) => {
|
|
strip_attrs_pub!(ast_struct!($($t)*));
|
|
};
|
|
}
|
|
|
|
@@ -60,29 +37,18 @@ macro_rules! ast_enum {
|
|
[$($attrs_pub:tt)*]
|
|
enum $name:ident #no_visit $($rest:tt)*
|
|
) => (
|
|
ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
|
|
);
|
|
|
|
(
|
|
[$($attrs_pub:tt)*]
|
|
- enum $name:ident #manual_extra_traits $($rest:tt)*
|
|
- ) => (
|
|
- #[cfg_attr(feature = "extra-traits", derive(Debug))]
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
- $($attrs_pub)* enum $name $($rest)*
|
|
- );
|
|
-
|
|
- (
|
|
- [$($attrs_pub:tt)*]
|
|
enum $name:ident $($rest:tt)*
|
|
) => (
|
|
- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
|
|
- #[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
$($attrs_pub)* enum $name $($rest)*
|
|
);
|
|
|
|
($($t:tt)*) => {
|
|
strip_attrs_pub!(ast_enum!($($t)*));
|
|
};
|
|
}
|
|
|
|
@@ -115,36 +81,43 @@ macro_rules! ast_enum_of_structs_impl {
|
|
)*
|
|
}
|
|
|
|
$($remaining:tt)*
|
|
) => {
|
|
check_keyword_matches!(pub $pub);
|
|
check_keyword_matches!(enum $enum);
|
|
|
|
- $(
|
|
- $(
|
|
- impl From<$member> for $name {
|
|
- fn from(e: $member) -> $name {
|
|
- $name::$variant(e)
|
|
- }
|
|
- }
|
|
- )*
|
|
- )*
|
|
+ $($(
|
|
+ ast_enum_from_struct!($name::$variant, $member);
|
|
+ )*)*
|
|
|
|
#[cfg(feature = "printing")]
|
|
generate_to_tokens! {
|
|
$($remaining)*
|
|
()
|
|
tokens
|
|
$name { $($variant $($member)*,)* }
|
|
}
|
|
};
|
|
}
|
|
|
|
+macro_rules! ast_enum_from_struct {
|
|
+ // No From<TokenStream> for verbatim variants.
|
|
+ ($name:ident::Verbatim, $member:ident) => {};
|
|
+
|
|
+ ($name:ident::$variant:ident, $member:ident) => {
|
|
+ impl From<$member> for $name {
|
|
+ fn from(e: $member) -> $name {
|
|
+ $name::$variant(e)
|
|
+ }
|
|
+ }
|
|
+ };
|
|
+}
|
|
+
|
|
#[cfg(feature = "printing")]
|
|
macro_rules! generate_to_tokens {
|
|
(do_not_generate_to_tokens $($foo:tt)*) => ();
|
|
|
|
(($($arms:tt)*) $tokens:ident $name:ident { $variant:ident, $($next:tt)*}) => {
|
|
generate_to_tokens!(
|
|
($($arms)* $name::$variant => {})
|
|
$tokens $name { $($next)* }
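With the derive attributes dropped from `ast_struct!`/`ast_enum!` (those impls now come from the generated gen/clone.rs, gen/eq.rs, gen/hash.rs and gen/debug.rs) and `ast_enum_from_struct!` skipping `Verbatim`, the user-visible `From` conversions are unchanged for ordinary variants. A sketch, assuming syn 1.0.40 with the "full" feature:

    use syn::{Item, ItemStruct};

    fn main() {
        let s: ItemStruct = syn::parse_str("struct S;").unwrap();
        let item = Item::from(s);                 // still generated
        assert!(matches!(item, Item::Struct(_)));
        // No From<TokenStream> is generated for the Verbatim variants.
    }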
diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
|
|
--- a/third_party/rust/syn/src/op.rs
|
|
+++ b/third_party/rust/syn/src/op.rs
|
|
@@ -1,14 +1,13 @@
|
|
ast_enum! {
|
|
/// A binary operator: `+`, `+=`, `&`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy))]
|
|
pub enum BinOp {
|
|
/// The `+` operator (addition)
|
|
Add(Token![+]),
|
|
/// The `-` operator (subtraction)
|
|
Sub(Token![-]),
|
|
/// The `*` operator (multiplication)
|
|
Mul(Token![*]),
|
|
/// The `/` operator (division)
|
|
@@ -62,19 +61,18 @@ ast_enum! {
|
|
/// The `>>=` operator
|
|
ShrEq(Token![>>=]),
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// A unary operator: `*`, `!`, `-`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy))]
|
|
pub enum UnOp {
|
|
/// The `*` operator for dereferencing
|
|
Deref(Token![*]),
|
|
/// The `!` operator for logical inversion
|
|
Not(Token![!]),
|
|
/// The `-` operator for negation
|
|
Neg(Token![-]),
|
|
}
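Removing `derive(Copy)` here does not drop the trait: under the "clone-impls" feature the Copy/Clone impls for `BinOp` and `UnOp` are now emitted from the generated gen/clone.rs instead. A minimal sketch, assuming the vendored syn 1.0.40 with default features:

    use syn::BinOp;

    fn main() {
        let op: BinOp = syn::parse_str("+").unwrap();
        let copied = op;  // BinOp is still Copy
        let _again = op;
        drop(copied);
    }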
diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
|
|
--- a/third_party/rust/syn/src/parse.rs
|
|
+++ b/third_party/rust/syn/src/parse.rs
|
|
@@ -21,18 +21,18 @@
|
|
//! procedural macro through [`parse_macro_input!`] as shown at the bottom of
|
|
//! the snippet. If the caller provides syntactically invalid input to the
|
|
//! procedural macro, they will receive a helpful compiler error message
|
|
//! pointing out the exact token that triggered the failure to parse.
|
|
//!
|
|
//! [`parse_macro_input!`]: ../macro.parse_macro_input.html
|
|
//!
|
|
//! ```
|
|
-//! extern crate proc_macro;
|
|
-//!
|
|
+//! # extern crate proc_macro;
|
|
+//! #
|
|
//! use proc_macro::TokenStream;
|
|
//! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
|
|
//! use syn::parse::{Parse, ParseStream};
|
|
//! use syn::punctuated::Punctuated;
|
|
//!
|
|
//! enum Item {
|
|
//! Struct(ItemStruct),
|
|
//! Enum(ItemEnum),
|
|
@@ -104,19 +104,17 @@
|
|
//! ```
|
|
//! use syn::Type;
|
|
//!
|
|
//! # fn run_parser() -> syn::Result<()> {
|
|
//! let t: Type = syn::parse_str("std::collections::HashMap<String, Value>")?;
|
|
//! # Ok(())
|
|
//! # }
|
|
//! #
|
|
-//! # fn main() {
|
|
-//! # run_parser().unwrap();
|
|
-//! # }
|
|
+//! # run_parser().unwrap();
|
|
//! ```
|
|
//!
|
|
//! The [`parse_quote!`] macro also uses this approach.
|
|
//!
|
|
//! [`parse_quote!`]: ../macro.parse_quote.html
|
|
//!
|
|
//! # The `Parser` trait
|
|
//!
|
|
@@ -150,18 +148,18 @@
|
|
//!
|
|
//! In these cases the types provide a choice of parser functions rather than a
|
|
//! single `Parse` implementation, and those parser functions can be invoked
|
|
//! through the [`Parser`] trait.
|
|
//!
|
|
//! [`Parser`]: trait.Parser.html
|
|
//!
|
|
//! ```
|
|
-//! extern crate proc_macro;
|
|
-//!
|
|
+//! # extern crate proc_macro;
|
|
+//! #
|
|
//! use proc_macro::TokenStream;
|
|
//! use syn::parse::Parser;
|
|
//! use syn::punctuated::Punctuated;
|
|
//! use syn::{Attribute, Expr, PathSegment, Result, Token};
|
|
//!
|
|
//! fn call_some_parser_methods(input: TokenStream) -> Result<()> {
|
|
//! // Parse a nonempty sequence of path segments separated by `::` punctuation
|
|
//! // with no trailing punctuation.
|
|
@@ -181,17 +179,17 @@
|
|
//! let _attrs = parser.parse(tokens)?;
|
|
//!
|
|
//! Ok(())
|
|
//! }
|
|
//! ```
|
|
//!
|
|
//! ---
|
|
//!
|
|
-//! *This module is available if Syn is built with the `"parsing"` feature.*
|
|
+//! *This module is available only if Syn is built with the `"parsing"` feature.*
|
|
|
|
#[path = "discouraged.rs"]
|
|
pub mod discouraged;
|
|
|
|
use std::cell::Cell;
|
|
use std::fmt::{self, Debug, Display};
|
|
use std::marker::PhantomData;
|
|
use std::mem;
|
|
@@ -212,16 +210,21 @@ use crate::lookahead;
|
|
use crate::punctuated::Punctuated;
|
|
use crate::token::Token;
|
|
|
|
pub use crate::error::{Error, Result};
|
|
pub use crate::lookahead::{Lookahead1, Peek};
|
|
|
|
/// Parsing interface implemented by all types that can be parsed in a default
|
|
/// way from a token stream.
|
|
+///
|
|
+/// Refer to the [module documentation] for details about implementing and using
|
|
+/// the `Parse` trait.
|
|
+///
|
|
+/// [module documentation]: self
|
|
pub trait Parse: Sized {
|
|
fn parse(input: ParseStream) -> Result<Self>;
|
|
}
|
|
|
|
/// Input to a Syn parser function.
|
|
///
|
|
/// See the methods of this type under the documentation of [`ParseBuffer`]. For
|
|
/// an overview of parsing in Syn, refer to the [module documentation].
|
|
@@ -258,23 +261,26 @@ pub struct ParseBuffer<'a> {
|
|
// ParseBuffer<'a>, upcast to ParseBuffer<'short> for some lifetime shorter
|
|
// than 'a, and then assign a Cursor<'short> into the Cell.
|
|
//
|
|
// By extension, it would not be safe to expose an API that accepts a
|
|
// Cursor<'a> and trusts that it lives as long as the cursor currently in
|
|
// the cell.
|
|
cell: Cell<Cursor<'static>>,
|
|
marker: PhantomData<Cursor<'a>>,
|
|
- unexpected: Rc<Cell<Option<Span>>>,
|
|
+ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
|
|
}
|
|
|
|
impl<'a> Drop for ParseBuffer<'a> {
|
|
fn drop(&mut self) {
|
|
- if !self.is_empty() && self.unexpected.get().is_none() {
|
|
- self.unexpected.set(Some(self.cursor().span()));
|
|
+ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
|
|
+ let (inner, old_span) = inner_unexpected(self);
|
|
+ if old_span.is_none() {
|
|
+ inner.set(Unexpected::Some(unexpected_span));
|
|
+ }
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<'a> Display for ParseBuffer<'a> {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
Display::fmt(&self.cursor().token_stream(), f)
|
|
}
|
|
@@ -319,25 +325,22 @@ impl<'a> Debug for ParseBuffer<'a> {
|
|
/// #
|
|
/// # fn remainder_after_skipping_past_next_at(
|
|
/// # input: ParseStream,
|
|
/// # ) -> Result<proc_macro2::TokenStream> {
|
|
/// # skip_past_next_at(input)?;
|
|
/// # input.parse()
|
|
/// # }
|
|
/// #
|
|
-/// # fn main() {
|
|
-/// # use syn::parse::Parser;
|
|
-/// # let remainder = remainder_after_skipping_past_next_at
|
|
-/// # .parse_str("a @ b c")
|
|
-/// # .unwrap();
|
|
-/// # assert_eq!(remainder.to_string(), "b c");
|
|
-/// # }
|
|
+/// # use syn::parse::Parser;
|
|
+/// # let remainder = remainder_after_skipping_past_next_at
|
|
+/// # .parse_str("a @ b c")
|
|
+/// # .unwrap();
|
|
+/// # assert_eq!(remainder.to_string(), "b c");
|
|
/// ```
|
|
-#[derive(Copy, Clone)]
|
|
pub struct StepCursor<'c, 'a> {
|
|
scope: Span,
|
|
// This field is covariant in 'c.
|
|
cursor: Cursor<'c>,
|
|
// This field is contravariant in 'c. Together these make StepCursor
|
|
// invariant in 'c. Also covariant in 'a. The user cannot cast 'c to a
|
|
// different lifetime but can upcast into a StepCursor with a shorter
|
|
// lifetime 'a.
|
|
@@ -351,16 +354,24 @@ pub struct StepCursor<'c, 'a> {
|
|
impl<'c, 'a> Deref for StepCursor<'c, 'a> {
|
|
type Target = Cursor<'c>;
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
&self.cursor
|
|
}
|
|
}
|
|
|
|
+impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
|
|
+
|
|
+impl<'c, 'a> Clone for StepCursor<'c, 'a> {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+}
|
|
+
|
|
impl<'c, 'a> StepCursor<'c, 'a> {
|
|
/// Triggers an error at the current position of the parse stream.
|
|
///
|
|
/// The `ParseStream::step` invocation will return this same error without
|
|
/// advancing the stream state.
|
|
pub fn error<T: Display>(self, message: T) -> Error {
|
|
error::new_at(self.scope, self.cursor, message)
|
|
}
|
|
@@ -370,46 +381,91 @@ pub(crate) fn advance_step_cursor<'c, 'a
|
|
// Refer to the comments within the StepCursor definition. We use the
|
|
// fact that a StepCursor<'c, 'a> exists as proof that 'c outlives 'a.
|
|
// Cursor is covariant in its lifetime parameter so we can cast a
|
|
// Cursor<'c> to one with the shorter lifetime Cursor<'a>.
|
|
let _ = proof;
|
|
unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
|
|
}
|
|
|
|
-fn skip(input: ParseStream) -> bool {
|
|
- input
|
|
- .step(|cursor| {
|
|
- if let Some((_lifetime, rest)) = cursor.lifetime() {
|
|
- Ok((true, rest))
|
|
- } else if let Some((_token, rest)) = cursor.token_tree() {
|
|
- Ok((true, rest))
|
|
- } else {
|
|
- Ok((false, *cursor))
|
|
- }
|
|
- })
|
|
- .unwrap()
|
|
-}
|
|
-
|
|
pub(crate) fn new_parse_buffer(
|
|
scope: Span,
|
|
cursor: Cursor,
|
|
- unexpected: Rc<Cell<Option<Span>>>,
|
|
+ unexpected: Rc<Cell<Unexpected>>,
|
|
) -> ParseBuffer {
|
|
ParseBuffer {
|
|
scope,
|
|
// See comment on `cell` in the struct definition.
|
|
cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
|
|
marker: PhantomData,
|
|
- unexpected,
|
|
+ unexpected: Cell::new(Some(unexpected)),
|
|
+ }
|
|
+}
|
|
+
|
|
+pub(crate) enum Unexpected {
|
|
+ None,
|
|
+ Some(Span),
|
|
+ Chain(Rc<Cell<Unexpected>>),
|
|
+}
|
|
+
|
|
+impl Default for Unexpected {
|
|
+ fn default() -> Self {
|
|
+ Unexpected::None
|
|
+ }
|
|
+}
|
|
+
|
|
+impl Clone for Unexpected {
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Unexpected::None => Unexpected::None,
|
|
+ Unexpected::Some(span) => Unexpected::Some(*span),
|
|
+ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
|
|
+ }
|
|
}
|
|
}
|
|
|
|
-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
|
|
- buffer.unexpected.clone()
|
|
+// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
|
|
+// swapping in a None is cheap.
|
|
+fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
|
|
+ let prev = cell.take();
|
|
+ let ret = prev.clone();
|
|
+ cell.set(prev);
|
|
+ ret
|
|
+}
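`cell_clone` works around `Cell<T>` having no `get` for non-`Copy` payloads by briefly swapping in `T::default()`. The same trick in isolation, as a sketch using only the standard library:

```rust
use std::cell::Cell;

// Clone the contents of a Cell whose payload is not Copy: take it out (leaving
// T::default() behind), clone it, then put the original back.
fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
    let prev = cell.take();
    let ret = prev.clone();
    cell.set(prev);
    ret
}

fn main() {
    let cell = Cell::new(vec![1, 2, 3]);
    let copy = cell_clone(&cell);
    assert_eq!(copy, vec![1, 2, 3]);
    assert_eq!(cell.into_inner(), vec![1, 2, 3]); // original value restored
}
```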
|
|
+
|
|
+fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
|
|
+ let mut unexpected = get_unexpected(buffer);
|
|
+ loop {
|
|
+ match cell_clone(&unexpected) {
|
|
+ Unexpected::None => return (unexpected, None),
|
|
+ Unexpected::Some(span) => return (unexpected, Some(span)),
|
|
+ Unexpected::Chain(next) => unexpected = next,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
|
|
+ cell_clone(&buffer.unexpected).unwrap()
|
|
+}
|
|
+
|
|
+fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
|
|
+ if cursor.eof() {
|
|
+ return None;
|
|
+ }
|
|
+ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
|
|
+ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
|
|
+ return Some(unexpected);
|
|
+ }
|
|
+ cursor = rest;
|
|
+ }
|
|
+ if cursor.eof() {
|
|
+ None
|
|
+ } else {
|
|
+ Some(cursor.span())
|
|
+ }
|
|
}
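`span_of_unexpected_ignoring_nones` looks through `Delimiter::None` groups, which typically appear when `macro_rules!` expansion passes a matched fragment along. A sketch of such invisible groups (not part of the patch; assumes `proc-macro2` and `quote` as direct dependencies and syn's `"full"` feature):

```rust
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use quote::quote;
use syn::Expr;

fn main() {
    // Wrap `1 + 1` in an invisible (None-delimited) group, similar to what
    // macro_rules! metavariable expansion produces.
    let inner: TokenStream = quote!(1 + 1);
    let wrapped: TokenStream = TokenTree::Group(Group::new(Delimiter::None, inner)).into();
    // syn parses through the invisible delimiters rather than treating them
    // as unexpected tokens.
    assert!(syn::parse2::<Expr>(wrapped).is_ok());
}
```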
|
|
|
|
impl<'a> ParseBuffer<'a> {
|
|
/// Parses a syntax tree node of type `T`, advancing the position of our
|
|
/// parse stream past it.
|
|
pub fn parse<T: Parse>(&self) -> Result<T> {
|
|
T::parse(self)
|
|
}
|
|
@@ -561,24 +617,27 @@ impl<'a> ParseBuffer<'a> {
|
|
/// input.parse().map(UnionOrMacro::Union)
|
|
/// } else {
|
|
/// input.parse().map(UnionOrMacro::Macro)
|
|
/// }
|
|
/// }
|
|
/// }
|
|
/// ```
|
|
pub fn peek2<T: Peek>(&self, token: T) -> bool {
|
|
- let ahead = self.fork();
|
|
- skip(&ahead) && ahead.peek(token)
|
|
+ let _ = token;
|
|
+ self.cursor().skip().map_or(false, T::Token::peek)
|
|
}
|
|
|
|
/// Looks at the third-next token in the parse stream.
|
|
pub fn peek3<T: Peek>(&self, token: T) -> bool {
|
|
- let ahead = self.fork();
|
|
- skip(&ahead) && skip(&ahead) && ahead.peek(token)
|
|
+ let _ = token;
|
|
+ self.cursor()
|
|
+ .skip()
|
|
+ .and_then(Cursor::skip)
|
|
+ .map_or(false, T::Token::peek)
|
|
}
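`peek2` and `peek3` now walk the cursor directly instead of forking the buffer; their observable behavior is unchanged. A sketch of typical `peek`/`peek2` usage (not part of the patch; `KeyValue` is a made-up type):

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Ident, Result, Token};

// Hypothetical `key` or `key = value` input.
struct KeyValue {
    key: Ident,
    value: Option<Ident>,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> Result<Self> {
        let key: Ident = input.parse()?;
        // Consume `= value` only if both the `=` and a following ident are present.
        let value = if input.peek(Token![=]) && input.peek2(Ident) {
            input.parse::<Token![=]>()?;
            Some(input.parse()?)
        } else {
            None
        };
        Ok(KeyValue { key, value })
    }
}

fn main() {
    let kv: KeyValue = syn::parse_str("name = value").unwrap();
    assert_eq!(kv.key, "name");
    assert!(kv.value.is_some());
}
```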
|
|
|
|
/// Parses zero or more occurrences of `T` separated by punctuation of type
|
|
/// `P`, with optional trailing punctuation.
|
|
///
|
|
/// Parsing continues until the end of this parse stream. The entire content
|
|
/// of this parse stream must consist of `T` and `P`.
|
|
///
|
|
@@ -610,22 +669,20 @@ impl<'a> ParseBuffer<'a> {
|
|
/// ident: input.parse()?,
|
|
/// paren_token: parenthesized!(content in input),
|
|
/// fields: content.parse_terminated(Type::parse)?,
|
|
/// semi_token: input.parse()?,
|
|
/// })
|
|
/// }
|
|
/// }
|
|
/// #
|
|
- /// # fn main() {
|
|
- /// # let input = quote! {
|
|
- /// # struct S(A, B);
|
|
- /// # };
|
|
- /// # syn::parse2::<TupleStruct>(input).unwrap();
|
|
- /// # }
|
|
+ /// # let input = quote! {
|
|
+ /// # struct S(A, B);
|
|
+ /// # };
|
|
+ /// # syn::parse2::<TupleStruct>(input).unwrap();
|
|
/// ```
|
|
pub fn parse_terminated<T, P: Parse>(
|
|
&self,
|
|
parser: fn(ParseStream) -> Result<T>,
|
|
) -> Result<Punctuated<T, P>> {
|
|
Punctuated::parse_terminated_with(self, parser)
|
|
}
|
|
|
|
@@ -842,18 +899,18 @@ impl<'a> ParseBuffer<'a> {
|
|
/// }
|
|
/// ```
|
|
pub fn fork(&self) -> Self {
|
|
ParseBuffer {
|
|
scope: self.scope,
|
|
cell: self.cell.clone(),
|
|
marker: PhantomData,
|
|
// Not the parent's unexpected. Nothing cares whether the clone
|
|
- // parses all the way.
|
|
- unexpected: Rc::new(Cell::new(None)),
|
|
+ // parses all the way unless we `advance_to`.
|
|
+ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
|
|
}
|
|
}
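As the updated comment says, a fork gets its own unexpected-token cell, so speculative parsing on the fork never poisons the parent. A sketch of that pattern (not part of the patch; `MaybeTyped` is a made-up type):

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Ident, Result, Token};

struct MaybeTyped {
    name: Ident,
    ty: Option<Ident>,
}

impl Parse for MaybeTyped {
    fn parse(input: ParseStream) -> Result<Self> {
        let name: Ident = input.parse()?;
        // Probe the fork; dropping it half-consumed is harmless because its
        // unexpected-token state is independent of the parent's.
        let ahead = input.fork();
        let ty = if ahead.parse::<Token![:]>().is_ok() && ahead.peek(Ident) {
            input.parse::<Token![:]>()?;
            Some(input.parse()?)
        } else {
            None
        };
        Ok(MaybeTyped { name, ty })
    }
}

fn main() {
    let item: MaybeTyped = syn::parse_str("x: u8").unwrap();
    assert_eq!(item.name, "x");
    assert!(item.ty.is_some());
}
```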
|
|
|
|
/// Triggers an error at the current position of the parse stream.
|
|
///
|
|
/// # Example
|
|
///
|
|
/// ```
|
|
@@ -918,23 +975,21 @@ impl<'a> ParseBuffer<'a> {
|
|
/// #
|
|
/// # fn remainder_after_skipping_past_next_at(
|
|
/// # input: ParseStream,
|
|
/// # ) -> Result<proc_macro2::TokenStream> {
|
|
/// # skip_past_next_at(input)?;
|
|
/// # input.parse()
|
|
/// # }
|
|
/// #
|
|
- /// # fn main() {
|
|
- /// # use syn::parse::Parser;
|
|
- /// # let remainder = remainder_after_skipping_past_next_at
|
|
- /// # .parse_str("a @ b c")
|
|
- /// # .unwrap();
|
|
- /// # assert_eq!(remainder.to_string(), "b c");
|
|
- /// # }
|
|
+ /// # use syn::parse::Parser;
|
|
+ /// # let remainder = remainder_after_skipping_past_next_at
|
|
+ /// # .parse_str("a @ b c")
|
|
+ /// # .unwrap();
|
|
+ /// # assert_eq!(remainder.to_string(), "b c");
|
|
/// ```
|
|
pub fn step<F, R>(&self, function: F) -> Result<R>
|
|
where
|
|
F: for<'c> FnOnce(StepCursor<'c, 'a>) -> Result<(R, Cursor<'c>)>,
|
|
{
|
|
// Since the user's function is required to work for any 'c, we know
|
|
// that the Cursor<'c> they return is either derived from the input
|
|
// StepCursor<'c, 'a> or from a Cursor<'static>.
|
|
@@ -956,27 +1011,39 @@ impl<'a> ParseBuffer<'a> {
|
|
scope: self.scope,
|
|
cursor: self.cell.get(),
|
|
marker: PhantomData,
|
|
})?;
|
|
self.cell.set(rest);
|
|
Ok(node)
|
|
}
|
|
|
|
+ /// Returns the `Span` of the next token in the parse stream, or
|
|
+ /// `Span::call_site()` if this parse stream has completely exhausted its
|
|
+ /// input `TokenStream`.
|
|
+ pub fn span(&self) -> Span {
|
|
+ let cursor = self.cursor();
|
|
+ if cursor.eof() {
|
|
+ self.scope
|
|
+ } else {
|
|
+ crate::buffer::open_span_of_group(cursor)
|
|
+ }
|
|
+ }
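The new `span` method gives error reporting a sensible position even at end of input. A sketch of using it from a `Parse` impl (not part of the patch; `TwoIdents` is a made-up type):

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Error, Ident, Result};

#[derive(Debug)]
struct TwoIdents(Ident, Ident);

impl Parse for TwoIdents {
    fn parse(input: ParseStream) -> Result<Self> {
        let first: Ident = input.parse()?;
        if input.is_empty() {
            // Point at the end of the input rather than at the call site.
            return Err(Error::new(input.span(), "expected a second identifier"));
        }
        Ok(TwoIdents(first, input.parse()?))
    }
}

fn main() {
    let err = syn::parse_str::<TwoIdents>("only_one").unwrap_err();
    assert_eq!(err.to_string(), "expected a second identifier");
}
```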
|
|
+
|
|
/// Provides low-level access to the token representation underlying this
|
|
/// parse stream.
|
|
///
|
|
/// Cursors are immutable so no operations you perform against the cursor
|
|
/// will affect the state of this parse stream.
|
|
pub fn cursor(&self) -> Cursor<'a> {
|
|
self.cell.get()
|
|
}
|
|
|
|
fn check_unexpected(&self) -> Result<()> {
|
|
- match self.unexpected.get() {
|
|
+ match inner_unexpected(self).1 {
|
|
Some(span) => Err(Error::new(span, "unexpected token")),
|
|
None => Ok(()),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<T: Parse> Parse for Box<T> {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
@@ -1043,32 +1110,32 @@ impl Parse for Literal {
|
|
}
|
|
|
|
/// Parser that can parse Rust tokens into a particular syntax tree node.
|
|
///
|
|
/// Refer to the [module documentation] for details about parsing in Syn.
|
|
///
|
|
/// [module documentation]: self
|
|
///
|
|
-/// *This trait is available if Syn is built with the `"parsing"` feature.*
|
|
+/// *This trait is available only if Syn is built with the `"parsing"` feature.*
|
|
pub trait Parser: Sized {
|
|
type Output;
|
|
|
|
/// Parse a proc-macro2 token stream into the chosen syntax tree node.
|
|
///
|
|
/// This function will check that the input is fully parsed. If there are
|
|
/// any unparsed tokens at the end of the stream, an error is returned.
|
|
fn parse2(self, tokens: TokenStream) -> Result<Self::Output>;
|
|
|
|
/// Parse tokens of source code into the chosen syntax tree node.
|
|
///
|
|
/// This function will check that the input is fully parsed. If there are
|
|
/// any unparsed tokens at the end of the stream, an error is returned.
|
|
///
|
|
- /// *This method is available if Syn is built with both the `"parsing"` and
|
|
+ /// *This method is available only if Syn is built with both the `"parsing"` and
|
|
/// `"proc-macro"` features.*
|
|
#[cfg(all(
|
|
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
|
feature = "proc-macro"
|
|
))]
|
|
fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> {
|
|
self.parse2(proc_macro2::TokenStream::from(tokens))
|
|
}
|
|
@@ -1083,90 +1150,96 @@ pub trait Parser: Sized {
|
|
/// Every span in the resulting syntax tree will be set to resolve at the
|
|
/// macro call site.
|
|
fn parse_str(self, s: &str) -> Result<Self::Output> {
|
|
self.parse2(proc_macro2::TokenStream::from_str(s)?)
|
|
}
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
+ #[cfg(any(feature = "full", feature = "derive"))]
|
|
fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
|
|
let _ = scope;
|
|
self.parse2(tokens)
|
|
}
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
+ #[cfg(any(feature = "full", feature = "derive"))]
|
|
fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
|
|
input.parse().and_then(|tokens| self.parse2(tokens))
|
|
}
|
|
}
|
|
|
|
fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
|
|
let scope = Span::call_site();
|
|
let cursor = tokens.begin();
|
|
- let unexpected = Rc::new(Cell::new(None));
|
|
+ let unexpected = Rc::new(Cell::new(Unexpected::None));
|
|
new_parse_buffer(scope, cursor, unexpected)
|
|
}
|
|
|
|
impl<F, T> Parser for F
|
|
where
|
|
F: FnOnce(ParseStream) -> Result<T>,
|
|
{
|
|
type Output = T;
|
|
|
|
fn parse2(self, tokens: TokenStream) -> Result<T> {
|
|
let buf = TokenBuffer::new2(tokens);
|
|
let state = tokens_to_parse_buffer(&buf);
|
|
let node = self(&state)?;
|
|
state.check_unexpected()?;
|
|
- if state.is_empty() {
|
|
+ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
|
|
+ Err(Error::new(unexpected_span, "unexpected token"))
|
|
+ } else {
|
|
Ok(node)
|
|
- } else {
|
|
- Err(state.error("unexpected token"))
|
|
}
|
|
}
|
|
|
|
#[doc(hidden)]
|
|
+ #[cfg(any(feature = "full", feature = "derive"))]
|
|
fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
|
|
let buf = TokenBuffer::new2(tokens);
|
|
let cursor = buf.begin();
|
|
- let unexpected = Rc::new(Cell::new(None));
|
|
+ let unexpected = Rc::new(Cell::new(Unexpected::None));
|
|
let state = new_parse_buffer(scope, cursor, unexpected);
|
|
let node = self(&state)?;
|
|
state.check_unexpected()?;
|
|
- if state.is_empty() {
|
|
+ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
|
|
+ Err(Error::new(unexpected_span, "unexpected token"))
|
|
+ } else {
|
|
Ok(node)
|
|
- } else {
|
|
- Err(state.error("unexpected token"))
|
|
}
|
|
}
|
|
|
|
#[doc(hidden)]
|
|
+ #[cfg(any(feature = "full", feature = "derive"))]
|
|
fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
|
|
self(input)
|
|
}
|
|
}
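This blanket impl is what lets any `fn(ParseStream) -> Result<T>` (or closure) be used as a `Parser`. A sketch of the usual call pattern (not part of the patch, standard syn API):

```rust
use syn::parse::{ParseStream, Parser};
use syn::{Ident, Result, Token};

// A free function with the Parser-compatible signature.
fn two_idents(input: ParseStream) -> Result<(Ident, Ident)> {
    let first = input.parse()?;
    input.parse::<Token![,]>()?;
    let second = input.parse()?;
    Ok((first, second))
}

fn main() {
    // The fn item picks up the blanket Parser impl, so parse_str is available.
    let (a, b) = two_idents.parse_str("foo, bar").unwrap();
    assert_eq!(a, "foo");
    assert_eq!(b, "bar");
}
```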
|
|
|
|
+#[cfg(any(feature = "full", feature = "derive"))]
|
|
pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
|
|
f.__parse_scoped(scope, tokens)
|
|
}
|
|
|
|
+#[cfg(any(feature = "full", feature = "derive"))]
|
|
pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
|
|
f.__parse_stream(input)
|
|
}
|
|
|
|
/// An empty syntax tree node that consumes no tokens when parsed.
|
|
///
|
|
/// This is useful for attribute macros that want to ensure they are not
|
|
/// provided any attribute args.
|
|
///
|
|
/// ```
|
|
-/// extern crate proc_macro;
|
|
-///
|
|
+/// # extern crate proc_macro;
|
|
+/// #
|
|
/// use proc_macro::TokenStream;
|
|
/// use syn::parse_macro_input;
|
|
/// use syn::parse::Nothing;
|
|
///
|
|
/// # const IGNORE: &str = stringify! {
|
|
/// #[proc_macro_attribute]
|
|
/// # };
|
|
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
|
|
diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
|
|
--- a/third_party/rust/syn/src/parse_macro_input.rs
|
|
+++ b/third_party/rust/syn/src/parse_macro_input.rs
|
|
@@ -11,18 +11,18 @@
|
|
/// # Intended usage
|
|
///
|
|
/// This macro must be called from a function that returns
|
|
/// `proc_macro::TokenStream`. Usually this will be your proc macro entry point,
|
|
/// the function that has the #\[proc_macro\] / #\[proc_macro_derive\] /
|
|
/// #\[proc_macro_attribute\] attribute.
|
|
///
|
|
/// ```
|
|
-/// extern crate proc_macro;
|
|
-///
|
|
+/// # extern crate proc_macro;
|
|
+/// #
|
|
/// use proc_macro::TokenStream;
|
|
/// use syn::{parse_macro_input, Result};
|
|
/// use syn::parse::{Parse, ParseStream};
|
|
///
|
|
/// struct MyMacroInput {
|
|
/// /* ... */
|
|
/// }
|
|
///
|
|
@@ -38,28 +38,52 @@
|
|
/// # };
|
|
/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
|
|
/// let input = parse_macro_input!(tokens as MyMacroInput);
|
|
///
|
|
/// /* ... */
|
|
/// # "".parse().unwrap()
|
|
/// }
|
|
/// ```
|
|
-#[macro_export(local_inner_macros)]
|
|
+///
|
|
+/// <br>
|
|
+///
|
|
+/// # Expansion
|
|
+///
|
|
+/// `parse_macro_input!($variable as $Type)` expands to something like:
|
|
+///
|
|
+/// ```no_run
|
|
+/// # extern crate proc_macro;
|
|
+/// #
|
|
+/// # macro_rules! doc_test {
|
|
+/// # ($variable:ident as $Type:ty) => {
|
|
+/// match syn::parse::<$Type>($variable) {
|
|
+/// Ok(syntax_tree) => syntax_tree,
|
|
+/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
|
|
+/// }
|
|
+/// # };
|
|
+/// # }
|
|
+/// #
|
|
+/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
|
+/// # let _ = doc_test!(input as syn::Ident);
|
|
+/// # proc_macro::TokenStream::new()
|
|
+/// # }
|
|
+/// ```
|
|
+#[macro_export]
|
|
macro_rules! parse_macro_input {
|
|
($tokenstream:ident as $ty:ty) => {
|
|
match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
|
|
$crate::export::Ok(data) => data,
|
|
$crate::export::Err(err) => {
|
|
return $crate::export::TokenStream::from(err.to_compile_error());
|
|
}
|
|
}
|
|
};
|
|
($tokenstream:ident) => {
|
|
- parse_macro_input!($tokenstream as _)
|
|
+ $crate::parse_macro_input!($tokenstream as _)
|
|
};
|
|
}
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
// Can parse any type that implements Parse.
|
|
|
|
use crate::parse::{Parse, ParseStream, Parser, Result};
|
|
use proc_macro::TokenStream;
|
|
diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
|
|
--- a/third_party/rust/syn/src/parse_quote.rs
|
|
+++ b/third_party/rust/syn/src/parse_quote.rs
|
|
@@ -19,17 +19,17 @@
|
|
/// let stmt: Stmt = parse_quote! {
|
|
/// let #name: #ty = Default::default();
|
|
/// };
|
|
///
|
|
/// println!("{:#?}", stmt);
|
|
/// }
|
|
/// ```
|
|
///
|
|
-/// *This macro is available if Syn is built with the `"parsing"` feature,
|
|
+/// *This macro is available only if Syn is built with the `"parsing"` feature,
|
|
/// although interpolation of syntax tree nodes into the quoted tokens is only
|
|
/// supported if Syn is built with the `"printing"` feature as well.*
|
|
///
|
|
/// # Example
|
|
///
|
|
/// The following helper function adds a bound `T: HeapSize` to every type
|
|
/// parameter `T` in the input generics.
|
|
///
|
|
@@ -51,28 +51,30 @@
|
|
///
|
|
/// This macro can parse the following additional types as a special case even
|
|
/// though they do not implement the `Parse` trait.
|
|
///
|
|
/// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
|
|
/// or inner like `#![...]`
|
|
/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
|
|
/// `P` with optional trailing punctuation
|
|
+/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
|
|
///
|
|
/// [`Punctuated<T, P>`]: punctuated::Punctuated
|
|
+/// [`Vec<Stmt>`]: Block::parse_within
|
|
///
|
|
/// # Panics
|
|
///
|
|
/// Panics if the tokens fail to parse as the expected syntax tree type. The
|
|
/// caller is responsible for ensuring that the input tokens are syntactically
|
|
/// valid.
|
|
//
|
|
// TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
|
|
// https://github.com/rust-lang/rust/issues/62834
|
|
-#[macro_export(local_inner_macros)]
|
|
+#[macro_export]
|
|
macro_rules! parse_quote {
|
|
($($tt:tt)*) => {
|
|
$crate::parse_quote::parse(
|
|
$crate::export::From::from(
|
|
$crate::export::quote::quote!($($tt)*)
|
|
)
|
|
)
|
|
};
|
|
@@ -107,16 +109,18 @@ impl<T: Parse> ParseQuote for T {
|
|
}
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
// Any other types that we want `parse_quote!` to be able to parse.
|
|
|
|
use crate::punctuated::Punctuated;
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
use crate::{attr, Attribute};
|
|
+#[cfg(feature = "full")]
|
|
+use crate::{Block, Stmt};
|
|
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
impl ParseQuote for Attribute {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
if input.peek(Token![#]) && input.peek2(Token![!]) {
|
|
attr::parsing::single_parse_inner(input)
|
|
} else {
|
|
attr::parsing::single_parse_outer(input)
|
|
@@ -124,8 +128,15 @@ impl ParseQuote for Attribute {
|
|
}
|
|
}
|
|
|
|
impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Self::parse_terminated(input)
|
|
}
|
|
}
|
|
+
|
|
+#[cfg(feature = "full")]
|
|
+impl ParseQuote for Vec<Stmt> {
|
|
+ fn parse(input: ParseStream) -> Result<Self> {
|
|
+ Block::parse_within(input)
|
|
+ }
|
|
+}
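With this impl, `parse_quote!` can produce a `Vec<Stmt>` the same way `Block::parse_within` would. A sketch (not part of the patch; assumes syn is built with the `"full"` feature):

```rust
use syn::Stmt;

fn main() {
    // Statements plus an optional trailing expression, as inside a block body.
    let stmts: Vec<Stmt> = syn::parse_quote! {
        let x = 1;
        x + 1
    };
    assert_eq!(stmts.len(), 2);
}
```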
|
|
diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
|
|
--- a/third_party/rust/syn/src/pat.rs
|
|
+++ b/third_party/rust/syn/src/pat.rs
|
|
@@ -1,31 +1,27 @@
|
|
use super::*;
|
|
use crate::punctuated::Punctuated;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use crate::tt::TokenStreamHelper;
|
|
use proc_macro2::TokenStream;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use std::hash::{Hash, Hasher};
|
|
|
|
ast_enum_of_structs! {
|
|
/// A pattern in a local binding, function signature, match expression, or
|
|
/// various other places.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum Pat #manual_extra_traits {
|
|
+ pub enum Pat {
|
|
/// A box pattern: `box v`.
|
|
Box(PatBox),
|
|
|
|
/// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
|
|
Ident(PatIdent),
|
|
|
|
/// A literal pattern: `0`.
|
|
///
|
|
@@ -81,321 +77,219 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A box pattern: `box v`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatBox {
|
|
pub attrs: Vec<Attribute>,
|
|
pub box_token: Token![box],
|
|
pub pat: Box<Pat>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// It may also be a unit struct or struct variant (e.g. `None`), or a
|
|
+ /// constant; these cannot be distinguished syntactically.
|
|
+ ///
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatIdent {
|
|
pub attrs: Vec<Attribute>,
|
|
pub by_ref: Option<Token![ref]>,
|
|
pub mutability: Option<Token![mut]>,
|
|
pub ident: Ident,
|
|
pub subpat: Option<(Token![@], Box<Pat>)>,
|
|
}
|
|
}
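As the added doc sentence notes, `None` or a constant name in pattern position is indistinguishable from a fresh binding at the syntax level. A sketch of the consequence (not part of the patch; assumes the `"full"` feature):

```rust
use syn::{Pat, Stmt};

fn main() {
    // Syntactically this is just an identifier pattern, even though `None`
    // names an enum variant, so syn yields Pat::Ident.
    let stmt: Stmt = syn::parse_str("let None = x;").unwrap();
    let pat = match stmt {
        Stmt::Local(local) => local.pat,
        _ => unreachable!(),
    };
    assert!(matches!(pat, Pat::Ident(_)));
}
```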
|
|
|
|
ast_struct! {
|
|
/// A literal pattern: `0`.
|
|
///
|
|
/// This holds an `Expr` rather than a `Lit` because negative numbers
|
|
/// are represented as an `Expr::Unary`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatLit {
|
|
pub attrs: Vec<Attribute>,
|
|
pub expr: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro in pattern position.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatMacro {
|
|
pub attrs: Vec<Attribute>,
|
|
pub mac: Macro,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A pattern that matches any one of a set of cases.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatOr {
|
|
pub attrs: Vec<Attribute>,
|
|
pub leading_vert: Option<Token![|]>,
|
|
pub cases: Punctuated<Pat, Token![|]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A path pattern like `Color::Red`, optionally qualified with a
|
|
/// self-type.
|
|
///
|
|
/// Unqualified path patterns can legally refer to variants, structs,
|
|
/// constants or associated constants. Qualified path patterns like
|
|
/// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
|
|
/// associated constants.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatPath {
|
|
pub attrs: Vec<Attribute>,
|
|
pub qself: Option<QSelf>,
|
|
pub path: Path,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A range pattern: `1..=2`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatRange {
|
|
pub attrs: Vec<Attribute>,
|
|
pub lo: Box<Expr>,
|
|
pub limits: RangeLimits,
|
|
pub hi: Box<Expr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A reference pattern: `&mut var`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatReference {
|
|
pub attrs: Vec<Attribute>,
|
|
pub and_token: Token![&],
|
|
pub mutability: Option<Token![mut]>,
|
|
pub pat: Box<Pat>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// The dots in a tuple or slice pattern: `[0, 1, ..]`
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatRest {
|
|
pub attrs: Vec<Attribute>,
|
|
pub dot2_token: Token![..],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatSlice {
|
|
pub attrs: Vec<Attribute>,
|
|
pub bracket_token: token::Bracket,
|
|
pub elems: Punctuated<Pat, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A struct or struct variant pattern: `Variant { x, y, .. }`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatStruct {
|
|
pub attrs: Vec<Attribute>,
|
|
pub path: Path,
|
|
pub brace_token: token::Brace,
|
|
pub fields: Punctuated<FieldPat, Token![,]>,
|
|
pub dot2_token: Option<Token![..]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A tuple pattern: `(a, b)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatTuple {
|
|
pub attrs: Vec<Attribute>,
|
|
pub paren_token: token::Paren,
|
|
pub elems: Punctuated<Pat, Token![,]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatTupleStruct {
|
|
pub attrs: Vec<Attribute>,
|
|
pub path: Path,
|
|
pub pat: PatTuple,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A type ascription pattern: `foo: f64`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatType {
|
|
pub attrs: Vec<Attribute>,
|
|
pub pat: Box<Pat>,
|
|
pub colon_token: Token![:],
|
|
pub ty: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A pattern that matches any value: `_`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct PatWild {
|
|
pub attrs: Vec<Attribute>,
|
|
pub underscore_token: Token![_],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A single field in a struct pattern.
|
|
///
|
|
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
|
|
/// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct FieldPat {
|
|
pub attrs: Vec<Attribute>,
|
|
pub member: Member,
|
|
pub colon_token: Option<Token![:]>,
|
|
pub pat: Box<Pat>,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Pat {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Pat {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (Pat::Box(this), Pat::Box(other)) => this == other,
|
|
- (Pat::Ident(this), Pat::Ident(other)) => this == other,
|
|
- (Pat::Lit(this), Pat::Lit(other)) => this == other,
|
|
- (Pat::Macro(this), Pat::Macro(other)) => this == other,
|
|
- (Pat::Or(this), Pat::Or(other)) => this == other,
|
|
- (Pat::Path(this), Pat::Path(other)) => this == other,
|
|
- (Pat::Range(this), Pat::Range(other)) => this == other,
|
|
- (Pat::Reference(this), Pat::Reference(other)) => this == other,
|
|
- (Pat::Rest(this), Pat::Rest(other)) => this == other,
|
|
- (Pat::Slice(this), Pat::Slice(other)) => this == other,
|
|
- (Pat::Struct(this), Pat::Struct(other)) => this == other,
|
|
- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
|
|
- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
|
|
- (Pat::Type(this), Pat::Type(other)) => this == other,
|
|
- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- (Pat::Wild(this), Pat::Wild(other)) => this == other,
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Pat {
|
|
- fn hash<H>(&self, hash: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- Pat::Box(pat) => {
|
|
- hash.write_u8(0);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Ident(pat) => {
|
|
- hash.write_u8(1);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Lit(pat) => {
|
|
- hash.write_u8(2);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Macro(pat) => {
|
|
- hash.write_u8(3);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Or(pat) => {
|
|
- hash.write_u8(4);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Path(pat) => {
|
|
- hash.write_u8(5);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Range(pat) => {
|
|
- hash.write_u8(6);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Reference(pat) => {
|
|
- hash.write_u8(7);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Rest(pat) => {
|
|
- hash.write_u8(8);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Slice(pat) => {
|
|
- hash.write_u8(9);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Struct(pat) => {
|
|
- hash.write_u8(10);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Tuple(pat) => {
|
|
- hash.write_u8(11);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::TupleStruct(pat) => {
|
|
- hash.write_u8(12);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Type(pat) => {
|
|
- hash.write_u8(13);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::Verbatim(pat) => {
|
|
- hash.write_u8(14);
|
|
- TokenStreamHelper(pat).hash(hash);
|
|
- }
|
|
- Pat::Wild(pat) => {
|
|
- hash.write_u8(15);
|
|
- pat.hash(hash);
|
|
- }
|
|
- Pat::__Nonexhaustive => unreachable!(),
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
#[cfg(feature = "parsing")]
|
|
-mod parsing {
|
|
+pub mod parsing {
|
|
use super::*;
|
|
|
|
use crate::ext::IdentExt;
|
|
- use crate::parse::{Parse, ParseStream, Result};
|
|
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
|
|
use crate::path;
|
|
|
|
impl Parse for Pat {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
+ let begin = input.fork();
|
|
let lookahead = input.lookahead1();
|
|
if lookahead.peek(Ident)
|
|
&& ({
|
|
input.peek2(Token![::])
|
|
|| input.peek2(Token![!])
|
|
|| input.peek2(token::Brace)
|
|
|| input.peek2(token::Paren)
|
|
|| input.peek2(Token![..])
|
|
@@ -406,17 +300,16 @@ mod parsing {
|
|
ahead.is_empty() || ahead.peek(Token![,])
|
|
}
|
|
})
|
|
|| input.peek(Token![self]) && input.peek2(Token![::])
|
|
|| lookahead.peek(Token![::])
|
|
|| lookahead.peek(Token![<])
|
|
|| input.peek(Token![Self])
|
|
|| input.peek(Token![super])
|
|
- || input.peek(Token![extern])
|
|
|| input.peek(Token![crate])
|
|
{
|
|
pat_path_or_macro_or_struct_or_range(input)
|
|
} else if lookahead.peek(Token![_]) {
|
|
input.call(pat_wild).map(Pat::Wild)
|
|
} else if input.peek(Token![box]) {
|
|
input.call(pat_box).map(Pat::Box)
|
|
} else if input.peek(Token![-]) || lookahead.peek(Lit) {
|
|
@@ -429,28 +322,29 @@ mod parsing {
|
|
input.call(pat_ident).map(Pat::Ident)
|
|
} else if lookahead.peek(Token![&]) {
|
|
input.call(pat_reference).map(Pat::Reference)
|
|
} else if lookahead.peek(token::Paren) {
|
|
input.call(pat_tuple).map(Pat::Tuple)
|
|
} else if lookahead.peek(token::Bracket) {
|
|
input.call(pat_slice).map(Pat::Slice)
|
|
} else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
|
|
- input.call(pat_rest).map(Pat::Rest)
|
|
+ pat_range_half_open(input, begin)
|
|
} else {
|
|
Err(lookahead.error())
|
|
}
|
|
}
|
|
}
|
|
|
|
fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
|
|
+ let begin = input.fork();
|
|
let (qself, path) = path::parsing::qpath(input, true)?;
|
|
|
|
if input.peek(Token![..]) {
|
|
- return pat_range(input, qself, path).map(Pat::Range);
|
|
+ return pat_range(input, begin, qself, path);
|
|
}
|
|
|
|
if qself.is_some() {
|
|
return Ok(Pat::Path(PatPath {
|
|
attrs: Vec::new(),
|
|
qself,
|
|
path,
|
|
}));
|
|
@@ -482,17 +376,17 @@ mod parsing {
|
|
}
|
|
}
|
|
|
|
if input.peek(token::Brace) {
|
|
pat_struct(input, path).map(Pat::Struct)
|
|
} else if input.peek(token::Paren) {
|
|
pat_tuple_struct(input, path).map(Pat::TupleStruct)
|
|
} else if input.peek(Token![..]) {
|
|
- pat_range(input, qself, path).map(Pat::Range)
|
|
+ pat_range(input, begin, qself, path)
|
|
} else {
|
|
Ok(Pat::Path(PatPath {
|
|
attrs: Vec::new(),
|
|
qself,
|
|
path,
|
|
}))
|
|
}
|
|
}
|
|
@@ -541,17 +435,17 @@ mod parsing {
|
|
fn pat_struct(input: ParseStream, path: Path) -> Result<PatStruct> {
|
|
let content;
|
|
let brace_token = braced!(content in input);
|
|
|
|
let mut fields = Punctuated::new();
|
|
while !content.is_empty() && !content.peek(Token![..]) {
|
|
let value = content.call(field_pat)?;
|
|
fields.push_value(value);
|
|
- if !content.peek(Token![,]) {
|
|
+ if content.is_empty() {
|
|
break;
|
|
}
|
|
let punct: Token![,] = content.parse()?;
|
|
fields.push_punct(punct);
|
|
}
|
|
|
|
let dot2_token = if fields.empty_or_trailing() && content.peek(Token![..]) {
|
|
Some(content.parse()?)
|
|
@@ -573,29 +467,30 @@ mod parsing {
|
|
match *self {
|
|
Member::Named(_) => false,
|
|
Member::Unnamed(_) => true,
|
|
}
|
|
}
|
|
}
|
|
|
|
fn field_pat(input: ParseStream) -> Result<FieldPat> {
|
|
+ let attrs = input.call(Attribute::parse_outer)?;
|
|
let boxed: Option<Token![box]> = input.parse()?;
|
|
let by_ref: Option<Token![ref]> = input.parse()?;
|
|
let mutability: Option<Token![mut]> = input.parse()?;
|
|
let member: Member = input.parse()?;
|
|
|
|
if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:])
|
|
|| member.is_unnamed()
|
|
{
|
|
return Ok(FieldPat {
|
|
- attrs: Vec::new(),
|
|
+ attrs,
|
|
member,
|
|
colon_token: input.parse()?,
|
|
- pat: input.parse()?,
|
|
+ pat: Box::new(multi_pat(input)?),
|
|
});
|
|
}
|
|
|
|
let ident = match member {
|
|
Member::Named(ident) => ident,
|
|
Member::Unnamed(_) => unreachable!(),
|
|
};
|
|
|
|
@@ -605,49 +500,76 @@ mod parsing {
|
|
mutability,
|
|
ident: ident.clone(),
|
|
subpat: None,
|
|
});
|
|
|
|
if let Some(boxed) = boxed {
|
|
pat = Pat::Box(PatBox {
|
|
attrs: Vec::new(),
|
|
+ box_token: boxed,
|
|
pat: Box::new(pat),
|
|
- box_token: boxed,
|
|
});
|
|
}
|
|
|
|
Ok(FieldPat {
|
|
+ attrs,
|
|
member: Member::Named(ident),
|
|
+ colon_token: None,
|
|
pat: Box::new(pat),
|
|
- attrs: Vec::new(),
|
|
- colon_token: None,
|
|
})
|
|
}
|
|
|
|
- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
|
|
- Ok(PatRange {
|
|
- attrs: Vec::new(),
|
|
- lo: Box::new(Expr::Path(ExprPath {
|
|
+ fn pat_range(
|
|
+ input: ParseStream,
|
|
+ begin: ParseBuffer,
|
|
+ qself: Option<QSelf>,
|
|
+ path: Path,
|
|
+ ) -> Result<Pat> {
|
|
+ let limits: RangeLimits = input.parse()?;
|
|
+ let hi = input.call(pat_lit_expr)?;
|
|
+ if let Some(hi) = hi {
|
|
+ Ok(Pat::Range(PatRange {
|
|
attrs: Vec::new(),
|
|
- qself,
|
|
- path,
|
|
- })),
|
|
- limits: input.parse()?,
|
|
- hi: input.call(pat_lit_expr)?,
|
|
- })
|
|
+ lo: Box::new(Expr::Path(ExprPath {
|
|
+ attrs: Vec::new(),
|
|
+ qself,
|
|
+ path,
|
|
+ })),
|
|
+ limits,
|
|
+ hi,
|
|
+ }))
|
|
+ } else {
|
|
+ Ok(Pat::Verbatim(verbatim::between(begin, input)))
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
|
|
+ let limits: RangeLimits = input.parse()?;
|
|
+ let hi = input.call(pat_lit_expr)?;
|
|
+ if hi.is_some() {
|
|
+ Ok(Pat::Verbatim(verbatim::between(begin, input)))
|
|
+ } else {
|
|
+ match limits {
|
|
+ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
|
|
+ attrs: Vec::new(),
|
|
+ dot2_token,
|
|
+ })),
|
|
+ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
|
|
+ }
|
|
+ }
|
|
}
|
|
|
|
fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
|
|
let content;
|
|
let paren_token = parenthesized!(content in input);
|
|
|
|
let mut elems = Punctuated::new();
|
|
while !content.is_empty() {
|
|
- let value: Pat = content.parse()?;
|
|
+ let value = multi_pat(&content)?;
|
|
elems.push_value(value);
|
|
if content.is_empty() {
|
|
break;
|
|
}
|
|
let punct = content.parse()?;
|
|
elems.push_punct(punct);
|
|
}
|
|
|
|
@@ -663,225 +585,280 @@ mod parsing {
|
|
attrs: Vec::new(),
|
|
and_token: input.parse()?,
|
|
mutability: input.parse()?,
|
|
pat: input.parse()?,
|
|
})
|
|
}
|
|
|
|
fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
|
|
- let lo = input.call(pat_lit_expr)?;
|
|
+ let begin = input.fork();
|
|
+ let lo = input.call(pat_lit_expr)?.unwrap();
|
|
if input.peek(Token![..]) {
|
|
- Ok(Pat::Range(PatRange {
|
|
- attrs: Vec::new(),
|
|
- lo,
|
|
- limits: input.parse()?,
|
|
- hi: input.call(pat_lit_expr)?,
|
|
- }))
|
|
+ let limits: RangeLimits = input.parse()?;
|
|
+ let hi = input.call(pat_lit_expr)?;
|
|
+ if let Some(hi) = hi {
|
|
+ Ok(Pat::Range(PatRange {
|
|
+ attrs: Vec::new(),
|
|
+ lo,
|
|
+ limits,
|
|
+ hi,
|
|
+ }))
|
|
+ } else {
|
|
+ Ok(Pat::Verbatim(verbatim::between(begin, input)))
|
|
+ }
|
|
} else {
|
|
Ok(Pat::Lit(PatLit {
|
|
attrs: Vec::new(),
|
|
expr: lo,
|
|
}))
|
|
}
|
|
}
|
|
|
|
- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
|
|
+ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
|
|
+ if input.is_empty()
|
|
+ || input.peek(Token![|])
|
|
+ || input.peek(Token![=>])
|
|
+ || input.peek(Token![:]) && !input.peek(Token![::])
|
|
+ || input.peek(Token![,])
|
|
+ || input.peek(Token![;])
|
|
+ {
|
|
+ return Ok(None);
|
|
+ }
|
|
+
|
|
let neg: Option<Token![-]> = input.parse()?;
|
|
|
|
let lookahead = input.lookahead1();
|
|
let expr = if lookahead.peek(Lit) {
|
|
Expr::Lit(input.parse()?)
|
|
} else if lookahead.peek(Ident)
|
|
|| lookahead.peek(Token![::])
|
|
|| lookahead.peek(Token![<])
|
|
|| lookahead.peek(Token![self])
|
|
|| lookahead.peek(Token![Self])
|
|
|| lookahead.peek(Token![super])
|
|
- || lookahead.peek(Token![extern])
|
|
|| lookahead.peek(Token![crate])
|
|
{
|
|
Expr::Path(input.parse()?)
|
|
} else {
|
|
return Err(lookahead.error());
|
|
};
|
|
|
|
- Ok(Box::new(if let Some(neg) = neg {
|
|
+ Ok(Some(Box::new(if let Some(neg) = neg {
|
|
Expr::Unary(ExprUnary {
|
|
attrs: Vec::new(),
|
|
op: UnOp::Neg(neg),
|
|
expr: Box::new(expr),
|
|
})
|
|
} else {
|
|
expr
|
|
- }))
|
|
+ })))
|
|
}
|
|
|
|
fn pat_slice(input: ParseStream) -> Result<PatSlice> {
|
|
let content;
|
|
let bracket_token = bracketed!(content in input);
|
|
|
|
let mut elems = Punctuated::new();
|
|
while !content.is_empty() {
|
|
- let value: Pat = content.parse()?;
|
|
+ let value = multi_pat(&content)?;
|
|
elems.push_value(value);
|
|
if content.is_empty() {
|
|
break;
|
|
}
|
|
let punct = content.parse()?;
|
|
elems.push_punct(punct);
|
|
}
|
|
|
|
Ok(PatSlice {
|
|
attrs: Vec::new(),
|
|
bracket_token,
|
|
elems,
|
|
})
|
|
}
|
|
|
|
- fn pat_rest(input: ParseStream) -> Result<PatRest> {
|
|
- Ok(PatRest {
|
|
- attrs: Vec::new(),
|
|
- dot2_token: input.parse()?,
|
|
- })
|
|
+ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
|
|
+ multi_pat_impl(input, None)
|
|
+ }
|
|
+
|
|
+ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
|
|
+ let leading_vert: Option<Token![|]> = input.parse()?;
|
|
+ multi_pat_impl(input, leading_vert)
|
|
+ }
|
|
+
|
|
+ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
|
|
+ let mut pat: Pat = input.parse()?;
|
|
+ if leading_vert.is_some()
|
|
+ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
|
|
+ {
|
|
+ let mut cases = Punctuated::new();
|
|
+ cases.push_value(pat);
|
|
+ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
|
|
+ let punct = input.parse()?;
|
|
+ cases.push_punct(punct);
|
|
+ let pat: Pat = input.parse()?;
|
|
+ cases.push_value(pat);
|
|
+ }
|
|
+ pat = Pat::Or(PatOr {
|
|
+ attrs: Vec::new(),
|
|
+ leading_vert,
|
|
+ cases,
|
|
+ });
|
|
+ }
|
|
+ Ok(pat)
|
|
}
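`multi_pat_impl` is what collects `|`-separated alternatives into a single `Pat::Or`. A sketch of the result as seen through the public API (not part of the patch; assumes the `"full"` feature):

```rust
use syn::{Expr, Pat};

fn main() {
    let expr: Expr = syn::parse_str("match x { 0 | 1 | 2 => small, _ => big }").unwrap();
    let arms = match expr {
        Expr::Match(m) => m.arms,
        _ => unreachable!(),
    };
    // The three alternatives of the first arm are gathered into one Pat::Or.
    match &arms[0].pat {
        Pat::Or(or) => assert_eq!(or.cases.len(), 3),
        _ => panic!("expected Pat::Or"),
    }
}
```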
|
|
}
|
|
|
|
#[cfg(feature = "printing")]
|
|
mod printing {
|
|
use super::*;
|
|
|
|
use proc_macro2::TokenStream;
|
|
use quote::{ToTokens, TokenStreamExt};
|
|
|
|
use crate::attr::FilterAttrs;
|
|
|
|
impl ToTokens for PatWild {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.underscore_token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatIdent {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.by_ref.to_tokens(tokens);
|
|
self.mutability.to_tokens(tokens);
|
|
self.ident.to_tokens(tokens);
|
|
if let Some((at_token, subpat)) = &self.subpat {
|
|
at_token.to_tokens(tokens);
|
|
subpat.to_tokens(tokens);
|
|
}
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatStruct {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.path.to_tokens(tokens);
|
|
self.brace_token.surround(tokens, |tokens| {
|
|
self.fields.to_tokens(tokens);
|
|
// NOTE: We need a comma before the dot2 token if it is present.
|
|
if !self.fields.empty_or_trailing() && self.dot2_token.is_some() {
|
|
<Token![,]>::default().to_tokens(tokens);
|
|
}
|
|
self.dot2_token.to_tokens(tokens);
|
|
});
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatTupleStruct {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.path.to_tokens(tokens);
|
|
self.pat.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatType {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
tokens.append_all(self.attrs.outer());
|
|
self.pat.to_tokens(tokens);
|
|
self.colon_token.to_tokens(tokens);
|
|
self.ty.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatPath {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
private::print_path(tokens, &self.qself, &self.path);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatTuple {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.paren_token.surround(tokens, |tokens| {
|
|
self.elems.to_tokens(tokens);
|
|
});
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatBox {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.box_token.to_tokens(tokens);
|
|
self.pat.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatReference {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.and_token.to_tokens(tokens);
|
|
self.mutability.to_tokens(tokens);
|
|
self.pat.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatRest {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.dot2_token.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatLit {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.expr.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatRange {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.lo.to_tokens(tokens);
|
|
match &self.limits {
|
|
RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
|
|
RangeLimits::Closed(t) => t.to_tokens(tokens),
|
|
}
|
|
self.hi.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatSlice {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.bracket_token.surround(tokens, |tokens| {
|
|
self.elems.to_tokens(tokens);
|
|
});
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatMacro {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.mac.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for PatOr {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
self.leading_vert.to_tokens(tokens);
|
|
self.cases.to_tokens(tokens);
|
|
}
|
|
}
|
|
|
|
impl ToTokens for FieldPat {
|
|
fn to_tokens(&self, tokens: &mut TokenStream) {
|
|
+ tokens.append_all(self.attrs.outer());
|
|
if let Some(colon_token) = &self.colon_token {
|
|
self.member.to_tokens(tokens);
|
|
colon_token.to_tokens(tokens);
|
|
}
|
|
self.pat.to_tokens(tokens);
|
|
}
|
|
}
|
|
}
|
|
diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
|
|
--- a/third_party/rust/syn/src/path.rs
|
|
+++ b/third_party/rust/syn/src/path.rs
|
|
@@ -1,15 +1,15 @@
|
|
use super::*;
|
|
use crate::punctuated::Punctuated;
|
|
|
|
ast_struct! {
|
|
- /// A path at which a named item is exported: `std::collections::HashMap`.
|
|
+ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Path {
|
|
pub leading_colon: Option<Token![::]>,
|
|
pub segments: Punctuated<PathSegment, Token![::]>,
|
|
}
|
|
}
|
|
|
|
impl<T> From<T> for Path
|
|
@@ -24,17 +24,17 @@ where
|
|
path.segments.push_value(segment.into());
|
|
path
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A segment of a path together with any path arguments on that segment.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct PathSegment {
|
|
pub ident: Ident,
|
|
pub arguments: PathArguments,
|
|
}
|
|
}
|
|
|
|
impl<T> From<T> for PathSegment
|
|
@@ -47,17 +47,17 @@ where
|
|
arguments: PathArguments::None,
|
|
}
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// Angle bracketed or parenthesized arguments of a path segment.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// ## Angle bracketed
|
|
///
|
|
/// The `<'a, T>` in `std::slice::iter<'a, T>`.
|
|
///
|
|
/// ## Parenthesized
|
|
///
|
|
@@ -93,17 +93,17 @@ impl PathArguments {
|
|
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
|
|
}
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// An individual generic argument, like `'a`, `T`, or `Item = T`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub enum GenericArgument {
|
|
/// A lifetime argument.
|
|
Lifetime(Lifetime),
|
|
/// A type argument.
|
|
Type(Type),
|
|
/// A binding (equality constraint) on an associated type: the `Item =
|
|
/// u8` in `Iterator<Item = u8>`.
|
|
@@ -117,55 +117,55 @@ ast_enum! {
|
|
Const(Expr),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
|
|
/// V>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct AngleBracketedGenericArguments {
|
|
pub colon2_token: Option<Token![::]>,
|
|
pub lt_token: Token![<],
|
|
pub args: Punctuated<GenericArgument, Token![,]>,
|
|
pub gt_token: Token![>],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A binding (equality constraint) on an associated type: `Item = u8`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Binding {
|
|
pub ident: Ident,
|
|
pub eq_token: Token![=],
|
|
pub ty: Type,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An associated type bound: `Iterator<Item: Display>`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Constraint {
|
|
pub ident: Ident,
|
|
pub colon_token: Token![:],
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
|
|
/// C`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct ParenthesizedGenericArguments {
|
|
pub paren_token: token::Paren,
|
|
/// `(A, B)`
|
|
pub inputs: Punctuated<Type, Token![,]>,
|
|
/// `C`
|
|
pub output: ReturnType,
|
|
}
|
|
@@ -184,17 +184,17 @@ ast_struct! {
|
|
/// ^~~~~~ ~~~~~~~~~~~~~~^
|
|
/// ty position = 3
|
|
///
|
|
/// <Vec<T>>::AssociatedItem
|
|
/// ^~~~~~ ^
|
|
/// ty position = 0
|
|
/// ```
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct QSelf {
|
|
pub lt_token: Token![<],
|
|
pub ty: Box<Type>,
|
|
pub position: usize,
|
|
pub as_token: Option<Token![as]>,
|
|
pub gt_token: Token![>],
|
|
}
|
|
@@ -286,21 +286,17 @@ pub mod parsing {
|
|
impl Parse for PathSegment {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Self::parse_helper(input, false)
|
|
}
|
|
}
|
|
|
|
impl PathSegment {
|
|
fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
|
|
- if input.peek(Token![super])
|
|
- || input.peek(Token![self])
|
|
- || input.peek(Token![crate])
|
|
- || input.peek(Token![extern])
|
|
- {
|
|
+ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
|
|
let ident = input.call(Ident::parse_any)?;
|
|
return Ok(PathSegment::from(ident));
|
|
}
|
|
|
|
let ident = if input.peek(Token![Self]) {
|
|
input.call(Ident::parse_any)?
|
|
} else {
|
|
input.parse()?
|
|
@@ -353,17 +349,17 @@ pub mod parsing {
|
|
},
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Path {
|
|
/// Parse a `Path` containing no path arguments on any of its segments.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
///
|
|
/// # Example
|
|
///
|
|
/// ```
|
|
/// use syn::{Path, Result, Token};
|
|
/// use syn::parse::{Parse, ParseStream};
|
|
///
|
|
@@ -395,17 +391,16 @@ pub mod parsing {
|
|
segments: {
|
|
let mut segments = Punctuated::new();
|
|
loop {
|
|
if !input.peek(Ident)
|
|
&& !input.peek(Token![super])
|
|
&& !input.peek(Token![self])
|
|
&& !input.peek(Token![Self])
|
|
&& !input.peek(Token![crate])
|
|
- && !input.peek(Token![extern])
|
|
{
|
|
break;
|
|
}
|
|
let ident = Ident::parse_any(input)?;
|
|
segments.push_value(PathSegment::from(ident));
|
|
if !input.peek(Token![::]) {
|
|
break;
|
|
}
|
|
@@ -428,17 +423,17 @@ pub mod parsing {
|
|
/// For them to compare equal, it must be the case that:
|
|
///
|
|
/// - the path has no leading colon,
|
|
/// - the number of path segments is 1,
|
|
/// - the first path segment has no angle bracketed or parenthesized
|
|
/// path arguments, and
|
|
/// - the ident of the first path segment is equal to the given one.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
///
|
|
/// # Example
|
|
///
|
|
/// ```
|
|
/// use syn::{Attribute, Error, Meta, NestedMeta, Result};
|
|
/// # use std::iter::FromIterator;
|
|
///
|
|
@@ -467,17 +462,17 @@ pub mod parsing {
|
|
///
|
|
/// A path is considered an ident if:
|
|
///
|
|
/// - the path has no leading colon,
|
|
/// - the number of path segments is 1, and
|
|
/// - the first path segment has no angle bracketed or parenthesized
|
|
/// path arguments.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
pub fn get_ident(&self) -> Option<&Ident> {
|
|
if self.leading_colon.is_none()
|
|
&& self.segments.len() == 1
|
|
&& self.segments[0].arguments.is_none()
|
|
{
|
|
Some(&self.segments[0].ident)
|
|
} else {
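`get_ident` succeeds only for a bare single-segment path with no arguments, which is the common case when matching attribute names. A sketch (not part of the patch; the paths used are arbitrary examples):

```rust
use syn::Path;

fn main() {
    let simple: Path = syn::parse_str("serde").unwrap();
    assert!(simple.get_ident().is_some());

    let qualified: Path = syn::parse_str("serde::Deserialize").unwrap();
    assert!(qualified.get_ident().is_none());
}
```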
|
|
diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
|
|
--- a/third_party/rust/syn/src/punctuated.rs
|
|
+++ b/third_party/rust/syn/src/punctuated.rs
|
|
@@ -17,16 +17,18 @@
|
|
//!
|
|
//! ```text
|
|
//! a_function_call(arg1, arg2, arg3);
|
|
//! ~~~~^ ~~~~^ ~~~~
|
|
//! ```
|
|
|
|
#[cfg(feature = "extra-traits")]
|
|
use std::fmt::{self, Debug};
|
|
+#[cfg(feature = "extra-traits")]
|
|
+use std::hash::{Hash, Hasher};
|
|
#[cfg(any(feature = "full", feature = "derive"))]
|
|
use std::iter;
|
|
use std::iter::FromIterator;
|
|
use std::ops::{Index, IndexMut};
|
|
use std::option;
|
|
use std::slice;
|
|
use std::vec;
|
|
|
|
@@ -36,18 +38,16 @@ use crate::parse::{Parse, ParseStream, R
|
|
use crate::token::Token;
|
|
|
|
/// A punctuated sequence of syntax tree nodes of type `T` separated by
|
|
/// punctuation of type `P`.
|
|
///
|
|
/// Refer to the [module documentation] for details about punctuated sequences.
|
|
///
|
|
/// [module documentation]: self
|
|
-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
pub struct Punctuated<T, P> {
|
|
inner: Vec<(T, P)>,
|
|
last: Option<Box<T>>,
|
|
}
|
|
|
|
impl<T, P> Punctuated<T, P> {
|
|
/// Creates an empty punctuated sequence.
|
|
pub fn new() -> Punctuated<T, P> {
|
|
@@ -71,32 +71,29 @@ impl<T, P> Punctuated<T, P> {
|
|
self.inner.len() + if self.last.is_some() { 1 } else { 0 }
|
|
}
|
|
|
|
/// Borrows the first element in this sequence.
|
|
pub fn first(&self) -> Option<&T> {
|
|
self.iter().next()
|
|
}
|
|
|
|
+ /// Mutably borrows the first element in this sequence.
|
|
+ pub fn first_mut(&mut self) -> Option<&mut T> {
|
|
+ self.iter_mut().next()
|
|
+ }
|
|
+
|
|
/// Borrows the last element in this sequence.
|
|
pub fn last(&self) -> Option<&T> {
|
|
- if self.last.is_some() {
|
|
- self.last.as_ref().map(Box::as_ref)
|
|
- } else {
|
|
- self.inner.last().map(|pair| &pair.0)
|
|
- }
|
|
+ self.iter().next_back()
|
|
}
|
|
|
|
/// Mutably borrows the last element in this sequence.
|
|
pub fn last_mut(&mut self) -> Option<&mut T> {
|
|
- if self.last.is_some() {
|
|
- self.last.as_mut().map(Box::as_mut)
|
|
- } else {
|
|
- self.inner.last_mut().map(|pair| &mut pair.0)
|
|
- }
|
|
+ self.iter_mut().next_back()
|
|
}
|
|
|
|
/// Returns an iterator over borrowed syntax tree nodes of type `&T`.
|
|
pub fn iter(&self) -> Iter<T> {
|
|
Iter {
|
|
inner: Box::new(PrivateIter {
|
|
inner: self.inner.iter(),
|
|
last: self.last.as_ref().map(Box::as_ref).into_iter(),
|
|
@@ -225,23 +222,29 @@ impl<T, P> Punctuated<T, P> {
|
|
|
|
if index == self.len() {
|
|
self.push(value);
|
|
} else {
|
|
self.inner.insert(index, (value, Default::default()));
|
|
}
|
|
}
|
|
|
|
+ /// Clears the sequence of all values and punctuation, making it empty.
|
|
+ pub fn clear(&mut self) {
|
|
+ self.inner.clear();
|
|
+ self.last = None;
|
|
+ }
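`clear` rounds out the basic container operations on `Punctuated`. A sketch combining it with `parse_terminated` (not part of the patch, standard syn API):

```rust
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::{Ident, Token};

fn main() {
    // Parse a comma-separated identifier list, then empty it in place.
    let parser = Punctuated::<Ident, Token![,]>::parse_terminated;
    let mut idents = parser.parse_str("a, b, c").unwrap();
    assert_eq!(idents.len(), 3);
    assert_eq!(idents.first().unwrap(), "a");
    assert_eq!(idents.last().unwrap(), "c");
    idents.clear();
    assert!(idents.is_empty());
}
```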
|
|
+
|
|
/// Parses zero or more occurrences of `T` separated by punctuation of type
|
|
/// `P`, with optional trailing punctuation.
|
|
///
|
|
/// Parsing continues until the end of this parse stream. The entire content
|
|
/// of this parse stream must consist of `T` and `P`.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_terminated(input: ParseStream) -> Result<Self>
|
|
where
|
|
T: Parse,
|
|
P: Parse,
|
|
{
|
|
Self::parse_terminated_with(input, T::parse)
|
|
@@ -251,17 +254,17 @@ impl<T, P> Punctuated<T, P> {
|
|
/// separated by punctuation of type `P`, with optional trailing
|
|
/// punctuation.
|
|
///
|
|
/// Like [`parse_terminated`], the entire content of this stream is expected
|
|
/// to be parsed.
|
|
///
|
|
/// [`parse_terminated`]: Punctuated::parse_terminated
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_terminated_with(
|
|
input: ParseStream,
|
|
parser: fn(ParseStream) -> Result<T>,
|
|
) -> Result<Self>
|
|
where
|
|
P: Parse,
|
|
@@ -287,17 +290,17 @@ impl<T, P> Punctuated<T, P> {
|
|
/// Parses one or more occurrences of `T` separated by punctuation of type
|
|
/// `P`, not accepting trailing punctuation.
|
|
///
|
|
/// Parsing continues as long as punctuation `P` is present at the head of
|
|
/// the stream. This method returns upon parsing a `T` and observing that it
|
|
/// is not followed by a `P`, even if there are remaining tokens in the
|
|
/// stream.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
|
|
where
|
|
T: Parse,
|
|
P: Token + Parse,
|
|
{
|
|
Self::parse_separated_nonempty_with(input, T::parse)
|
|
@@ -307,17 +310,17 @@ impl<T, P> Punctuated<T, P> {
|
|
/// separated by punctuation of type `P`, not accepting trailing
|
|
/// punctuation.
|
|
///
|
|
/// Like [`parse_separated_nonempty`], may complete early without parsing
|
|
/// the entire content of this stream.
|
|
///
|
|
/// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
#[cfg(feature = "parsing")]
|
|
pub fn parse_separated_nonempty_with(
|
|
input: ParseStream,
|
|
parser: fn(ParseStream) -> Result<T>,
|
|
) -> Result<Self>
|
|
where
|
|
P: Token + Parse,
|
|
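A sketch of how `parse_separated_nonempty` differs from `parse_terminated`: it stops after the first element that is not followed by the separator, so further parsing can continue on the same stream (illustrative names, default syn features assumed):

    use syn::parse::{Parse, ParseStream};
    use syn::punctuated::Punctuated;
    use syn::{Ident, Result, Token};

    // Parses e.g. `Send + Sync;`: the list stops before the `;`,
    // which is then parsed explicitly.
    struct PlusList {
        idents: Punctuated<Ident, Token![+]>,
        semi: Token![;],
    }

    impl Parse for PlusList {
        fn parse(input: ParseStream) -> Result<Self> {
            Ok(PlusList {
                idents: Punctuated::parse_separated_nonempty(input)?,
                semi: input.parse()?,
            })
        }
    }
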
@@ -333,16 +336,63 @@ impl<T, P> Punctuated<T, P> {
|
|
let punct = input.parse()?;
|
|
punctuated.push_punct(punct);
|
|
}
|
|
|
|
Ok(punctuated)
|
|
}
|
|
}
|
|
|
|
+#[cfg(feature = "clone-impls")]
|
|
+impl<T, P> Clone for Punctuated<T, P>
|
|
+where
|
|
+ T: Clone,
|
|
+ P: Clone,
|
|
+{
|
|
+ fn clone(&self) -> Self {
|
|
+ Punctuated {
|
|
+ inner: self.inner.clone(),
|
|
+ last: self.last.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "extra-traits")]
|
|
+impl<T, P> Eq for Punctuated<T, P>
|
|
+where
|
|
+ T: Eq,
|
|
+ P: Eq,
|
|
+{
|
|
+}
|
|
+
|
|
+#[cfg(feature = "extra-traits")]
|
|
+impl<T, P> PartialEq for Punctuated<T, P>
|
|
+where
|
|
+ T: PartialEq,
|
|
+ P: PartialEq,
|
|
+{
|
|
+ fn eq(&self, other: &Self) -> bool {
|
|
+ let Punctuated { inner, last } = self;
|
|
+ *inner == other.inner && *last == other.last
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "extra-traits")]
|
|
+impl<T, P> Hash for Punctuated<T, P>
|
|
+where
|
|
+ T: Hash,
|
|
+ P: Hash,
|
|
+{
|
|
+ fn hash<H: Hasher>(&self, state: &mut H) {
|
|
+ let Punctuated { inner, last } = self;
|
|
+ inner.hash(state);
|
|
+ last.hash(state);
|
|
+ }
|
|
+}
|
|
+
|
|
#[cfg(feature = "extra-traits")]
|
|
impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
let mut list = f.debug_list();
|
|
for (t, p) in &self.inner {
|
|
list.entry(t);
|
|
list.entry(p);
|
|
}
|
|
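The hand-written impls above replace the earlier `cfg_attr(..., derive(...))` pattern; the public surface stays the same. A sketch of what they enable when the `extra-traits` feature (named in the cfg attributes above) is enabled, plus `parsing`/`printing` for `parse_quote!`:

    use syn::punctuated::Punctuated;
    use syn::{parse_quote, Token, Type};

    fn main() {
        let a: Punctuated<Type, Token![,]> = parse_quote!(u8, Vec<String>);
        let b = a.clone();       // "clone-impls" feature
        assert!(a == b);         // "extra-traits": PartialEq/Eq above
        println!("{:?}", a);     // "extra-traits": Debug below
    }
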
@@ -531,17 +581,16 @@ impl<'a, T, P> ExactSizeIterator for Pai
|
|
}
|
|
}
|
|
|
|
/// An iterator over owned pairs of type `Pair<T, P>`.
|
|
///
|
|
/// Refer to the [module documentation] for details about punctuated sequences.
|
|
///
|
|
/// [module documentation]: self
|
|
-#[derive(Clone)]
|
|
pub struct IntoPairs<T, P> {
|
|
inner: vec::IntoIter<(T, P)>,
|
|
last: option::IntoIter<T>,
|
|
}
|
|
|
|
impl<T, P> Iterator for IntoPairs<T, P> {
|
|
type Item = Pair<T, P>;
|
|
|
|
@@ -567,22 +616,34 @@ impl<T, P> DoubleEndedIterator for IntoP
|
|
}
|
|
|
|
impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
|
|
fn len(&self) -> usize {
|
|
self.inner.len() + self.last.len()
|
|
}
|
|
}
|
|
|
|
+impl<T, P> Clone for IntoPairs<T, P>
|
|
+where
|
|
+ T: Clone,
|
|
+ P: Clone,
|
|
+{
|
|
+ fn clone(&self) -> Self {
|
|
+ IntoPairs {
|
|
+ inner: self.inner.clone(),
|
|
+ last: self.last.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
/// An iterator over owned values of type `T`.
|
|
///
|
|
/// Refer to the [module documentation] for details about punctuated sequences.
|
|
///
|
|
/// [module documentation]: self
|
|
-#[derive(Clone)]
|
|
pub struct IntoIter<T> {
|
|
inner: vec::IntoIter<T>,
|
|
}
|
|
|
|
impl<T> Iterator for IntoIter<T> {
|
|
type Item = T;
|
|
|
|
fn next(&mut self) -> Option<Self::Item> {
|
|
@@ -601,16 +662,27 @@ impl<T> DoubleEndedIterator for IntoIter
|
|
}
|
|
|
|
impl<T> ExactSizeIterator for IntoIter<T> {
|
|
fn len(&self) -> usize {
|
|
self.inner.len()
|
|
}
|
|
}
|
|
|
|
+impl<T> Clone for IntoIter<T>
|
|
+where
|
|
+ T: Clone,
|
|
+{
|
|
+ fn clone(&self) -> Self {
|
|
+ IntoIter {
|
|
+ inner: self.inner.clone(),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
/// An iterator over borrowed values of type `&T`.
|
|
///
|
|
/// Refer to the [module documentation] for details about punctuated sequences.
|
|
///
|
|
/// [module documentation]: self
|
|
pub struct Iter<'a, T: 'a> {
|
|
// The `Item = &'a T` needs to be specified to support rustc 1.31 and older.
|
|
// On modern compilers we would be able to write just IterTrait<'a, T> where
|
|
@@ -794,17 +866,16 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a,
|
|
}
|
|
|
|
/// A single syntax tree node of type `T` followed by its trailing punctuation
|
|
/// of type `P` if any.
|
|
///
|
|
/// Refer to the [module documentation] for details about punctuated sequences.
|
|
///
|
|
/// [module documentation]: self
|
|
-#[cfg_attr(feature = "clone-impls", derive(Clone))]
|
|
pub enum Pair<T, P> {
|
|
Punctuated(T, P),
|
|
End(T),
|
|
}
|
|
|
|
impl<T, P> Pair<T, P> {
|
|
/// Extracts the syntax tree node from this punctuated pair, discarding the
|
|
/// following punctuation.
|
|
@@ -851,16 +922,30 @@ impl<T, P> Pair<T, P> {
|
|
pub fn into_tuple(self) -> (T, Option<P>) {
|
|
match self {
|
|
Pair::Punctuated(t, d) => (t, Some(d)),
|
|
Pair::End(t) => (t, None),
|
|
}
|
|
}
|
|
}
|
|
|
|
+#[cfg(feature = "clone-impls")]
|
|
+impl<T, P> Clone for Pair<T, P>
|
|
+where
|
|
+ T: Clone,
|
|
+ P: Clone,
|
|
+{
|
|
+ fn clone(&self) -> Self {
|
|
+ match self {
|
|
+ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
|
|
+ Pair::End(t) => Pair::End(t.clone()),
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
impl<T, P> Index<usize> for Punctuated<T, P> {
|
|
type Output = T;
|
|
|
|
fn index(&self, index: usize) -> &Self::Output {
|
|
if index == self.len() - 1 {
|
|
match &self.last {
|
|
Some(t) => t,
|
|
None => &self.inner[index].0,
|
|
diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/syn/src/reserved.rs
|
|
@@ -0,0 +1,42 @@
|
|
+// Type for a syntax tree node that is reserved for future use.
|
|
+//
|
|
+// For example ExprReference contains a field `raw` of type Reserved. If `&raw
|
|
+// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
|
|
+// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
|
|
+// the possibility of breaking any code.
|
|
+
|
|
+use proc_macro2::Span;
|
|
+use std::marker::PhantomData;
|
|
+
|
|
+#[cfg(feature = "extra-traits")]
|
|
+use std::fmt::{self, Debug};
|
|
+
|
|
+ast_struct! {
|
|
+ pub struct Reserved {
|
|
+ _private: PhantomData<Span>,
|
|
+ }
|
|
+}
|
|
+
|
|
+impl Default for Reserved {
|
|
+ fn default() -> Self {
|
|
+ Reserved {
|
|
+ _private: PhantomData,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "clone-impls")]
|
|
+impl Clone for Reserved {
|
|
+ fn clone(&self) -> Self {
|
|
+ Reserved {
|
|
+ _private: self._private,
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+#[cfg(feature = "extra-traits")]
|
|
+impl Debug for Reserved {
|
|
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
+ formatter.debug_struct("Reserved").finish()
|
|
+ }
|
|
+}
|
|
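The reservation trick above works because `Reserved` can only be constructed through `Default`, so downstream struct literals keep compiling if the field's type is later upgraded to an `Option`. A generic sketch of the same pattern using hypothetical names, not syn API:

    use std::marker::PhantomData;

    #[derive(Default)]
    pub struct Placeholder {
        _private: PhantomData<()>,
    }

    pub struct Node {
        // Callers must write `reserved: Default::default()`, which still
        // compiles if this later becomes `Option<SomethingReal>`.
        pub reserved: Placeholder,
    }

    fn main() {
        let _node = Node { reserved: Default::default() };
    }
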
diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
|
|
--- a/third_party/rust/syn/src/spanned.rs
|
|
+++ b/third_party/rust/syn/src/spanned.rs
|
|
@@ -1,12 +1,12 @@
|
|
//! A trait that can provide the `Span` of the complete contents of a syntax
|
|
//! tree node.
|
|
//!
|
|
-//! *This module is available if Syn is built with both the `"parsing"` and
|
|
+//! *This module is available only if Syn is built with both the `"parsing"` and
|
|
//! `"printing"` features.*
|
|
//!
|
|
//! <br>
|
|
//!
|
|
//! # Example
|
|
//!
|
|
//! Suppose in a procedural macro we have a [`Type`] that we want to assert
|
|
//! implements the [`Sync`] trait. Maybe this is the type of one of the fields
|
|
@@ -92,17 +92,17 @@ use quote::spanned::Spanned as ToTokens;
|
|
/// [`ToTokens`] from the `quote` crate, as well as for `Span` itself.
|
|
///
|
|
/// [`ToTokens`]: quote::ToTokens
|
|
///
|
|
/// See the [module documentation] for an example.
|
|
///
|
|
/// [module documentation]: self
|
|
///
|
|
-/// *This trait is available if Syn is built with both the `"parsing"` and
|
|
+/// *This trait is available only if Syn is built with both the `"parsing"` and
|
|
/// `"printing"` features.*
|
|
pub trait Spanned {
|
|
/// Returns a `Span` covering the complete contents of this syntax tree
|
|
/// node, or [`Span::call_site()`] if this node is empty.
|
|
///
|
|
/// [`Span::call_site()`]: proc_macro2::Span::call_site
|
|
fn span(&self) -> Span;
|
|
}
|
|
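The usual `Spanned` workflow, mirroring the module's own example of asserting that a field type is `Sync` (assumes the `parsing` and `printing` features and the `quote` crate):

    use proc_macro2::TokenStream;
    use quote::quote_spanned;
    use syn::spanned::Spanned;
    use syn::Type;

    // Emit a compile error pointing at the user's type if it is not Sync.
    fn assert_sync(field_ty: &Type) -> TokenStream {
        quote_spanned! {field_ty.span()=>
            struct _AssertSync where #field_ty: Sync;
        }
    }

    fn main() {
        let ty: Type = syn::parse_quote!(std::rc::Rc<u8>);
        println!("{}", assert_sync(&ty));
    }
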
diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
|
|
--- a/third_party/rust/syn/src/stmt.rs
|
|
+++ b/third_party/rust/syn/src/stmt.rs
|
|
@@ -1,25 +1,25 @@
|
|
use super::*;
|
|
|
|
ast_struct! {
|
|
/// A braced block containing Rust statements.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct Block {
|
|
pub brace_token: token::Brace,
|
|
/// Statements in a block
|
|
pub stmts: Vec<Stmt>,
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// A statement, usually ending in a semicolon.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub enum Stmt {
|
|
/// A local (let) binding.
|
|
Local(Local),
|
|
|
|
/// An item definition.
|
|
Item(Item),
|
|
|
|
/// Expr without trailing semicolon.
|
|
@@ -28,38 +28,39 @@ ast_enum! {
|
|
/// Expression with trailing semicolon.
|
|
Semi(Expr, Token![;]),
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A local `let` binding: `let x: u64 = s.parse()?`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"full"` feature.*
|
|
+ /// *This type is available only if Syn is built with the `"full"` feature.*
|
|
pub struct Local {
|
|
pub attrs: Vec<Attribute>,
|
|
pub let_token: Token![let],
|
|
pub pat: Pat,
|
|
pub init: Option<(Token![=], Box<Expr>)>,
|
|
pub semi_token: Token![;],
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parsing {
|
|
use super::*;
|
|
|
|
+ use crate::parse::discouraged::Speculative;
|
|
use crate::parse::{Parse, ParseStream, Result};
|
|
- use crate::punctuated::Punctuated;
|
|
+ use proc_macro2::TokenStream;
|
|
|
|
impl Block {
|
|
/// Parse the body of a block as zero or more statements, possibly
|
|
/// including one trailing expression.
|
|
///
|
|
- /// *This function is available if Syn is built with the `"parsing"`
|
|
+ /// *This function is available only if Syn is built with the `"parsing"`
|
|
/// feature.*
|
|
///
|
|
/// # Example
|
|
///
|
|
/// ```
|
|
/// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
|
|
/// use syn::parse::{Parse, ParseStream};
|
|
///
|
|
@@ -101,18 +102,18 @@ pub mod parsing {
|
|
/// stmts,
|
|
/// })
|
|
/// }
|
|
/// }
|
|
/// ```
|
|
pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
|
|
let mut stmts = Vec::new();
|
|
loop {
|
|
- while input.peek(Token![;]) {
|
|
- input.parse::<Token![;]>()?;
|
|
+ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
|
|
+ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
|
|
}
|
|
if input.is_empty() {
|
|
break;
|
|
}
|
|
let s = parse_stmt(input, true)?;
|
|
let requires_semicolon = if let Stmt::Expr(s) = &s {
|
|
expr::requires_terminator(s)
|
|
} else {
|
|
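A sketch of the observable change in `parse_within` above: stray semicolons are now kept as empty `Stmt::Semi` statements instead of being silently discarded (assumes syn 1.x built with the `full` feature):

    use syn::{ItemFn, Stmt};

    fn main() -> syn::Result<()> {
        let f: ItemFn = syn::parse_str("fn f() { ;; let x = 1; }")?;
        // Two empty statements plus the `let`.
        assert_eq!(f.block.stmts.len(), 3);
        assert!(matches!(f.block.stmts[0], Stmt::Semi(_, _)));
        Ok(())
    }
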
@@ -141,65 +142,65 @@ pub mod parsing {
|
|
|
|
impl Parse for Stmt {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
parse_stmt(input, false)
|
|
}
|
|
}
|
|
|
|
fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
|
|
- // TODO: optimize using advance_to
|
|
+ let mut attrs = input.call(Attribute::parse_outer)?;
|
|
+
|
|
+ // brace-style macros; paren and bracket macros get parsed as
|
|
+ // expression statements.
|
|
let ahead = input.fork();
|
|
- ahead.call(Attribute::parse_outer)?;
|
|
+ if let Ok(path) = ahead.call(Path::parse_mod_style) {
|
|
+ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
|
|
+ input.advance_to(&ahead);
|
|
+ return stmt_mac(input, attrs, path);
|
|
+ }
|
|
+ }
|
|
|
|
- if {
|
|
- let ahead = ahead.fork();
|
|
- // Only parse braces here; paren and bracket will get parsed as
|
|
- // expression statements
|
|
- ahead.call(Path::parse_mod_style).is_ok()
|
|
- && ahead.parse::<Token![!]>().is_ok()
|
|
- && (ahead.peek(token::Brace) || ahead.peek(Ident))
|
|
- } {
|
|
- stmt_mac(input)
|
|
- } else if ahead.peek(Token![let]) {
|
|
- stmt_local(input).map(Stmt::Local)
|
|
- } else if ahead.peek(Token![pub])
|
|
- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
|
|
- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
|
|
- || ahead.peek(Token![use])
|
|
- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
|
|
- || ahead.peek(Token![const])
|
|
- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
|
|
- || ahead.peek(Token![async])
|
|
- && (ahead.peek2(Token![unsafe])
|
|
- || ahead.peek2(Token![extern])
|
|
- || ahead.peek2(Token![fn]))
|
|
- || ahead.peek(Token![fn])
|
|
- || ahead.peek(Token![mod])
|
|
- || ahead.peek(Token![type])
|
|
- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
|
|
- || ahead.peek(Token![struct])
|
|
- || ahead.peek(Token![enum])
|
|
- || ahead.peek(Token![union]) && ahead.peek2(Ident)
|
|
- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
|
|
- || ahead.peek(Token![trait])
|
|
- || ahead.peek(Token![default])
|
|
- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
|
|
- || ahead.peek(Token![impl])
|
|
- || ahead.peek(Token![macro])
|
|
+ if input.peek(Token![let]) {
|
|
+ stmt_local(input, attrs).map(Stmt::Local)
|
|
+ } else if input.peek(Token![pub])
|
|
+ || input.peek(Token![crate]) && !input.peek2(Token![::])
|
|
+ || input.peek(Token![extern])
|
|
+ || input.peek(Token![use])
|
|
+ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
|
|
+ || input.peek(Token![const])
|
|
+ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
|
|
+ || input.peek(Token![async])
|
|
+ && (input.peek2(Token![unsafe])
|
|
+ || input.peek2(Token![extern])
|
|
+ || input.peek2(Token![fn]))
|
|
+ || input.peek(Token![fn])
|
|
+ || input.peek(Token![mod])
|
|
+ || input.peek(Token![type])
|
|
+ || input.peek(item::parsing::existential) && input.peek2(Token![type])
|
|
+ || input.peek(Token![struct])
|
|
+ || input.peek(Token![enum])
|
|
+ || input.peek(Token![union]) && input.peek2(Ident)
|
|
+ || input.peek(Token![auto]) && input.peek2(Token![trait])
|
|
+ || input.peek(Token![trait])
|
|
+ || input.peek(Token![default])
|
|
+ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
|
|
+ || input.peek(Token![impl])
|
|
+ || input.peek(Token![macro])
|
|
{
|
|
- input.parse().map(Stmt::Item)
|
|
+ let mut item: Item = input.parse()?;
|
|
+ attrs.extend(item.replace_attrs(Vec::new()));
|
|
+ item.replace_attrs(attrs);
|
|
+ Ok(Stmt::Item(item))
|
|
} else {
|
|
- stmt_expr(input, allow_nosemi)
|
|
+ stmt_expr(input, allow_nosemi, attrs)
|
|
}
|
|
}
|
|
|
|
- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
|
|
- let attrs = input.call(Attribute::parse_outer)?;
|
|
- let path = input.call(Path::parse_mod_style)?;
|
|
+ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
|
|
let bang_token: Token![!] = input.parse()?;
|
|
let ident: Option<Ident> = input.parse()?;
|
|
let (delimiter, tokens) = mac::parse_delimiter(input)?;
|
|
let semi_token: Option<Token![;]> = input.parse()?;
|
|
|
|
Ok(Stmt::Item(Item::Macro(ItemMacro {
|
|
attrs,
|
|
ident,
|
|
@@ -208,43 +209,22 @@ pub mod parsing {
|
|
bang_token,
|
|
delimiter,
|
|
tokens,
|
|
},
|
|
semi_token,
|
|
})))
|
|
}
|
|
|
|
- fn stmt_local(input: ParseStream) -> Result<Local> {
|
|
+ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
|
|
Ok(Local {
|
|
- attrs: input.call(Attribute::parse_outer)?,
|
|
+ attrs,
|
|
let_token: input.parse()?,
|
|
pat: {
|
|
- let leading_vert: Option<Token![|]> = input.parse()?;
|
|
- let mut pat: Pat = input.parse()?;
|
|
- if leading_vert.is_some()
|
|
- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
|
|
- {
|
|
- let mut cases = Punctuated::new();
|
|
- cases.push_value(pat);
|
|
- while input.peek(Token![|])
|
|
- && !input.peek(Token![||])
|
|
- && !input.peek(Token![|=])
|
|
- {
|
|
- let punct = input.parse()?;
|
|
- cases.push_punct(punct);
|
|
- let pat: Pat = input.parse()?;
|
|
- cases.push_value(pat);
|
|
- }
|
|
- pat = Pat::Or(PatOr {
|
|
- attrs: Vec::new(),
|
|
- leading_vert,
|
|
- cases,
|
|
- });
|
|
- }
|
|
+ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
|
|
if input.peek(Token![:]) {
|
|
let colon_token: Token![:] = input.parse()?;
|
|
let ty: Type = input.parse()?;
|
|
pat = Pat::Type(PatType {
|
|
attrs: Vec::new(),
|
|
pat: Box::new(pat),
|
|
colon_token,
|
|
ty: Box::new(ty),
|
|
@@ -260,22 +240,29 @@ pub mod parsing {
|
|
} else {
|
|
None
|
|
}
|
|
},
|
|
semi_token: input.parse()?,
|
|
})
|
|
}
|
|
|
|
- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
|
|
- let mut attrs = input.call(Attribute::parse_outer)?;
|
|
+ fn stmt_expr(
|
|
+ input: ParseStream,
|
|
+ allow_nosemi: bool,
|
|
+ mut attrs: Vec<Attribute>,
|
|
+ ) -> Result<Stmt> {
|
|
let mut e = expr::parsing::expr_early(input)?;
|
|
|
|
- attrs.extend(e.replace_attrs(Vec::new()));
|
|
- e.replace_attrs(attrs);
|
|
+ let mut attr_target = &mut e;
|
|
+ while let Expr::Binary(e) = attr_target {
|
|
+ attr_target = &mut e.left;
|
|
+ }
|
|
+ attrs.extend(attr_target.replace_attrs(Vec::new()));
|
|
+ attr_target.replace_attrs(attrs);
|
|
|
|
if input.peek(Token![;]) {
|
|
return Ok(Stmt::Semi(e, input.parse()?));
|
|
}
|
|
|
|
if allow_nosemi || !expr::requires_terminator(&e) {
|
|
Ok(Stmt::Expr(e))
|
|
} else {
|
|
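The loop added in `stmt_expr` walks to the leftmost operand of a binary expression before re-attaching the statement's outer attributes. A sketch of the resulting tree shape (assumes the `full` feature; the attribute chosen here is arbitrary):

    use syn::{Expr, Stmt};

    fn main() -> syn::Result<()> {
        let stmt: Stmt = syn::parse_str("#[rustfmt::skip] a + b;")?;
        if let Stmt::Semi(Expr::Binary(bin), _) = &stmt {
            // The attribute lands on the leftmost operand, not on `a + b`.
            assert!(bin.attrs.is_empty());
            if let Expr::Path(left) = &*bin.left {
                assert_eq!(left.attrs.len(), 1);
            }
        }
        Ok(())
    }
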
diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
|
|
--- a/third_party/rust/syn/src/token.rs
|
|
+++ b/third_party/rust/syn/src/token.rs
|
|
@@ -83,44 +83,41 @@
|
|
//!
|
|
//! - Field access to its span — `let sp = the_token.span`
|
|
//!
|
|
//! [Peeking]: ../parse/struct.ParseBuffer.html#method.peek
|
|
//! [Parsing]: ../parse/struct.ParseBuffer.html#method.parse
|
|
//! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
|
|
//! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
|
|
|
|
-use std;
|
|
#[cfg(feature = "extra-traits")]
|
|
use std::cmp;
|
|
#[cfg(feature = "extra-traits")]
|
|
use std::fmt::{self, Debug};
|
|
#[cfg(feature = "extra-traits")]
|
|
use std::hash::{Hash, Hasher};
|
|
use std::ops::{Deref, DerefMut};
|
|
|
|
-#[cfg(feature = "parsing")]
|
|
-use proc_macro2::Delimiter;
|
|
#[cfg(any(feature = "parsing", feature = "printing"))]
|
|
use proc_macro2::Ident;
|
|
use proc_macro2::Span;
|
|
#[cfg(feature = "printing")]
|
|
use proc_macro2::TokenStream;
|
|
+#[cfg(feature = "parsing")]
|
|
+use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
|
|
#[cfg(feature = "printing")]
|
|
use quote::{ToTokens, TokenStreamExt};
|
|
|
|
use self::private::WithSpan;
|
|
#[cfg(feature = "parsing")]
|
|
use crate::buffer::Cursor;
|
|
#[cfg(feature = "parsing")]
|
|
use crate::error::Result;
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
#[cfg(feature = "parsing")]
|
|
use crate::lifetime::Lifetime;
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
#[cfg(feature = "parsing")]
|
|
use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
|
|
#[cfg(feature = "parsing")]
|
|
use crate::lookahead;
|
|
#[cfg(feature = "parsing")]
|
|
use crate::parse::{Parse, ParseStream};
|
|
use crate::span::IntoSpans;
|
|
|
|
@@ -150,31 +147,30 @@ mod private {
|
|
pub struct WithSpan {
|
|
pub span: Span,
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
impl private::Sealed for Ident {}
|
|
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
#[cfg(feature = "parsing")]
|
|
fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
|
|
+ use crate::parse::Unexpected;
|
|
use std::cell::Cell;
|
|
use std::rc::Rc;
|
|
|
|
let scope = Span::call_site();
|
|
- let unexpected = Rc::new(Cell::new(None));
|
|
+ let unexpected = Rc::new(Cell::new(Unexpected::None));
|
|
let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
|
|
peek(&buffer)
|
|
}
|
|
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
macro_rules! impl_token {
|
|
- ($name:ident $display:expr) => {
|
|
+ ($display:tt $name:ty) => {
|
|
#[cfg(feature = "parsing")]
|
|
impl Token for $name {
|
|
fn peek(cursor: Cursor) -> bool {
|
|
fn peek(input: ParseStream) -> bool {
|
|
<$name as Parse>::parse(input).is_ok()
|
|
}
|
|
peek_impl(cursor, peek)
|
|
}
|
|
@@ -184,34 +180,48 @@ macro_rules! impl_token {
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "parsing")]
|
|
impl private::Sealed for $name {}
|
|
};
|
|
}
|
|
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(Lifetime "lifetime");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(Lit "literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitStr "string literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitByteStr "byte string literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitByte "byte literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitChar "character literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitInt "integer literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitFloat "floating point literal");
|
|
-#[cfg(any(feature = "full", feature = "derive"))]
|
|
-impl_token!(LitBool "boolean literal");
|
|
+impl_token!("lifetime" Lifetime);
|
|
+impl_token!("literal" Lit);
|
|
+impl_token!("string literal" LitStr);
|
|
+impl_token!("byte string literal" LitByteStr);
|
|
+impl_token!("byte literal" LitByte);
|
|
+impl_token!("character literal" LitChar);
|
|
+impl_token!("integer literal" LitInt);
|
|
+impl_token!("floating point literal" LitFloat);
|
|
+impl_token!("boolean literal" LitBool);
|
|
+impl_token!("group token" proc_macro2::Group);
|
|
+
|
|
+macro_rules! impl_low_level_token {
|
|
+ ($display:tt $ty:ident $get:ident) => {
|
|
+ #[cfg(feature = "parsing")]
|
|
+ impl Token for $ty {
|
|
+ fn peek(cursor: Cursor) -> bool {
|
|
+ cursor.$get().is_some()
|
|
+ }
|
|
+
|
|
+ fn display() -> &'static str {
|
|
+ $display
|
|
+ }
|
|
+ }
|
|
+
|
|
+ #[cfg(feature = "parsing")]
|
|
+ impl private::Sealed for $ty {}
|
|
+ };
|
|
+}
|
|
+
|
|
+impl_low_level_token!("punctuation token" Punct punct);
|
|
+impl_low_level_token!("literal" Literal literal);
|
|
+impl_low_level_token!("token" TokenTree token_tree);
|
|
|
|
// Not public API.
|
|
#[doc(hidden)]
|
|
#[cfg(feature = "parsing")]
|
|
pub trait CustomToken {
|
|
fn peek(cursor: Cursor) -> bool;
|
|
fn display() -> &'static str;
|
|
}
|
|
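These `Token` implementations are what back `peek` and `Lookahead1` for the listed types. A small dispatch sketch with an illustrative enum (assumes the `parsing` feature):

    use syn::parse::{Parse, ParseStream};
    use syn::{Ident, Lifetime, LitStr, Result};

    enum Arg {
        Life(Lifetime),
        Name(Ident),
        Text(LitStr),
    }

    impl Parse for Arg {
        fn parse(input: ParseStream) -> Result<Self> {
            let lookahead = input.lookahead1();
            if lookahead.peek(Lifetime) {
                input.parse().map(Arg::Life)
            } else if lookahead.peek(Ident) {
                input.parse().map(Arg::Name)
            } else if lookahead.peek(LitStr) {
                input.parse().map(Arg::Text)
            } else {
                Err(lookahead.error())
            }
        }
    }
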
@@ -228,17 +238,16 @@ impl<T: CustomToken> Token for T {
|
|
fn display() -> &'static str {
|
|
<Self as CustomToken>::display()
|
|
}
|
|
}
|
|
|
|
macro_rules! define_keywords {
|
|
($($token:tt pub struct $name:ident #[$doc:meta])*) => {
|
|
$(
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
|
|
#[$doc]
|
|
///
|
|
/// Don't try to remember the name of this type — use the
|
|
/// [`Token!`] macro instead.
|
|
///
|
|
/// [`Token!`]: crate::token
|
|
pub struct $name {
|
|
pub span: Span,
|
|
@@ -255,16 +264,26 @@ macro_rules! define_keywords {
|
|
impl std::default::Default for $name {
|
|
fn default() -> Self {
|
|
$name {
|
|
span: Span::call_site(),
|
|
}
|
|
}
|
|
}
|
|
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Copy for $name {}
|
|
+
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Clone for $name {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+ }
|
|
+
|
|
#[cfg(feature = "extra-traits")]
|
|
impl Debug for $name {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
f.write_str(stringify!($name))
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "extra-traits")]
|
|
@@ -333,17 +352,16 @@ macro_rules! impl_deref_if_len_is_1 {
|
|
};
|
|
|
|
($name:ident/$len:tt) => {};
|
|
}
|
|
|
|
macro_rules! define_punctuation_structs {
|
|
($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
|
|
$(
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
|
|
#[repr(C)]
|
|
#[$doc]
|
|
///
|
|
/// Don't try to remember the name of this type — use the
|
|
/// [`Token!`] macro instead.
|
|
///
|
|
/// [`Token!`]: crate::token
|
|
pub struct $name {
|
|
@@ -361,16 +379,26 @@ macro_rules! define_punctuation_structs
|
|
impl std::default::Default for $name {
|
|
fn default() -> Self {
|
|
$name {
|
|
spans: [Span::call_site(); $len],
|
|
}
|
|
}
|
|
}
|
|
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Copy for $name {}
|
|
+
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Clone for $name {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+ }
|
|
+
|
|
#[cfg(feature = "extra-traits")]
|
|
impl Debug for $name {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
f.write_str(stringify!($name))
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "extra-traits")]
|
|
@@ -431,17 +459,16 @@ macro_rules! define_punctuation {
|
|
impl private::Sealed for $name {}
|
|
)*
|
|
};
|
|
}
|
|
|
|
macro_rules! define_delimiters {
|
|
($($token:tt pub struct $name:ident #[$doc:meta])*) => {
|
|
$(
|
|
- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
|
|
#[$doc]
|
|
pub struct $name {
|
|
pub span: Span,
|
|
}
|
|
|
|
#[doc(hidden)]
|
|
#[allow(non_snake_case)]
|
|
pub fn $name<S: IntoSpans<[Span; 1]>>(span: S) -> $name {
|
|
@@ -453,16 +480,26 @@ macro_rules! define_delimiters {
|
|
impl std::default::Default for $name {
|
|
fn default() -> Self {
|
|
$name {
|
|
span: Span::call_site(),
|
|
}
|
|
}
|
|
}
|
|
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Copy for $name {}
|
|
+
|
|
+ #[cfg(feature = "clone-impls")]
|
|
+ impl Clone for $name {
|
|
+ fn clone(&self) -> Self {
|
|
+ *self
|
|
+ }
|
|
+ }
|
|
+
|
|
#[cfg(feature = "extra-traits")]
|
|
impl Debug for $name {
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
f.write_str(stringify!($name))
|
|
}
|
|
}
|
|
|
|
#[cfg(feature = "extra-traits")]
|
|
@@ -850,17 +887,17 @@ pub mod parsing {
|
|
if let Some((ident, _rest)) = cursor.ident() {
|
|
ident == token
|
|
} else {
|
|
false
|
|
}
|
|
}
|
|
|
|
pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
|
|
- let mut spans = [input.cursor().span(); 3];
|
|
+ let mut spans = [input.span(); 3];
|
|
punct_helper(input, token, &mut spans)?;
|
|
Ok(S::from_spans(&spans))
|
|
}
|
|
|
|
fn punct_helper(input: ParseStream, token: &str, spans: &mut [Span; 3]) -> Result<()> {
|
|
input.step(|cursor| {
|
|
let mut cursor = *cursor;
|
|
assert!(token.len() <= spans.len());
|
|
diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
|
|
--- a/third_party/rust/syn/src/tt.rs
|
|
+++ b/third_party/rust/syn/src/tt.rs
|
|
@@ -13,18 +13,18 @@ impl<'a> PartialEq for TokenTreeHelper<'
|
|
match (g1.delimiter(), g2.delimiter()) {
|
|
(Delimiter::Parenthesis, Delimiter::Parenthesis)
|
|
| (Delimiter::Brace, Delimiter::Brace)
|
|
| (Delimiter::Bracket, Delimiter::Bracket)
|
|
| (Delimiter::None, Delimiter::None) => {}
|
|
_ => return false,
|
|
}
|
|
|
|
- let s1 = g1.stream().clone().into_iter();
|
|
- let mut s2 = g2.stream().clone().into_iter();
|
|
+ let s1 = g1.stream().into_iter();
|
|
+ let mut s2 = g2.stream().into_iter();
|
|
|
|
for item1 in s1 {
|
|
let item2 = match s2.next() {
|
|
Some(item) => item,
|
|
None => return false,
|
|
};
|
|
if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
|
|
return false;
|
|
@@ -55,17 +55,17 @@ impl<'a> Hash for TokenTreeHelper<'a> {
|
|
0u8.hash(h);
|
|
match g.delimiter() {
|
|
Delimiter::Parenthesis => 0u8.hash(h),
|
|
Delimiter::Brace => 1u8.hash(h),
|
|
Delimiter::Bracket => 2u8.hash(h),
|
|
Delimiter::None => 3u8.hash(h),
|
|
}
|
|
|
|
- for item in g.stream().clone() {
|
|
+ for item in g.stream() {
|
|
TokenTreeHelper(&item).hash(h);
|
|
}
|
|
0xffu8.hash(h); // terminator w/ a variant we don't normally hash
|
|
}
|
|
TokenTree::Punct(op) => {
|
|
1u8.hash(h);
|
|
op.as_char().hash(h);
|
|
match op.spacing() {
|
|
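The dropped `.clone()` calls above are possible because `Group::stream` already returns an owned `TokenStream`. A small sketch, not part of the patch:

    use proc_macro2::{Delimiter, Group, TokenStream};

    fn main() {
        let group = Group::new(Delimiter::Brace, TokenStream::new());
        // `stream()` hands back an owned TokenStream, so no clone is needed
        // before iterating or comparing it.
        let owned: TokenStream = group.stream();
        assert!(owned.into_iter().next().is_none());
    }
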
diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
|
|
--- a/third_party/rust/syn/src/ty.rs
|
|
+++ b/third_party/rust/syn/src/ty.rs
|
|
@@ -1,31 +1,27 @@
|
|
use super::*;
|
|
use crate::punctuated::Punctuated;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use crate::tt::TokenStreamHelper;
|
|
use proc_macro2::TokenStream;
|
|
-#[cfg(feature = "extra-traits")]
|
|
-use std::hash::{Hash, Hasher};
|
|
|
|
ast_enum_of_structs! {
|
|
/// The possible types that a Rust value could have.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
///
|
|
/// # Syntax tree enum
|
|
///
|
|
/// This type is a [syntax tree enum].
|
|
///
|
|
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
|
//
|
|
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
|
// blocked on https://github.com/rust-lang/rust/issues/62833
|
|
- pub enum Type #manual_extra_traits {
|
|
+ pub enum Type {
|
|
/// A fixed size array type: `[T; n]`.
|
|
Array(TypeArray),
|
|
|
|
/// A bare function type: `fn(usize) -> bool`.
|
|
BareFn(TypeBareFn),
|
|
|
|
/// A type contained within invisible delimiters.
|
|
Group(TypeGroup),
|
|
@@ -72,295 +68,194 @@ ast_enum_of_structs! {
|
|
#[doc(hidden)]
|
|
__Nonexhaustive,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A fixed size array type: `[T; n]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeArray {
|
|
pub bracket_token: token::Bracket,
|
|
pub elem: Box<Type>,
|
|
pub semi_token: Token![;],
|
|
pub len: Expr,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A bare function type: `fn(usize) -> bool`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeBareFn {
|
|
pub lifetimes: Option<BoundLifetimes>,
|
|
pub unsafety: Option<Token![unsafe]>,
|
|
pub abi: Option<Abi>,
|
|
pub fn_token: Token![fn],
|
|
pub paren_token: token::Paren,
|
|
pub inputs: Punctuated<BareFnArg, Token![,]>,
|
|
pub variadic: Option<Variadic>,
|
|
pub output: ReturnType,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A type contained within invisible delimiters.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeGroup {
|
|
pub group_token: token::Group,
|
|
pub elem: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
|
|
/// a lifetime.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeImplTrait {
|
|
pub impl_token: Token![impl],
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// Indication that a type should be inferred by the compiler: `_`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeInfer {
|
|
pub underscore_token: Token![_],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A macro in the type position.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeMacro {
|
|
pub mac: Macro,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// The never type: `!`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeNever {
|
|
pub bang_token: Token![!],
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A parenthesized type equivalent to the inner type.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeParen {
|
|
pub paren_token: token::Paren,
|
|
pub elem: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A path like `std::slice::Iter`, optionally qualified with a
|
|
/// self-type as in `<Vec<T> as SomeTrait>::Associated`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypePath {
|
|
pub qself: Option<QSelf>,
|
|
pub path: Path,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A raw pointer type: `*const T` or `*mut T`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypePtr {
|
|
pub star_token: Token![*],
|
|
pub const_token: Option<Token![const]>,
|
|
pub mutability: Option<Token![mut]>,
|
|
pub elem: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A reference type: `&'a T` or `&'a mut T`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeReference {
|
|
pub and_token: Token![&],
|
|
pub lifetime: Option<Lifetime>,
|
|
pub mutability: Option<Token![mut]>,
|
|
pub elem: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A dynamically sized slice type: `[T]`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeSlice {
|
|
pub bracket_token: token::Bracket,
|
|
pub elem: Box<Type>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
|
|
/// trait or a lifetime.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeTraitObject {
|
|
pub dyn_token: Option<Token![dyn]>,
|
|
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// A tuple type: `(A, B, C, String)`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or
|
|
/// `"full"` feature.*
|
|
pub struct TypeTuple {
|
|
pub paren_token: token::Paren,
|
|
pub elems: Punctuated<Type, Token![,]>,
|
|
}
|
|
}
|
|
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Eq for Type {}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl PartialEq for Type {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- match (self, other) {
|
|
- (Type::Array(this), Type::Array(other)) => this == other,
|
|
- (Type::BareFn(this), Type::BareFn(other)) => this == other,
|
|
- (Type::Group(this), Type::Group(other)) => this == other,
|
|
- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
|
|
- (Type::Infer(this), Type::Infer(other)) => this == other,
|
|
- (Type::Macro(this), Type::Macro(other)) => this == other,
|
|
- (Type::Never(this), Type::Never(other)) => this == other,
|
|
- (Type::Paren(this), Type::Paren(other)) => this == other,
|
|
- (Type::Path(this), Type::Path(other)) => this == other,
|
|
- (Type::Ptr(this), Type::Ptr(other)) => this == other,
|
|
- (Type::Reference(this), Type::Reference(other)) => this == other,
|
|
- (Type::Slice(this), Type::Slice(other)) => this == other,
|
|
- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
|
|
- (Type::Tuple(this), Type::Tuple(other)) => this == other,
|
|
- (Type::Verbatim(this), Type::Verbatim(other)) => {
|
|
- TokenStreamHelper(this) == TokenStreamHelper(other)
|
|
- }
|
|
- _ => false,
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
-#[cfg(feature = "extra-traits")]
|
|
-impl Hash for Type {
|
|
- fn hash<H>(&self, hash: &mut H)
|
|
- where
|
|
- H: Hasher,
|
|
- {
|
|
- match self {
|
|
- Type::Array(ty) => {
|
|
- hash.write_u8(0);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::BareFn(ty) => {
|
|
- hash.write_u8(1);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Group(ty) => {
|
|
- hash.write_u8(2);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::ImplTrait(ty) => {
|
|
- hash.write_u8(3);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Infer(ty) => {
|
|
- hash.write_u8(4);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Macro(ty) => {
|
|
- hash.write_u8(5);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Never(ty) => {
|
|
- hash.write_u8(6);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Paren(ty) => {
|
|
- hash.write_u8(7);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Path(ty) => {
|
|
- hash.write_u8(8);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Ptr(ty) => {
|
|
- hash.write_u8(9);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Reference(ty) => {
|
|
- hash.write_u8(10);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Slice(ty) => {
|
|
- hash.write_u8(11);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::TraitObject(ty) => {
|
|
- hash.write_u8(12);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Tuple(ty) => {
|
|
- hash.write_u8(13);
|
|
- ty.hash(hash);
|
|
- }
|
|
- Type::Verbatim(ty) => {
|
|
- hash.write_u8(14);
|
|
- TokenStreamHelper(ty).hash(hash);
|
|
- }
|
|
- Type::__Nonexhaustive => unreachable!(),
|
|
- }
|
|
- }
|
|
-}
|
|
-
|
|
ast_struct! {
|
|
/// The binary interface of a function: `extern "C"`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Abi {
|
|
pub extern_token: Token![extern],
|
|
pub name: Option<LitStr>,
|
|
}
|
|
}
|
|
|
|
ast_struct! {
|
|
/// An argument in a function type: the `usize` in `fn(usize) -> bool`.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct BareFnArg {
|
|
pub attrs: Vec<Attribute>,
|
|
pub name: Option<(Ident, Token![:])>,
|
|
pub ty: Type,
|
|
}
|
|
}
|
|
|
|
@@ -372,28 +267,28 @@ ast_struct! {
|
|
/// # struct c_int;
|
|
/// #
|
|
/// extern "C" {
|
|
/// fn printf(format: *const c_char, ...) -> c_int;
|
|
/// // ^^^
|
|
/// }
|
|
/// ```
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub struct Variadic {
|
|
pub attrs: Vec<Attribute>,
|
|
pub dots: Token![...],
|
|
}
|
|
}
|
|
|
|
ast_enum! {
|
|
/// Return type of a function signature.
|
|
///
|
|
- /// *This type is available if Syn is built with the `"derive"` or `"full"`
|
|
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
|
/// feature.*
|
|
pub enum ReturnType {
|
|
/// Return type is not specified.
|
|
///
|
|
/// Functions default to `()` and closures default to type inference.
|
|
Default,
|
|
/// A particular type is returned.
|
|
Type(Token![->], Box<Type>),
|
|
@@ -402,39 +297,44 @@ ast_enum! {
|
|
|
|
#[cfg(feature = "parsing")]
|
|
pub mod parsing {
|
|
use super::*;
|
|
|
|
use crate::ext::IdentExt;
|
|
use crate::parse::{Parse, ParseStream, Result};
|
|
use crate::path;
|
|
+ use proc_macro2::{Punct, Spacing, TokenTree};
|
|
+ use std::iter::FromIterator;
|
|
|
|
impl Parse for Type {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- ambig_ty(input, true)
|
|
+ let allow_plus = true;
|
|
+ ambig_ty(input, allow_plus)
|
|
}
|
|
}
|
|
|
|
impl Type {
|
|
/// In some positions, types may not contain the `+` character, to
|
|
/// disambiguate them. For example in the expression `1 as T`, T may not
|
|
/// contain a `+` character.
|
|
///
|
|
/// This parser does not allow a `+`, while the default parser does.
|
|
pub fn without_plus(input: ParseStream) -> Result<Self> {
|
|
- ambig_ty(input, false)
|
|
+ let allow_plus = false;
|
|
+ ambig_ty(input, allow_plus)
|
|
}
|
|
}
|
|
|
|
fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
|
|
- if input.peek(token::Group) {
|
|
+ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
|
|
return input.parse().map(Type::Group);
|
|
}
|
|
|
|
+ let begin = input.fork();
|
|
let mut lifetimes = None::<BoundLifetimes>;
|
|
let mut lookahead = input.lookahead1();
|
|
if lookahead.peek(Token![for]) {
|
|
lifetimes = input.parse()?;
|
|
lookahead = input.lookahead1();
|
|
if !lookahead.peek(Ident)
|
|
&& !lookahead.peek(Token![fn])
|
|
&& !lookahead.peek(Token![unsafe])
|
|
@@ -519,17 +419,17 @@ pub mod parsing {
|
|
}
|
|
match bounds.into_iter().next().unwrap() {
|
|
TypeParamBound::Trait(trait_bound) => {
|
|
TypeParamBound::Trait(TraitBound {
|
|
paren_token: Some(paren_token),
|
|
..trait_bound
|
|
})
|
|
}
|
|
- other => other,
|
|
+ other @ TypeParamBound::Lifetime(_) => other,
|
|
}
|
|
}
|
|
_ => break,
|
|
};
|
|
return Ok(Type::TraitObject(TypeTraitObject {
|
|
dyn_token: None,
|
|
bounds: {
|
|
let mut bounds = Punctuated::new();
|
|
@@ -544,27 +444,30 @@ pub mod parsing {
|
|
}
|
|
}
|
|
Ok(Type::Paren(TypeParen {
|
|
paren_token,
|
|
elem: Box::new(first),
|
|
}))
|
|
} else if lookahead.peek(Token![fn])
|
|
|| lookahead.peek(Token![unsafe])
|
|
- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
|
|
+ || lookahead.peek(Token![extern])
|
|
{
|
|
- let mut bare_fn: TypeBareFn = input.parse()?;
|
|
- bare_fn.lifetimes = lifetimes;
|
|
- Ok(Type::BareFn(bare_fn))
|
|
+ let allow_mut_self = true;
|
|
+ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
|
|
+ bare_fn.lifetimes = lifetimes;
|
|
+ Ok(Type::BareFn(bare_fn))
|
|
+ } else {
|
|
+ Ok(Type::Verbatim(verbatim::between(begin, input)))
|
|
+ }
|
|
} else if lookahead.peek(Ident)
|
|
|| input.peek(Token![super])
|
|
|| input.peek(Token![self])
|
|
|| input.peek(Token![Self])
|
|
|| input.peek(Token![crate])
|
|
- || input.peek(Token![extern])
|
|
|| lookahead.peek(Token![::])
|
|
|| lookahead.peek(Token![<])
|
|
{
|
|
if input.peek(Token![dyn]) {
|
|
let mut trait_object: TypeTraitObject = input.parse()?;
|
|
if lifetimes.is_some() {
|
|
match trait_object.bounds.iter_mut().next().unwrap() {
|
|
TypeParamBound::Trait(trait_bound) => {
|
|
@@ -717,48 +620,68 @@ pub mod parsing {
|
|
// & binds tighter than +, so we don't allow + here.
|
|
elem: Box::new(input.call(Type::without_plus)?),
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeBareFn {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- let args;
|
|
- let allow_variadic;
|
|
- Ok(TypeBareFn {
|
|
- lifetimes: input.parse()?,
|
|
- unsafety: input.parse()?,
|
|
- abi: input.parse()?,
|
|
- fn_token: input.parse()?,
|
|
- paren_token: parenthesized!(args in input),
|
|
- inputs: {
|
|
- let mut inputs = Punctuated::new();
|
|
- while !args.is_empty() && !args.peek(Token![...]) {
|
|
- inputs.push_value(args.parse()?);
|
|
- if args.is_empty() {
|
|
- break;
|
|
- }
|
|
- inputs.push_punct(args.parse()?);
|
|
+ let allow_mut_self = false;
|
|
+ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
|
|
+ let args;
|
|
+ let mut variadic = None;
|
|
+ let mut has_mut_self = false;
|
|
+
|
|
+ let bare_fn = TypeBareFn {
|
|
+ lifetimes: input.parse()?,
|
|
+ unsafety: input.parse()?,
|
|
+ abi: input.parse()?,
|
|
+ fn_token: input.parse()?,
|
|
+ paren_token: parenthesized!(args in input),
|
|
+ inputs: {
|
|
+ let mut inputs = Punctuated::new();
|
|
+
|
|
+ while !args.is_empty() {
|
|
+ let attrs = args.call(Attribute::parse_outer)?;
|
|
+
|
|
+ if inputs.empty_or_trailing() && args.peek(Token![...]) {
|
|
+ variadic = Some(Variadic {
|
|
+ attrs,
|
|
+ dots: args.parse()?,
|
|
+ });
|
|
+ break;
|
|
}
|
|
- allow_variadic = inputs.empty_or_trailing();
|
|
- inputs
|
|
- },
|
|
- variadic: {
|
|
- if allow_variadic && args.peek(Token![...]) {
|
|
- Some(Variadic {
|
|
- attrs: Vec::new(),
|
|
- dots: args.parse()?,
|
|
- })
|
|
+
|
|
+ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
|
|
+ inputs.push_value(BareFnArg { attrs, ..arg });
|
|
} else {
|
|
- None
|
|
+ has_mut_self = true;
|
|
+ }
|
|
+ if args.is_empty() {
|
|
+ break;
|
|
}
|
|
- },
|
|
- output: input.call(ReturnType::without_plus)?,
|
|
- })
|
|
+
|
|
+ inputs.push_punct(args.parse()?);
|
|
+ }
|
|
+
|
|
+ inputs
|
|
+ },
|
|
+ variadic,
|
|
+ output: input.call(ReturnType::without_plus)?,
|
|
+ };
|
|
+
|
|
+ if has_mut_self {
|
|
+ Ok(None)
|
|
+ } else {
|
|
+ Ok(Some(bare_fn))
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeNever {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(TypeNever {
|
|
bang_token: input.parse()?,
|
|
})
|
|
@@ -771,19 +694,37 @@ pub mod parsing {
|
|
underscore_token: input.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeTuple {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
let content;
|
|
+ let paren_token = parenthesized!(content in input);
|
|
+
|
|
+ if content.is_empty() {
|
|
+ return Ok(TypeTuple {
|
|
+ paren_token,
|
|
+ elems: Punctuated::new(),
|
|
+ });
|
|
+ }
|
|
+
|
|
+ let first: Type = content.parse()?;
|
|
Ok(TypeTuple {
|
|
- paren_token: parenthesized!(content in input),
|
|
- elems: content.parse_terminated(Type::parse)?,
|
|
+ paren_token,
|
|
+ elems: {
|
|
+ let mut elems = Punctuated::new();
|
|
+ elems.push_value(first);
|
|
+ elems.push_punct(content.parse()?);
|
|
+ let rest: Punctuated<Type, Token![,]> =
|
|
+ content.parse_terminated(Parse::parse)?;
|
|
+ elems.extend(rest);
|
|
+ elems
|
|
+ },
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeMacro {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(TypeMacro {
|
|
mac: input.parse()?,
|
|
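The rewritten tuple parser earlier in this hunk requires the first comma itself, keeping `TypeTuple` in line with how `ambig_ty` distinguishes a parenthesized type from a one-element tuple. A behavioral sketch (default `derive` and `parsing` features assumed):

    use syn::Type;

    fn main() -> syn::Result<()> {
        assert!(matches!(syn::parse_str::<Type>("(u8)")?, Type::Paren(_)));
        assert!(matches!(syn::parse_str::<Type>("(u8,)")?, Type::Tuple(_)));
        assert!(matches!(syn::parse_str::<Type>("()")?, Type::Tuple(_)));
        Ok(())
    }
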
@@ -802,19 +743,21 @@ pub mod parsing {
|
|
}
|
|
|
|
Ok(TypePath { qself, path })
|
|
}
|
|
}
|
|
|
|
impl ReturnType {
|
|
pub fn without_plus(input: ParseStream) -> Result<Self> {
|
|
- Self::parse(input, false)
|
|
+ let allow_plus = false;
|
|
+ Self::parse(input, allow_plus)
|
|
}
|
|
|
|
+ #[doc(hidden)]
|
|
pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
|
|
if input.peek(Token![->]) {
|
|
let arrow = input.parse()?;
|
|
let ty = ambig_ty(input, allow_plus)?;
|
|
Ok(ReturnType::Type(arrow, Box::new(ty)))
|
|
} else {
|
|
Ok(ReturnType::Default)
|
|
}
|
|
@@ -839,20 +782,22 @@ pub mod parsing {
|
|
return true;
|
|
}
|
|
}
|
|
false
|
|
}
|
|
|
|
impl TypeTraitObject {
|
|
pub fn without_plus(input: ParseStream) -> Result<Self> {
|
|
- Self::parse(input, false)
|
|
+ let allow_plus = false;
|
|
+ Self::parse(input, allow_plus)
|
|
}
|
|
|
|
// Only allow multiple trait references if allow_plus is true.
|
|
+ #[doc(hidden)]
|
|
pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
|
|
Ok(TypeTraitObject {
|
|
dyn_token: input.parse()?,
|
|
bounds: {
|
|
let mut bounds = Punctuated::new();
|
|
if allow_plus {
|
|
loop {
|
|
bounds.push_value(input.parse()?);
|
|
@@ -905,48 +850,99 @@ pub mod parsing {
|
|
group_token: group.token,
|
|
elem: group.content.parse()?,
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for TypeParen {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- Self::parse(input, false)
|
|
+ let allow_plus = false;
|
|
+ Self::parse(input, allow_plus)
|
|
}
|
|
}
|
|
|
|
impl TypeParen {
|
|
fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
|
|
let content;
|
|
Ok(TypeParen {
|
|
paren_token: parenthesized!(content in input),
|
|
elem: Box::new(ambig_ty(&content, allow_plus)?),
|
|
})
|
|
}
|
|
}
|
|
|
|
impl Parse for BareFnArg {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
- Ok(BareFnArg {
|
|
- attrs: input.call(Attribute::parse_outer)?,
|
|
- name: {
|
|
- if (input.peek(Ident) || input.peek(Token![_]))
|
|
- && input.peek2(Token![:])
|
|
- && !input.peek2(Token![::])
|
|
- {
|
|
- let name = input.call(Ident::parse_any)?;
|
|
- let colon: Token![:] = input.parse()?;
|
|
- Some((name, colon))
|
|
- } else {
|
|
- None
|
|
- }
|
|
- },
|
|
- ty: input.parse()?,
|
|
- })
|
|
+ let allow_mut_self = false;
|
|
+ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
|
|
+ }
|
|
+ }
|
|
+
|
|
+ fn parse_bare_fn_arg(
|
|
+ input: ParseStream,
|
|
+ mut allow_mut_self: bool,
|
|
+ ) -> Result<Option<BareFnArg>> {
|
|
+ let mut has_mut_self = false;
|
|
+ let arg = BareFnArg {
|
|
+ attrs: input.call(Attribute::parse_outer)?,
|
|
+ name: {
|
|
+ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
|
|
+ && input.peek2(Token![:])
|
|
+ && !input.peek2(Token![::])
|
|
+ {
|
|
+ let name = input.call(Ident::parse_any)?;
|
|
+ let colon: Token![:] = input.parse()?;
|
|
+ Some((name, colon))
|
|
+ } else if allow_mut_self
|
|
+ && input.peek(Token![mut])
|
|
+ && input.peek2(Token![self])
|
|
+ && input.peek3(Token![:])
|
|
+ && !input.peek3(Token![::])
|
|
+ {
|
|
+ has_mut_self = true;
|
|
+ allow_mut_self = false;
|
|
+ input.parse::<Token![mut]>()?;
|
|
+ input.parse::<Token![self]>()?;
|
|
+ input.parse::<Token![:]>()?;
|
|
+ None
|
|
+ } else {
|
|
+ None
|
|
+ }
|
|
+ },
|
|
+ ty: if !has_mut_self && input.peek(Token![...]) {
|
|
+ let dot3 = input.parse::<Token![...]>()?;
|
|
+ let args = vec![
|
|
+ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
|
|
+ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
|
|
+ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
|
|
+ ];
|
|
+ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
|
|
+ |(mut arg, span)| {
|
|
+ arg.set_span(*span);
|
|
+ arg
|
|
+ },
|
|
+ ));
|
|
+ Type::Verbatim(tokens)
|
|
+ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
|
|
+ has_mut_self = true;
|
|
+ input.parse::<Token![mut]>()?;
|
|
+ Type::Path(TypePath {
|
|
+ qself: None,
|
|
+ path: input.parse::<Token![self]>()?.into(),
|
|
+ })
|
|
+ } else {
|
|
+ input.parse()?
|
|
+ },
|
|
+ };
|
|
+
|
|
+ if has_mut_self {
|
|
+ Ok(None)
|
|
+ } else {
|
|
+ Ok(Some(arg))
|
|
}
|
|
}
|
|
|
|
impl Parse for Abi {
|
|
fn parse(input: ParseStream) -> Result<Self> {
|
|
Ok(Abi {
|
|
extern_token: input.parse()?,
|
|
name: input.parse()?,
|
|
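A sketch of what the new `parse_bare_fn` accepts, including the C-style variadic tail handled above (default syn features assumed):

    use syn::{Type, TypeBareFn};

    fn main() -> syn::Result<()> {
        let ty: Type = syn::parse_str(r#"unsafe extern "C" fn(i32, ...) -> i32"#)?;
        if let Type::BareFn(TypeBareFn { inputs, variadic, .. }) = ty {
            assert_eq!(inputs.len(), 1);
            assert!(variadic.is_some());
        } else {
            panic!("expected a bare function type");
        }
        Ok(())
    }
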
diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/syn/src/verbatim.rs
|
|
@@ -0,0 +1,15 @@
|
|
+use crate::parse::{ParseBuffer, ParseStream};
|
|
+use proc_macro2::TokenStream;
|
|
+use std::iter;
|
|
+
|
|
+pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
|
|
+ let end = end.cursor();
|
|
+ let mut cursor = begin.cursor();
|
|
+ let mut tokens = TokenStream::new();
|
|
+ while cursor != end {
|
|
+ let (tt, next) = cursor.token_tree().unwrap();
|
|
+ tokens.extend(iter::once(tt));
|
|
+ cursor = next;
|
|
+ }
|
|
+ tokens
|
|
+}
|
|
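`verbatim::between` collects every token tree between a forked position and the current one. The same idea expressed with the public `ParseBuffer::step` API, using an illustrative type name:

    use proc_macro2::TokenStream;
    use syn::parse::{Parse, ParseStream};
    use syn::Result;

    struct Remainder(TokenStream);

    impl Parse for Remainder {
        fn parse(input: ParseStream) -> Result<Self> {
            input.step(|cursor| {
                let mut rest = *cursor;
                let mut tokens = TokenStream::new();
                // Walk token tree by token tree, much like `between` does.
                while let Some((tt, next)) = rest.token_tree() {
                    tokens.extend(std::iter::once(tt));
                    rest = next;
                }
                Ok((Remainder(tokens), rest))
            })
        }
    }
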
diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/syn/src/whitespace.rs
|
|
@@ -0,0 +1,65 @@
|
|
+pub fn skip(mut s: &str) -> &str {
|
|
+ 'skip: while !s.is_empty() {
|
|
+ let byte = s.as_bytes()[0];
|
|
+ if byte == b'/' {
|
|
+ if s.starts_with("//")
|
|
+ && (!s.starts_with("///") || s.starts_with("////"))
|
|
+ && !s.starts_with("//!")
|
|
+ {
|
|
+ if let Some(i) = s.find('\n') {
|
|
+ s = &s[i + 1..];
|
|
+ continue;
|
|
+ } else {
|
|
+ return "";
|
|
+ }
|
|
+ } else if s.starts_with("/**/") {
|
|
+ s = &s[4..];
|
|
+ continue;
|
|
+ } else if s.starts_with("/*")
|
|
+ && (!s.starts_with("/**") || s.starts_with("/***"))
|
|
+ && !s.starts_with("/*!")
|
|
+ {
|
|
+ let mut depth = 0;
|
|
+ let bytes = s.as_bytes();
|
|
+ let mut i = 0;
|
|
+ let upper = bytes.len() - 1;
|
|
+ while i < upper {
|
|
+ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
|
|
+ depth += 1;
|
|
+ i += 1; // eat '*'
|
|
+ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
|
|
+ depth -= 1;
|
|
+ if depth == 0 {
|
|
+ s = &s[i + 2..];
|
|
+ continue 'skip;
|
|
+ }
|
|
+ i += 1; // eat '/'
|
|
+ }
|
|
+ i += 1;
|
|
+ }
|
|
+ return s;
|
|
+ }
|
|
+ }
|
|
+ match byte {
|
|
+ b' ' | 0x09..=0x0d => {
|
|
+ s = &s[1..];
|
|
+ continue;
|
|
+ }
|
|
+ b if b <= 0x7f => {}
|
|
+ _ => {
|
|
+ let ch = s.chars().next().unwrap();
|
|
+ if is_whitespace(ch) {
|
|
+ s = &s[ch.len_utf8()..];
|
|
+ continue;
|
|
+ }
|
|
+ }
|
|
+ }
|
|
+ return s;
|
|
+ }
|
|
+ s
|
|
+}
|
|
+
|
|
+fn is_whitespace(ch: char) -> bool {
|
|
+ // Rust treats left-to-right mark and right-to-left mark as whitespace
|
|
+ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
|
|
+}
|
|
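The comment and whitespace skipper above is private to syn, so the following is only a doctest-style sketch of its intended behavior, assuming it were compiled inside this module:

    #[cfg(test)]
    mod skip_sketch {
        use super::skip;

        #[test]
        fn skips_plain_comments_but_not_doc_comments() {
            // Line and block comments (including nesting) are skipped.
            assert_eq!(skip("  \t// line comment\nrest"), "rest");
            assert_eq!(skip("/* a /* nested */ block */rest"), "rest");
            // Doc comments are significant tokens and are left in place.
            assert_eq!(skip("/// doc"), "/// doc");
        }
    }
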
diff --git a/third_party/rust/syn/tests/.gitignore b/third_party/rust/syn/tests/.gitignore
|
|
new file mode 100644
|
|
--- /dev/null
|
|
+++ b/third_party/rust/syn/tests/.gitignore
|
|
@@ -0,0 +1,1 @@
|
|
+/*.pending-snap
|
|
diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
|
|
deleted file mode 100755
|
|
--- a/third_party/rust/syn/tests/clone.sh
|
|
+++ /dev/null
|
|
@@ -1,16 +0,0 @@
|
|
-#!/bin/bash
|
|
-
|
|
-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
|
|
-
|
|
-set -euo pipefail
|
|
-cd "$(dirname "${BASH_SOURCE[0]}")"
|
|
-mkdir -p rust
|
|
-touch rust/COMMIT
|
|
-
|
|
-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
|
|
- rm -rf rust
|
|
- mkdir rust
|
|
- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
|
|
- | tar xz --directory rust --strip-components 1
|
|
- echo "$REV" > rust/COMMIT
|
|
-fi
|
|
diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
|
|
--- a/third_party/rust/syn/tests/common/eq.rs
|
|
+++ b/third_party/rust/syn/tests/common/eq.rs
|
|
@@ -1,41 +1,40 @@
|
|
+extern crate rustc_ast;
|
|
extern crate rustc_data_structures;
|
|
+extern crate rustc_span;
|
|
extern crate rustc_target;
|
|
-extern crate syntax;
|
|
-extern crate syntax_pos;
|
|
|
|
use std::mem;
|
|
|
|
-use self::rustc_data_structures::sync::Lrc;
|
|
-use self::rustc_data_structures::thin_vec::ThinVec;
|
|
-use self::rustc_target::abi::FloatTy;
|
|
-use self::rustc_target::spec::abi::Abi;
|
|
-use self::syntax::ast::{
|
|
- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
|
|
- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
|
|
- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
|
|
- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
|
|
- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
|
|
- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
|
|
- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
|
|
- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
|
|
- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
|
|
- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
|
|
- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
|
|
- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
|
|
- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
|
|
+use rustc_ast::ast::{
|
|
+ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
|
|
+ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
|
|
+ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
|
|
+ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
|
|
+ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
|
|
+ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
|
|
+ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
|
|
+ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
|
|
+ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
|
|
+ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
|
|
+ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
|
|
+ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
|
|
+ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
|
|
+ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
|
|
+ WhereRegionPredicate,
|
|
};
|
|
-use self::syntax::parse::lexer::comments;
|
|
-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
|
|
-use self::syntax::ptr::P;
|
|
-use self::syntax::source_map::Spanned;
|
|
-use self::syntax::symbol::{sym, Symbol};
|
|
-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
|
|
-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
|
|
+use rustc_ast::ptr::P;
|
|
+use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
|
|
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
|
|
+use rustc_data_structures::sync::Lrc;
|
|
+use rustc_data_structures::thin_vec::ThinVec;
|
|
+use rustc_span::source_map::Spanned;
|
|
+use rustc_span::symbol::Ident;
|
|
+use rustc_span::{Span, Symbol, SyntaxContext};
|
|
|
|
pub trait SpanlessEq {
|
|
fn eq(&self, other: &Self) -> bool;
|
|
}
|
|
|
|
impl<T: SpanlessEq> SpanlessEq for P<T> {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
SpanlessEq::eq(&**self, &**other)
|
|
@@ -81,24 +80,16 @@ impl<T: SpanlessEq> SpanlessEq for Spann
|
|
}
|
|
|
|
impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
|
|
}
|
|
}
|
|
|
|
-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
|
|
- fn eq(&self, other: &Self) -> bool {
|
|
- SpanlessEq::eq(&self.0, &other.0)
|
|
- && SpanlessEq::eq(&self.1, &other.1)
|
|
- && SpanlessEq::eq(&self.2, &other.2)
|
|
- }
|
|
-}
|
|
-
|
|
macro_rules! spanless_eq_true {
|
|
($name:ident) => {
|
|
impl SpanlessEq for $name {
|
|
fn eq(&self, _other: &Self) -> bool {
|
|
true
|
|
}
|
|
}
|
|
};
|
|
@@ -121,60 +112,62 @@ macro_rules! spanless_eq_partial_eq {
|
|
}
|
|
|
|
spanless_eq_partial_eq!(bool);
|
|
spanless_eq_partial_eq!(u8);
|
|
spanless_eq_partial_eq!(u16);
|
|
spanless_eq_partial_eq!(u128);
|
|
spanless_eq_partial_eq!(usize);
|
|
spanless_eq_partial_eq!(char);
|
|
+spanless_eq_partial_eq!(String);
|
|
spanless_eq_partial_eq!(Symbol);
|
|
-spanless_eq_partial_eq!(Abi);
|
|
+spanless_eq_partial_eq!(CommentKind);
|
|
spanless_eq_partial_eq!(DelimToken);
|
|
+spanless_eq_partial_eq!(InlineAsmOptions);
|
|
|
|
macro_rules! spanless_eq_struct {
|
|
{
|
|
- $name:ident;
|
|
+ $name:ident $(<$param:ident>)?;
|
|
$([$field:ident $other:ident])*
|
|
$(![$ignore:ident])*
|
|
} => {
|
|
- impl SpanlessEq for $name {
|
|
+ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
let $name { $($field,)* $($ignore: _,)* } = self;
|
|
let $name { $($field: $other,)* $($ignore: _,)* } = other;
|
|
$(SpanlessEq::eq($field, $other))&&*
|
|
}
|
|
}
|
|
};
|
|
|
|
{
|
|
- $name:ident;
|
|
+ $name:ident $(<$param:ident>)?;
|
|
$([$field:ident $other:ident])*
|
|
$next:ident
|
|
$($rest:ident)*
|
|
$(!$ignore:ident)*
|
|
} => {
|
|
spanless_eq_struct! {
|
|
- $name;
|
|
+ $name $(<$param>)*;
|
|
$([$field $other])*
|
|
[$next other]
|
|
$($rest)*
|
|
$(!$ignore)*
|
|
}
|
|
};
|
|
|
|
{
|
|
- $name:ident;
|
|
+ $name:ident $(<$param:ident>)?;
|
|
$([$field:ident $other:ident])*
|
|
$(![$ignore:ident])*
|
|
!$next:ident
|
|
$(!$rest:ident)*
|
|
} => {
|
|
spanless_eq_struct! {
|
|
- $name;
|
|
+ $name $(<$param>)*;
|
|
$([$field $other])*
|
|
$(![$ignore])*
|
|
![$next]
|
|
$(!$rest)*
|
|
}
|
|
};
|
|
}
|
|
|
|
@@ -258,129 +251,141 @@ macro_rules! spanless_eq_enum {
|
|
$name;
|
|
$([$variant $($fields)*])*
|
|
[$next]
|
|
$($rest)*
|
|
}
|
|
};
|
|
}
|
|
|
|
-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
|
|
+spanless_eq_struct!(AngleBracketedArgs; span args);
|
|
spanless_eq_struct!(AnonConst; id value);
|
|
-spanless_eq_struct!(Arg; attrs ty pat id span);
|
|
-spanless_eq_struct!(Arm; attrs pats guard body span id);
|
|
+spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
|
|
spanless_eq_struct!(AssocTyConstraint; id ident kind span);
|
|
-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
|
|
-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
|
|
+spanless_eq_struct!(AttrItem; path args);
|
|
+spanless_eq_struct!(Attribute; kind id style span);
|
|
+spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
|
|
spanless_eq_struct!(Block; stmts id rules span);
|
|
-spanless_eq_struct!(Crate; module attrs span);
|
|
+spanless_eq_struct!(Crate; module attrs span proc_macros);
|
|
spanless_eq_struct!(EnumDef; variants);
|
|
-spanless_eq_struct!(Expr; id node span attrs);
|
|
-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
|
|
-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
|
|
-spanless_eq_struct!(FnDecl; inputs output c_variadic);
|
|
-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
|
|
-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
|
|
+spanless_eq_struct!(Expr; id kind span attrs !tokens);
|
|
+spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
|
|
+spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
|
|
+spanless_eq_struct!(FnDecl; inputs output);
|
|
+spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
|
|
+spanless_eq_struct!(FnSig; header decl span);
|
|
spanless_eq_struct!(ForeignMod; abi items);
|
|
-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
|
|
+spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
|
|
spanless_eq_struct!(Generics; params where_clause span);
|
|
spanless_eq_struct!(GlobalAsm; asm);
|
|
-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
|
|
-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
|
|
-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
|
|
-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
|
|
+spanless_eq_struct!(InlineAsm; template operands options line_spans);
|
|
+spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
|
|
spanless_eq_struct!(Label; ident);
|
|
spanless_eq_struct!(Lifetime; id ident);
|
|
-spanless_eq_struct!(Lit; token node span);
|
|
+spanless_eq_struct!(Lit; token kind span);
|
|
+spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
|
|
+spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
|
|
spanless_eq_struct!(Local; pat ty init id span attrs);
|
|
-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
|
|
-spanless_eq_struct!(MacroDef; tokens legacy);
|
|
-spanless_eq_struct!(MethodSig; header decl);
|
|
+spanless_eq_struct!(MacCall; path args prior_type_ascription);
|
|
+spanless_eq_struct!(MacCallStmt; mac style attrs);
|
|
+spanless_eq_struct!(MacroDef; body macro_rules);
|
|
spanless_eq_struct!(Mod; inner items inline);
|
|
spanless_eq_struct!(MutTy; ty mutbl);
|
|
+spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
|
|
spanless_eq_struct!(ParenthesizedArgs; span inputs output);
|
|
-spanless_eq_struct!(Pat; id node span);
|
|
+spanless_eq_struct!(Pat; id kind span tokens);
|
|
spanless_eq_struct!(Path; span segments);
|
|
spanless_eq_struct!(PathSegment; ident id args);
|
|
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
|
|
spanless_eq_struct!(QSelf; ty path_span position);
|
|
-spanless_eq_struct!(Stmt; id node span);
|
|
-spanless_eq_struct!(StructField; span ident vis id ty attrs);
|
|
+spanless_eq_struct!(Stmt; id kind span);
|
|
+spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
|
|
+spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
|
|
spanless_eq_struct!(Token; kind span);
|
|
-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
|
|
spanless_eq_struct!(TraitRef; path ref_id);
|
|
-spanless_eq_struct!(Ty; id node span);
|
|
+spanless_eq_struct!(Ty; id kind span);
|
|
spanless_eq_struct!(UseTree; prefix kind span);
|
|
-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
|
|
+spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
|
|
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
|
|
-spanless_eq_struct!(WhereClause; predicates span);
|
|
+spanless_eq_struct!(WhereClause; has_where_token predicates span);
|
|
spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
|
|
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
|
|
-spanless_eq_enum!(AsmDialect; Att Intel);
|
|
+spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
|
|
+spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
|
|
spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
|
|
+spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
|
|
+spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
|
|
spanless_eq_enum!(AttrStyle; Outer Inner);
|
|
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
|
|
spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
|
|
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
|
|
+spanless_eq_enum!(BorrowKind; Ref Raw);
|
|
spanless_eq_enum!(CaptureBy; Value Ref);
|
|
-spanless_eq_enum!(Constness; Const NotConst);
|
|
+spanless_eq_enum!(Const; Yes(0) No);
|
|
spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
|
|
-spanless_eq_enum!(Defaultness; Default Final);
|
|
+spanless_eq_enum!(Defaultness; Default(0) Final);
|
|
+spanless_eq_enum!(Extern; None Implicit Explicit(0));
|
|
spanless_eq_enum!(FloatTy; F32 F64);
|
|
-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
|
|
-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
|
|
+spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
|
|
+spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
|
|
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
|
|
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
|
|
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
|
|
-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
|
|
-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
|
|
-spanless_eq_enum!(ImplPolarity; Positive Negative);
|
|
+spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
|
|
+spanless_eq_enum!(ImplPolarity; Positive Negative(0));
|
|
+spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
|
|
+spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
|
|
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
|
|
-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
|
|
spanless_eq_enum!(IsAuto; Yes No);
|
|
+spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
|
|
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
|
|
+spanless_eq_enum!(LlvmAsmDialect; Att Intel);
|
|
+spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
|
|
spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
|
|
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
|
|
spanless_eq_enum!(Movability; Static Movable);
|
|
-spanless_eq_enum!(Mutability; Mutable Immutable);
|
|
+spanless_eq_enum!(Mutability; Mut Not);
|
|
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
|
|
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
|
|
-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
|
|
+spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
|
|
spanless_eq_enum!(StrStyle; Cooked Raw(0));
|
|
spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
|
|
-spanless_eq_enum!(TraitBoundModifier; None Maybe);
|
|
-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
|
|
+spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
|
|
spanless_eq_enum!(TraitObjectSyntax; Dyn None);
|
|
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
|
|
spanless_eq_enum!(UnOp; Deref Not Neg);
|
|
+spanless_eq_enum!(Unsafe; Yes(0) No);
|
|
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
|
|
-spanless_eq_enum!(Unsafety; Unsafe Normal);
|
|
spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
|
|
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
|
|
spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
|
|
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
|
|
-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
|
|
+spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
|
|
Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
|
|
While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
|
|
- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
|
|
- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
|
|
- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
|
|
- Try(0) Yield(0) Err);
|
|
-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
|
|
- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
|
|
- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
|
|
- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
|
|
+ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
|
|
+ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
|
|
+ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
|
|
+ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
|
|
+spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
|
|
+ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
|
|
+ Sym(expr));
|
|
+spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
|
|
+ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
|
|
+ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
|
|
+ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
|
|
+ MacCall(0) MacroDef(0));
|
|
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
|
|
- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
|
|
+ Float(0 1) Bool(0) Err(0));
|
|
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
|
|
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
|
|
- Paren(0) Mac(0));
|
|
+ Paren(0) MacCall(0));
|
|
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
|
|
Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
|
|
- ImplicitSelf Mac(0) Err CVarArgs);
|
|
+ ImplicitSelf MacCall(0) Err CVarArgs);
|
|
|
|
impl SpanlessEq for Ident {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
self.as_str() == other.as_str()
|
|
}
|
|
}
|
|
|
|
// Give up on comparing literals inside of macros because there are so many
|
|
@@ -409,49 +414,25 @@ impl SpanlessEq for TokenKind {
|
|
},
|
|
_ => self == other,
|
|
}
|
|
}
|
|
}
|
|
|
|
impl SpanlessEq for TokenStream {
|
|
fn eq(&self, other: &Self) -> bool {
|
|
- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
|
|
+ let mut this = self.clone().into_trees();
|
|
+ let mut other = other.clone().into_trees();
|
|
+ loop {
|
|
+ let this = match this.next() {
|
|
+ None => return other.next().is_none(),
|
|
+ Some(val) => val,
|
|
+ };
|
|
+ let other = match other.next() {
|
|
+ None => return false,
|
|
+ Some(val) => val,
|
|
+ };
|
|
+ if !SpanlessEq::eq(&this, &other) {
|
|
+ return false;
|
|
+ }
|
|
+ }
|
|
}
|
|
}
|
|
-
|
|
-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
|
|
- let mut tokens = Vec::new();
|
|
- for tt in tts.clone().into_trees() {
|
|
- let c = match tt {
|
|
- TokenTree::Token(Token {
|
|
- kind: TokenKind::DocComment(c),
|
|
- ..
|
|
- }) => c,
|
|
- _ => {
|
|
- tokens.push(tt);
|
|
- continue;
|
|
- }
|
|
- };
|
|
- let contents = comments::strip_doc_comment_decoration(&c.as_str());
|
|
- let style = comments::doc_comment_style(&c.as_str());
|
|
- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
|
|
- if style == AttrStyle::Inner {
|
|
- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
|
|
- }
|
|
- let lit = token::Lit {
|
|
- kind: token::LitKind::Str,
|
|
- symbol: Symbol::intern(&contents),
|
|
- suffix: None,
|
|
- };
|
|
- let tts = vec![
|
|
- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
|
|
- TokenTree::token(TokenKind::Eq, DUMMY_SP),
|
|
- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
|
|
- ];
|
|
- tokens.push(TokenTree::Delimited(
|
|
- DelimSpan::dummy(),
|
|
- DelimToken::Bracket,
|
|
- tts.into_iter().collect::<TokenStream>().into(),
|
|
- ));
|
|
- }
|
|
- tokens
|
|
-}
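
The rewritten SpanlessEq impl for TokenStream above no longer pre-expands doc comments; it walks both streams in lock-step and succeeds only if every pair of token trees matches and both streams end together. A minimal sketch of the same lock-step comparison over ordinary iterators, with a caller-supplied equivalence (eq_by and its names are illustrative, not part of the patch):

/// Compare two iterators element by element, returning false as soon as
/// they diverge in value or in length. This mirrors the loop the patch
/// introduces for TokenStream, just over plain iterators.
fn eq_by<A, B, I, J, F>(a: I, b: J, mut eq: F) -> bool
where
    I: IntoIterator<Item = A>,
    J: IntoIterator<Item = B>,
    F: FnMut(&A, &B) -> bool,
{
    let mut a = a.into_iter();
    let mut b = b.into_iter();
    loop {
        let x = match a.next() {
            None => return b.next().is_none(), // both exhausted: equal
            Some(x) => x,
        };
        let y = match b.next() {
            None => return false, // left side is longer
            Some(y) => y,
        };
        if !eq(&x, &y) {
            return false;
        }
    }
}

fn main() {
    assert!(eq_by(vec![1, 2, 3], vec![1, 2, 3], |x: &i32, y: &i32| x == y));
    assert!(!eq_by(vec![1, 2], vec![1, 2, 3], |x: &i32, y: &i32| x == y));
}
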
diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
--- a/third_party/rust/syn/tests/common/mod.rs
+++ b/third_party/rust/syn/tests/common/mod.rs
@@ -1,14 +1,27 @@
 #![allow(dead_code)]

+use rayon::ThreadPoolBuilder;
 use std::env;

 pub mod eq;
 pub mod parse;

 /// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
 pub fn abort_after() -> usize {
     match env::var("ABORT_AFTER_FAILURE") {
         Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
         Err(_) => usize::max_value(),
     }
 }
+
+/// Configure Rayon threadpool.
+pub fn rayon_init() {
+    let stack_size = match env::var("RUST_MIN_STACK") {
+        Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
+        Err(_) => 20 * 1024 * 1024,
+    };
+    ThreadPoolBuilder::new()
+        .stack_size(stack_size)
+        .build_global()
+        .unwrap();
+}
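
The new rayon_init helper sizes the worker stacks of the global Rayon pool (20 MiB by default, overridable through RUST_MIN_STACK) because recursively parsing deeply nested test sources overflows the default stack. A hedged usage sketch of the same configuration, assuming rayon as a dependency; note that build_global may only be called once per process:

use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use std::env;

fn main() {
    // Fall back to a 20 MiB stack unless RUST_MIN_STACK overrides it.
    let stack_size = env::var("RUST_MIN_STACK")
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or(20 * 1024 * 1024);

    // Configure the global pool once, before any parallel work is spawned.
    ThreadPoolBuilder::new()
        .stack_size(stack_size)
        .build_global()
        .unwrap();

    // Any later par_iter work now runs on threads with the larger stack.
    let total: usize = (0usize..1_000).into_par_iter().sum();
    assert_eq!(total, 499_500);
}
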
diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
--- a/third_party/rust/syn/tests/common/parse.rs
+++ b/third_party/rust/syn/tests/common/parse.rs
@@ -1,25 +1,25 @@
-extern crate proc_macro2;
-extern crate syn;
-extern crate syntax;
-extern crate syntax_pos;
+extern crate rustc_ast;
+extern crate rustc_expand;
+extern crate rustc_parse as parse;
+extern crate rustc_session;
+extern crate rustc_span;

-use self::syntax::ast;
-use self::syntax::parse::{self, ParseSess};
-use self::syntax::ptr::P;
-use self::syntax::source_map::FilePathMapping;
-use self::syntax_pos::FileName;
+use rustc_ast::ast;
+use rustc_ast::ptr::P;
+use rustc_session::parse::ParseSess;
+use rustc_span::source_map::FilePathMapping;
+use rustc_span::FileName;

 use std::panic;

-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
+pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
     match panic::catch_unwind(|| {
         let sess = ParseSess::new(FilePathMapping::empty());
-        sess.span_diagnostic.set_continue_after_error(false);
         let e = parse::new_parser_from_source_str(
             &sess,
             FileName::Custom("test_precedence".to_string()),
             input.to_string(),
         )
         .parse_expr();
         match e {
             Ok(expr) => Some(expr),
@@ -27,17 +27,17 @@ pub fn libsyntax_expr(input: &str) -> Op
                 diagnostic.emit();
                 None
             }
         }
     }) {
         Ok(Some(e)) => Some(e),
         Ok(None) => None,
         Err(_) => {
-            errorf!("libsyntax panicked\n");
+            errorf!("librustc panicked\n");
             None
         }
     }
 }

 pub fn syn_expr(input: &str) -> Option<syn::Expr> {
     match syn::parse_str(input) {
         Ok(e) => Some(e),
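
librustc_expr runs rustc's parser under catch_unwind so that a parser panic is reported and flattened into None instead of tearing down the whole test run. A self-contained sketch of the same shape using only the standard library (the integer parser is a stand-in for rustc's expression parser):

use std::panic;

// Collapse both failure modes, an Err and a panic, into None.
fn parse_i64_or_none(input: &str) -> Option<i64> {
    match panic::catch_unwind(|| input.parse::<i64>().ok()) {
        Ok(Some(value)) => Some(value),
        Ok(None) => None,
        Err(_) => {
            eprintln!("parser panicked");
            None
        }
    }
}

fn main() {
    assert_eq!(parse_i64_or_none("42"), Some(42));
    assert_eq!(parse_i64_or_none("not a number"), None);
}
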
diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
|
|
--- a/third_party/rust/syn/tests/debug/gen.rs
|
|
+++ b/third_party/rust/syn/tests/debug/gen.rs
|
|
@@ -1,13 +1,13 @@
|
|
// This file is @generated by syn-internal-codegen.
|
|
// It is not intended for manual editing.
|
|
|
|
use super::{Lite, RefCast};
|
|
-use std::fmt::{self, Debug};
|
|
+use std::fmt::{self, Debug, Display};
|
|
impl Debug for Lite<syn::Abi> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
let _val = &self.value;
|
|
let mut formatter = formatter.debug_struct("Abi");
|
|
if let Some(val) = &_val.name {
|
|
#[derive(RefCast)]
|
|
#[repr(transparent)]
|
|
struct Print(syn::LitStr);
|
|
@@ -1034,19 +1034,19 @@ impl Debug for Lite<syn::Expr> {
|
|
if !_val.attrs.is_empty() {
|
|
formatter.field("attrs", Lite(&_val.attrs));
|
|
}
|
|
formatter.field("block", Lite(&_val.block));
|
|
formatter.finish()
|
|
}
|
|
syn::Expr::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
syn::Expr::While(_val) => {
|
|
let mut formatter = formatter.debug_struct("Expr::While");
|
|
if !_val.attrs.is_empty() {
|
|
formatter.field("attrs", Lite(&_val.attrs));
|
|
}
|
|
if let Some(val) = &_val.label {
|
|
@@ -2111,19 +2111,19 @@ impl Debug for Lite<syn::ForeignItem> {
|
|
}
|
|
}
|
|
formatter.field("semi_token", Print::ref_cast(val));
|
|
}
|
|
formatter.finish()
|
|
}
|
|
syn::ForeignItem::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
_ => unreachable!(),
|
|
}
|
|
}
|
|
}
|
|
impl Debug for Lite<syn::ForeignItemFn> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
@@ -2427,19 +2427,19 @@ impl Debug for Lite<syn::ImplItem> {
|
|
}
|
|
}
|
|
formatter.field("semi_token", Print::ref_cast(val));
|
|
}
|
|
formatter.finish()
|
|
}
|
|
syn::ImplItem::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
_ => unreachable!(),
|
|
}
|
|
}
|
|
}
|
|
impl Debug for Lite<syn::ImplItemConst> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
@@ -2935,19 +2935,19 @@ impl Debug for Lite<syn::Item> {
|
|
}
|
|
formatter.field("leading_colon", Print::ref_cast(val));
|
|
}
|
|
formatter.field("tree", Lite(&_val.tree));
|
|
formatter.finish()
|
|
}
|
|
syn::Item::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
_ => unreachable!(),
|
|
}
|
|
}
|
|
}
|
|
impl Debug for Lite<syn::ItemConst> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
@@ -3432,19 +3432,19 @@ impl Debug for Lite<syn::Lit> {
|
|
syn::Lit::Float(_val) => write!(formatter, "{}", _val),
|
|
syn::Lit::Bool(_val) => {
|
|
let mut formatter = formatter.debug_struct("Lit::Bool");
|
|
formatter.field("value", Lite(&_val.value));
|
|
formatter.finish()
|
|
}
|
|
syn::Lit::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
}
|
|
}
|
|
}
|
|
impl Debug for Lite<syn::LitBool> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
let _val = &self.value;
|
|
@@ -3873,19 +3873,19 @@ impl Debug for Lite<syn::Pat> {
|
|
formatter.field("attrs", Lite(&_val.attrs));
|
|
}
|
|
formatter.field("pat", Lite(&_val.pat));
|
|
formatter.field("ty", Lite(&_val.ty));
|
|
formatter.finish()
|
|
}
|
|
syn::Pat::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
syn::Pat::Wild(_val) => {
|
|
let mut formatter = formatter.debug_struct("Pat::Wild");
|
|
if !_val.attrs.is_empty() {
|
|
formatter.field("attrs", Lite(&_val.attrs));
|
|
}
|
|
formatter.finish()
|
|
@@ -4669,19 +4669,19 @@ impl Debug for Lite<syn::TraitItem> {
|
|
}
|
|
}
|
|
formatter.field("semi_token", Print::ref_cast(val));
|
|
}
|
|
formatter.finish()
|
|
}
|
|
syn::TraitItem::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
_ => unreachable!(),
|
|
}
|
|
}
|
|
}
|
|
impl Debug for Lite<syn::TraitItemConst> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
@@ -5035,19 +5035,19 @@ impl Debug for Lite<syn::Type> {
|
|
let mut formatter = formatter.debug_struct("Type::Tuple");
|
|
if !_val.elems.is_empty() {
|
|
formatter.field("elems", Lite(&_val.elems));
|
|
}
|
|
formatter.finish()
|
|
}
|
|
syn::Type::Verbatim(_val) => {
|
|
formatter.write_str("Verbatim")?;
|
|
- formatter.write_str("(")?;
|
|
- Debug::fmt(Lite(_val), formatter)?;
|
|
- formatter.write_str(")")?;
|
|
+ formatter.write_str("(`")?;
|
|
+ Display::fmt(_val, formatter)?;
|
|
+ formatter.write_str("`)")?;
|
|
Ok(())
|
|
}
|
|
_ => unreachable!(),
|
|
}
|
|
}
|
|
}
|
|
impl Debug for Lite<syn::TypeArray> {
|
|
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
--- a/third_party/rust/syn/tests/debug/mod.rs
+++ b/third_party/rust/syn/tests/debug/mod.rs
@@ -1,15 +1,12 @@
-extern crate proc_macro2;
-extern crate ref_cast;
-
 mod gen;

-use self::proc_macro2::{Ident, Literal, TokenStream};
-use self::ref_cast::RefCast;
+use proc_macro2::{Ident, Literal, TokenStream};
+use ref_cast::RefCast;
 use std::fmt::{self, Debug};
 use std::ops::Deref;
 use syn::punctuated::Punctuated;

 #[derive(RefCast)]
 #[repr(transparent)]
 pub struct Lite<T: ?Sized> {
     value: T,
@@ -61,17 +58,25 @@ impl Debug for Lite<Ident> {
 impl Debug for Lite<Literal> {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         write!(formatter, "{}", self.value)
     }
 }

 impl Debug for Lite<TokenStream> {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-        write!(formatter, "`{}`", self.value)
+        let string = self.value.to_string();
+        if string.len() <= 80 {
+            write!(formatter, "TokenStream(`{}`)", self.value)
+        } else {
+            formatter
+                .debug_tuple("TokenStream")
+                .field(&format_args!("`{}`", string))
+                .finish()
+        }
     }
 }

 impl<'a, T> Debug for Lite<&'a T>
 where
     Lite<T>: Debug,
 {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
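
The Debug impl for Lite<TokenStream> now keeps short streams on one line and routes long ones through debug_tuple, so that pretty-printing with {:#?} can break the value onto its own indented line. A small stand-alone sketch of the same compact-versus-structured switch (the Pretty type is illustrative):

use std::fmt::{self, Debug};

struct Pretty(String);

impl Debug for Pretty {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        if self.0.len() <= 80 {
            // Short values stay on a single line, wrapped in the type name.
            write!(formatter, "Pretty(`{}`)", self.0)
        } else {
            // Long values go through debug_tuple so {:#?} can reflow them.
            formatter
                .debug_tuple("Pretty")
                .field(&format_args!("`{}`", self.0))
                .finish()
        }
    }
}

fn main() {
    println!("{:?}", Pretty("short".to_string()));
    println!("{:#?}", Pretty("x".repeat(120)));
}
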
diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
deleted file mode 100644
--- a/third_party/rust/syn/tests/features/error.rs
+++ /dev/null
@@ -1,1 +0,0 @@
-"Hello! You want: cargo test --release --all-features"
diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
deleted file mode 100644
--- a/third_party/rust/syn/tests/features/mod.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-#[allow(unused_macros)]
-macro_rules! hide_from_rustfmt {
-    ($mod:item) => {
-        $mod
-    };
-}
-
-#[cfg(not(all(
-    feature = "derive",
-    feature = "full",
-    feature = "parsing",
-    feature = "printing",
-    feature = "visit",
-    feature = "visit-mut",
-    feature = "fold",
-    feature = "clone-impls",
-    feature = "extra-traits",
-    feature = "proc-macro",
-)))]
-hide_from_rustfmt! {
-    mod error;
-}
diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
--- a/third_party/rust/syn/tests/macros/mod.rs
+++ b/third_party/rust/syn/tests/macros/mod.rs
@@ -1,10 +1,8 @@
-extern crate proc_macro2;
-
 #[path = "../debug/mod.rs"]
 pub mod debug;

 use syn;
 use syn::parse::{Parse, Result};

 #[macro_export]
 macro_rules! errorf {
@@ -37,28 +35,28 @@ macro_rules! snapshot {
     };
 }

 #[macro_export]
 macro_rules! snapshot_impl {
     (($expr:ident) as $t:ty, @$snapshot:literal) => {
         let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
         let debug = crate::macros::debug::Lite(&$expr);
-        insta::assert_debug_snapshot_matches!(debug, @$snapshot);
+        insta::assert_debug_snapshot!(debug, @$snapshot);
     };
     (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
         let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
         let debug = crate::macros::debug::Lite(&syntax_tree);
-        insta::assert_debug_snapshot_matches!(debug, @$snapshot);
+        insta::assert_debug_snapshot!(debug, @$snapshot);
         syntax_tree
     }};
     (($($expr:tt)*) , @$snapshot:literal) => {{
         let syntax_tree = $($expr)*;
         let debug = crate::macros::debug::Lite(&syntax_tree);
-        insta::assert_debug_snapshot_matches!(debug, @$snapshot);
+        insta::assert_debug_snapshot!(debug, @$snapshot);
         syntax_tree
     }};
     (($($expr:tt)*) $next:tt $($rest:tt)*) => {
         snapshot_impl!(($($expr)* $next) $($rest)*)
     };
 }

 pub trait Tokens {
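
The snapshot macros migrate from insta's deprecated assert_debug_snapshot_matches! to assert_debug_snapshot!, still with inline snapshots written after an @ sign; the new tests/.gitignore entry for *.pending-snap covers the files insta leaves behind while an inline snapshot is being updated. A hedged usage sketch, assuming insta is available as a dev-dependency:

#[cfg(test)]
mod tests {
    #[test]
    fn inline_debug_snapshot() {
        // The expected Debug output is stored inline after `@`; running
        // `cargo insta review` rewrites it when the output changes.
        insta::assert_debug_snapshot!(2 + 2, @"4");
    }
}
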
diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
|
|
--- a/third_party/rust/syn/tests/repo/mod.rs
|
|
+++ b/third_party/rust/syn/tests/repo/mod.rs
|
|
@@ -1,60 +1,135 @@
|
|
-extern crate walkdir;
|
|
+mod progress;
|
|
+
|
|
+use self::progress::Progress;
|
|
+use anyhow::Result;
|
|
+use flate2::read::GzDecoder;
|
|
+use std::fs;
|
|
+use std::path::Path;
|
|
+use tar::Archive;
|
|
+use walkdir::DirEntry;
|
|
+
|
|
+const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
|
|
+
|
|
+#[rustfmt::skip]
|
|
+static EXCLUDE: &[&str] = &[
|
|
+ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
|
|
+ "test/ui/const-generics/const-expression-parameter.rs",
|
|
|
|
-use std::process::Command;
|
|
+ // Deprecated anonymous parameter syntax in traits
|
|
+ "test/ui/issues/issue-13105.rs",
|
|
+ "test/ui/issues/issue-13775.rs",
|
|
+ "test/ui/issues/issue-34074.rs",
|
|
+ "test/ui/proc-macro/trait-fn-args-2015.rs",
|
|
|
|
-use self::walkdir::DirEntry;
|
|
+ // Not actually test cases
|
|
+ "test/rustdoc-ui/test-compile-fail2.rs",
|
|
+ "test/rustdoc-ui/test-compile-fail3.rs",
|
|
+ "test/ui/include-single-expr-helper.rs",
|
|
+ "test/ui/include-single-expr-helper-1.rs",
|
|
+ "test/ui/issues/auxiliary/issue-21146-inc.rs",
|
|
+ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
|
|
+ "test/ui/lint/expansion-time-include.rs",
|
|
+ "test/ui/macros/auxiliary/macro-comma-support.rs",
|
|
+ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
|
|
+];
|
|
|
|
pub fn base_dir_filter(entry: &DirEntry) -> bool {
|
|
let path = entry.path();
|
|
if path.is_dir() {
|
|
return true; // otherwise walkdir does not visit the files
|
|
}
|
|
if path.extension().map(|e| e != "rs").unwrap_or(true) {
|
|
return false;
|
|
}
|
|
- let path_string = path.to_string_lossy();
|
|
- let path_string = if cfg!(windows) {
|
|
- path_string.replace('\\', "/").into()
|
|
+
|
|
+ let mut path_string = path.to_string_lossy();
|
|
+ if cfg!(windows) {
|
|
+ path_string = path_string.replace('\\', "/").into();
|
|
+ }
|
|
+ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
|
|
+ path
|
|
+ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
|
|
+ path
|
|
} else {
|
|
- path_string
|
|
+ panic!("unexpected path in Rust dist: {}", path_string);
|
|
};
|
|
+
|
|
// TODO assert that parsing fails on the parse-fail cases
|
|
- if path_string.starts_with("tests/rust/src/test/parse-fail")
|
|
- || path_string.starts_with("tests/rust/src/test/compile-fail")
|
|
- || path_string.starts_with("tests/rust/src/test/rustfix")
|
|
+ if path.starts_with("test/parse-fail")
|
|
+ || path.starts_with("test/compile-fail")
|
|
+ || path.starts_with("test/rustfix")
|
|
{
|
|
return false;
|
|
}
|
|
|
|
- if path_string.starts_with("tests/rust/src/test/ui") {
|
|
- let stderr_path = path.with_extension("stderr");
|
|
+ if path.starts_with("test/ui") {
|
|
+ let stderr_path = entry.path().with_extension("stderr");
|
|
if stderr_path.exists() {
|
|
// Expected to fail in some way
|
|
return false;
|
|
}
|
|
}
|
|
|
|
- match path_string.as_ref() {
|
|
- // Deprecated placement syntax
|
|
- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
|
|
- // Deprecated anonymous parameter syntax in traits
|
|
- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
|
|
- "tests/rust/src/test/ui/issues/issue-13105.rs" |
|
|
- "tests/rust/src/test/ui/issues/issue-13775.rs" |
|
|
- "tests/rust/src/test/ui/issues/issue-34074.rs" |
|
|
- // Deprecated await macro syntax
|
|
- "tests/rust/src/test/ui/async-await/await-macro.rs" |
|
|
- // 2015-style dyn that libsyntax rejects
|
|
- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
|
|
- // not actually test cases
|
|
- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
|
|
- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
|
|
- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
|
|
- _ => true,
|
|
+ !EXCLUDE.contains(&path)
|
|
+}
|
|
+
|
|
+#[allow(dead_code)]
|
|
+pub fn edition(path: &Path) -> &'static str {
|
|
+ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
|
|
+ "2015"
|
|
+ } else {
|
|
+ "2018"
|
|
}
|
|
}
|
|
|
|
pub fn clone_rust() {
|
|
- let result = Command::new("tests/clone.sh").status().unwrap();
|
|
- assert!(result.success());
|
|
+ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
|
|
+ Err(_) => true,
|
|
+ Ok(contents) => contents.trim() != REVISION,
|
|
+ };
|
|
+ if needs_clone {
|
|
+ download_and_unpack().unwrap();
|
|
+ }
|
|
+ let mut missing = String::new();
|
|
+ let test_src = Path::new("tests/rust/src");
|
|
+ for exclude in EXCLUDE {
|
|
+ if !test_src.join(exclude).exists() {
|
|
+ missing += "\ntests/rust/src/";
|
|
+ missing += exclude;
|
|
+ }
|
|
+ }
|
|
+ if !missing.is_empty() {
|
|
+ panic!("excluded test file does not exist:{}\n", missing);
|
|
+ }
|
|
}
|
|
+
|
|
+fn download_and_unpack() -> Result<()> {
|
|
+ let url = format!(
|
|
+ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
|
|
+ REVISION
|
|
+ );
|
|
+ let response = reqwest::blocking::get(&url)?.error_for_status()?;
|
|
+ let progress = Progress::new(response);
|
|
+ let decoder = GzDecoder::new(progress);
|
|
+ let mut archive = Archive::new(decoder);
|
|
+ let prefix = format!("rust-{}", REVISION);
|
|
+
|
|
+ let tests_rust = Path::new("tests/rust");
|
|
+ if tests_rust.exists() {
|
|
+ fs::remove_dir_all(tests_rust)?;
|
|
+ }
|
|
+
|
|
+ for entry in archive.entries()? {
|
|
+ let mut entry = entry?;
|
|
+ let path = entry.path()?;
|
|
+ if path == Path::new("pax_global_header") {
|
|
+ continue;
|
|
+ }
|
|
+ let relative = path.strip_prefix(&prefix)?;
|
|
+ let out = tests_rust.join(relative);
|
|
+ entry.unpack(&out)?;
|
|
+ }
|
|
+
|
|
+ fs::write("tests/rust/COMMIT", REVISION)?;
|
|
+ Ok(())
|
|
+}
diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/repo/progress.rs
@@ -0,0 +1,37 @@
+use std::io::{Read, Result};
+use std::time::{Duration, Instant};
+
+pub struct Progress<R> {
+    bytes: usize,
+    tick: Instant,
+    stream: R,
+}
+
+impl<R> Progress<R> {
+    pub fn new(stream: R) -> Self {
+        Progress {
+            bytes: 0,
+            tick: Instant::now() + Duration::from_millis(2000),
+            stream,
+        }
+    }
+}
+
+impl<R: Read> Read for Progress<R> {
+    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
+        let num = self.stream.read(buf)?;
+        self.bytes += num;
+        let now = Instant::now();
+        if now > self.tick {
+            self.tick = now + Duration::from_millis(500);
+            errorf!("downloading... {} bytes\n", self.bytes);
+        }
+        Ok(num)
+    }
+}
+
+impl<R> Drop for Progress<R> {
+    fn drop(&mut self) {
+        errorf!("done ({} bytes)\n", self.bytes);
+    }
+}
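
Progress wraps any Read and reports byte counts as data streams through it; in clone_rust the chain is HTTP response -> Progress -> GzDecoder -> tar::Archive. A minimal sketch of the same wrapper idea, reduced to a plain byte counter over an in-memory reader (Counting is illustrative and uses only the standard library):

use std::io::{Cursor, Read, Result};

// Counts bytes as they pass through the inner reader; the real Progress
// type above additionally rate-limits its status messages with Instant.
struct Counting<R> {
    bytes: usize,
    inner: R,
}

impl<R: Read> Read for Counting<R> {
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        let n = self.inner.read(buf)?;
        self.bytes += n;
        Ok(n)
    }
}

fn main() -> Result<()> {
    let mut reader = Counting {
        bytes: 0,
        inner: Cursor::new(vec![0u8; 4096]),
    };
    let mut sink = Vec::new();
    reader.read_to_end(&mut sink)?;
    assert_eq!(reader.bytes, 4096);
    Ok(())
}
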
diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
|
|
--- a/third_party/rust/syn/tests/test_asyncness.rs
|
|
+++ b/third_party/rust/syn/tests/test_asyncness.rs
|
|
@@ -1,41 +1,37 @@
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
use syn::{Expr, Item};
|
|
|
|
#[test]
|
|
fn test_async_fn() {
|
|
let input = "async fn process() {}";
|
|
|
|
snapshot!(input as Item, @r###"
|
|
- ⋮Item::Fn {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ sig: Signature {
|
|
- ⋮ asyncness: Some,
|
|
- ⋮ ident: "process",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ output: Default,
|
|
- ⋮ },
|
|
- ⋮ block: Block,
|
|
- ⋮}
|
|
+ Item::Fn {
|
|
+ vis: Inherited,
|
|
+ sig: Signature {
|
|
+ asyncness: Some,
|
|
+ ident: "process",
|
|
+ generics: Generics,
|
|
+ output: Default,
|
|
+ },
|
|
+ block: Block,
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_async_closure() {
|
|
let input = "async || {}";
|
|
|
|
snapshot!(input as Expr, @r###"
|
|
- ⋮Expr::Closure {
|
|
- ⋮ asyncness: Some,
|
|
- ⋮ output: Default,
|
|
- ⋮ body: Expr::Block {
|
|
- ⋮ block: Block,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ Expr::Closure {
|
|
+ asyncness: Some,
|
|
+ output: Default,
|
|
+ body: Expr::Block {
|
|
+ block: Block,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
|
|
--- a/third_party/rust/syn/tests/test_attribute.rs
|
|
+++ b/third_party/rust/syn/tests/test_attribute.rs
|
|
@@ -1,295 +1,333 @@
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
use syn::parse::Parser;
|
|
use syn::{Attribute, Meta};
|
|
|
|
#[test]
|
|
fn test_meta_item_word() {
|
|
let meta = test("#[foo]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮})
|
|
+ Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_name_value() {
|
|
let meta = test("#[foo = 5]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_bool_value() {
|
|
let meta = test("#[foo = true]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: Lit::Bool {
|
|
- ⋮ value: true,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: Lit::Bool {
|
|
+ value: true,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
let meta = test("#[foo = false]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: Lit::Bool {
|
|
- ⋮ value: false,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: Lit::Bool {
|
|
+ value: false,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_list_lit() {
|
|
let meta = test("#[foo(5)]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Lit(5),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Lit(5),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_list_word() {
|
|
let meta = test("#[foo(bar)]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "bar",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "bar",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_list_name_value() {
|
|
let meta = test("#[foo(bar = 5)]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "bar",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "bar",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_list_bool_value() {
|
|
let meta = test("#[foo(bar = true)]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "bar",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: Lit::Bool {
|
|
- ⋮ value: true,
|
|
- ⋮ },
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "bar",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: Lit::Bool {
|
|
+ value: true,
|
|
+ },
|
|
+ }),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_meta_item_multiple() {
|
|
let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "word",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ Meta(Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "name",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮ }),
|
|
- ⋮ Meta(Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "list",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "name2",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 6,
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "word2",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "word",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })),
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "name",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }),
|
|
+ Meta(Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "list",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "name2",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 6,
|
|
+ }),
|
|
+ ],
|
|
+ }),
|
|
+ Meta(Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "word2",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_bool_lit() {
|
|
let meta = test("#[foo(true)]");
|
|
|
|
snapshot!(meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Lit(Lit::Bool {
|
|
- ⋮ value: true,
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Lit(Lit::Bool {
|
|
+ value: true,
|
|
+ }),
|
|
+ ],
|
|
+ }
|
|
+ "###);
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn test_negative_lit() {
|
|
+ let meta = test("#[form(min = -1, max = 200)]");
|
|
+
|
|
+ snapshot!(meta, @r###"
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "form",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "min",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: -1,
|
|
+ }),
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "max",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 200,
|
|
+ }),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
fn test(input: &str) -> Meta {
|
|
let attrs = Attribute::parse_outer.parse_str(input).unwrap();
|
|
|
|
assert_eq!(attrs.len(), 1);
|
|
let attr = attrs.into_iter().next().unwrap();
|
|
diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
|
|
--- a/third_party/rust/syn/tests/test_derive_input.rs
|
|
+++ b/third_party/rust/syn/tests/test_derive_input.rs
|
|
@@ -1,201 +1,196 @@
|
|
-extern crate quote;
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
use quote::quote;
|
|
use syn::{Data, DeriveInput};
|
|
|
|
#[test]
|
|
fn test_unit() {
|
|
let input = quote! {
|
|
struct Unit;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "Unit",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "Unit",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_struct() {
|
|
let input = quote! {
|
|
#[derive(Debug, Clone)]
|
|
pub struct Item {
|
|
pub ident: Ident,
|
|
pub attrs: Vec<Attribute>
|
|
}
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ attrs: [
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "derive",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: `( Debug , Clone )`,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ident: "Item",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Fields::Named {
|
|
- ⋮ named: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ident: Some("ident"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Ident",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ident: Some("attrs"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Vec",
|
|
- ⋮ arguments: PathArguments::AngleBracketed {
|
|
- ⋮ args: [
|
|
- ⋮ Type(Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Attribute",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "derive",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(`(Debug , Clone)`),
|
|
+ },
|
|
+ ],
|
|
+ vis: Visibility::Public,
|
|
+ ident: "Item",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Fields::Named {
|
|
+ named: [
|
|
+ Field {
|
|
+ vis: Visibility::Public,
|
|
+ ident: Some("ident"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Ident",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ Field {
|
|
+ vis: Visibility::Public,
|
|
+ ident: Some("attrs"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Vec",
|
|
+ arguments: PathArguments::AngleBracketed {
|
|
+ args: [
|
|
+ Type(Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Attribute",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ }),
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "derive",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Debug",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Clone",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "derive",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Debug",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })),
|
|
+ Meta(Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Clone",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_union() {
|
|
let input = quote! {
|
|
union MaybeUninit<T> {
|
|
uninit: (),
|
|
value: T
|
|
}
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "MaybeUninit",
|
|
- ⋮ generics: Generics {
|
|
- ⋮ lt_token: Some,
|
|
- ⋮ params: [
|
|
- ⋮ Type(TypeParam {
|
|
- ⋮ ident: "T",
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ gt_token: Some,
|
|
- ⋮ },
|
|
- ⋮ data: Data::Union {
|
|
- ⋮ fields: FieldsNamed {
|
|
- ⋮ named: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: Some("uninit"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Tuple,
|
|
- ⋮ },
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: Some("value"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "T",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "MaybeUninit",
|
|
+ generics: Generics {
|
|
+ lt_token: Some,
|
|
+ params: [
|
|
+ Type(TypeParam {
|
|
+ ident: "T",
|
|
+ }),
|
|
+ ],
|
|
+ gt_token: Some,
|
|
+ },
|
|
+ data: Data::Union {
|
|
+ fields: FieldsNamed {
|
|
+ named: [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ident: Some("uninit"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Tuple,
|
|
+ },
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ident: Some("value"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "T",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
#[cfg(feature = "full")]
|
|
fn test_enum() {
|
|
let input = quote! {
|
|
/// See the std::result module documentation for details.
|
|
@@ -207,472 +202,472 @@ fn test_enum() {
|
|
|
|
// Smuggling data into a proc_macro_derive,
|
|
// in the style of https://github.com/dtolnay/proc-macro-hack
|
|
ProcMacroHack = (0, "data").0
|
|
}
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ attrs: [
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "doc",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: `= r" See the std::result module documentation for details."`,
|
|
- ⋮ },
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "must_use",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: ``,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ident: "Result",
|
|
- ⋮ generics: Generics {
|
|
- ⋮ lt_token: Some,
|
|
- ⋮ params: [
|
|
- ⋮ Type(TypeParam {
|
|
- ⋮ ident: "T",
|
|
- ⋮ }),
|
|
- ⋮ Type(TypeParam {
|
|
- ⋮ ident: "E",
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ gt_token: Some,
|
|
- ⋮ },
|
|
- ⋮ data: Data::Enum {
|
|
- ⋮ variants: [
|
|
- ⋮ Variant {
|
|
- ⋮ ident: "Ok",
|
|
- ⋮ fields: Fields::Unnamed {
|
|
- ⋮ unnamed: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "T",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Variant {
|
|
- ⋮ ident: "Err",
|
|
- ⋮ fields: Fields::Unnamed {
|
|
- ⋮ unnamed: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "E",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Variant {
|
|
- ⋮ ident: "Surprise",
|
|
- ⋮ fields: Unit,
|
|
- ⋮ discriminant: Some(Expr::Lit {
|
|
- ⋮ lit: 0isize,
|
|
- ⋮ }),
|
|
- ⋮ },
|
|
- ⋮ Variant {
|
|
- ⋮ ident: "ProcMacroHack",
|
|
- ⋮ fields: Unit,
|
|
- ⋮ discriminant: Some(Expr::Field {
|
|
- ⋮ base: Expr::Tuple {
|
|
- ⋮ elems: [
|
|
- ⋮ Expr::Lit {
|
|
- ⋮ lit: 0,
|
|
- ⋮ },
|
|
- ⋮ Expr::Lit {
|
|
- ⋮ lit: "data",
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ member: Unnamed(Index {
|
|
- ⋮ index: 0,
|
|
- ⋮ }),
|
|
- ⋮ }),
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "doc",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
|
|
+ },
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "must_use",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ ],
|
|
+ vis: Visibility::Public,
|
|
+ ident: "Result",
|
|
+ generics: Generics {
|
|
+ lt_token: Some,
|
|
+ params: [
|
|
+ Type(TypeParam {
|
|
+ ident: "T",
|
|
+ }),
|
|
+ Type(TypeParam {
|
|
+ ident: "E",
|
|
+ }),
|
|
+ ],
|
|
+ gt_token: Some,
|
|
+ },
|
|
+ data: Data::Enum {
|
|
+ variants: [
|
|
+ Variant {
|
|
+ ident: "Ok",
|
|
+ fields: Fields::Unnamed {
|
|
+ unnamed: [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "T",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ Variant {
|
|
+ ident: "Err",
|
|
+ fields: Fields::Unnamed {
|
|
+ unnamed: [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "E",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ Variant {
|
|
+ ident: "Surprise",
|
|
+ fields: Unit,
|
|
+ discriminant: Some(Expr::Lit {
|
|
+ lit: 0isize,
|
|
+ }),
|
|
+ },
|
|
+ Variant {
|
|
+ ident: "ProcMacroHack",
|
|
+ fields: Unit,
|
|
+ discriminant: Some(Expr::Field {
|
|
+ base: Expr::Tuple {
|
|
+ elems: [
|
|
+ Expr::Lit {
|
|
+ lit: 0,
|
|
+ },
|
|
+ Expr::Lit {
|
|
+ lit: "data",
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ member: Unnamed(Index {
|
|
+ index: 0,
|
|
+ }),
|
|
+ }),
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
let meta_items: Vec<_> = input
|
|
.attrs
|
|
.into_iter()
|
|
.map(|attr| attr.parse_meta().unwrap())
|
|
.collect();
|
|
|
|
snapshot!(meta_items, @r###"
|
|
- ⋮[
|
|
- ⋮ Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "doc",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: " See the std::result module documentation for details.",
|
|
- ⋮ },
|
|
- ⋮ Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "must_use",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮]
|
|
+ [
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "doc",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: " See the std::result module documentation for details.",
|
|
+ },
|
|
+ Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "must_use",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ }),
|
|
+ ]
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_attr_with_path() {
|
|
let input = quote! {
|
|
#[::attr_args::identity
|
|
fn main() { assert_eq!(foo(), "Hello, world!"); }]
|
|
struct Dummy;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ attrs: [
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ leading_colon: Some,
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "attr_args",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "identity",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "Dummy",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ leading_colon: Some,
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "attr_args",
|
|
+ arguments: None,
|
|
+ },
|
|
+ PathSegment {
|
|
+ ident: "identity",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
|
|
+ },
|
|
+ ],
|
|
+ vis: Inherited,
|
|
+ ident: "Dummy",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
assert!(input.attrs[0].parse_meta().is_err());
|
|
}
|
|
|
|
#[test]
|
|
fn test_attr_with_non_mod_style_path() {
|
|
let input = quote! {
|
|
#[inert <T>]
|
|
struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ attrs: [
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "inert",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: `< T >`,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "inert",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(`< T >`),
|
|
+ },
|
|
+ ],
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
assert!(input.attrs[0].parse_meta().is_err());
|
|
}
|
|
|
|
#[test]
|
|
fn test_attr_with_mod_style_path_with_self() {
|
|
let input = quote! {
|
|
#[foo::self]
|
|
struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ attrs: [
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "self",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: ``,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ PathSegment {
|
|
+ ident: "self",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ ],
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
|
|
- ⋮Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "self",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮})
|
|
+ Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ PathSegment {
|
|
+ ident: "self",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_pub_restricted() {
|
|
// Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
|
|
let input = quote! {
|
|
pub(in m) struct Z(pub(in m::n) u8);
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Visibility::Restricted {
|
|
- ⋮ in_token: Some,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "m",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ident: "Z",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Fields::Unnamed {
|
|
- ⋮ unnamed: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Restricted {
|
|
- ⋮ in_token: Some,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "m",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "n",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "u8",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Visibility::Restricted {
|
|
+ in_token: Some,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "m",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ident: "Z",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Fields::Unnamed {
|
|
+ unnamed: [
|
|
+ Field {
|
|
+ vis: Visibility::Restricted {
|
|
+ in_token: Some,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "m",
|
|
+ arguments: None,
|
|
+ },
|
|
+ PathSegment {
|
|
+ ident: "n",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "u8",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_vis_crate() {
|
|
let input = quote! {
|
|
crate struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Visibility::Crate,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Visibility::Crate,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_pub_restricted_crate() {
|
|
let input = quote! {
|
|
pub(crate) struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Visibility::Restricted {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "crate",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Visibility::Restricted {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "crate",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_pub_restricted_super() {
|
|
let input = quote! {
|
|
pub(super) struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Visibility::Restricted {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "super",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Visibility::Restricted {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "super",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_pub_restricted_in_super() {
|
|
let input = quote! {
|
|
pub(in super) struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Visibility::Restricted {
|
|
- ⋮ in_token: Some,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "super",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Visibility::Restricted {
|
|
+ in_token: Some,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "super",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_fields_on_unit_struct() {
|
|
let input = quote! {
|
|
struct S;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
let data = match input.data {
|
|
Data::Struct(data) => data,
|
|
_ => panic!("expected a struct"),
|
|
};
|
|
|
|
assert_eq!(0, data.fields.iter().count());
|
|
@@ -683,215 +678,215 @@ fn test_fields_on_named_struct() {
|
|
let input = quote! {
|
|
struct S {
|
|
foo: i32,
|
|
pub bar: String,
|
|
}
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Fields::Named {
|
|
- ⋮ named: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: Some("foo"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "i32",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ident: Some("bar"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "String",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Fields::Named {
|
|
+ named: [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ident: Some("foo"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "i32",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ Field {
|
|
+ vis: Visibility::Public,
|
|
+ ident: Some("bar"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "String",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
let data = match input.data {
|
|
Data::Struct(data) => data,
|
|
_ => panic!("expected a struct"),
|
|
};
|
|
|
|
snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
|
|
- ⋮[
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: Some("foo"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "i32",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ident: Some("bar"),
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "String",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮]
|
|
+ [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ident: Some("foo"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "i32",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ Field {
|
|
+ vis: Visibility::Public,
|
|
+ ident: Some("bar"),
|
|
+ colon_token: Some,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "String",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ]
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_fields_on_tuple_struct() {
|
|
let input = quote! {
|
|
struct S(i32, pub String);
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Fields::Unnamed {
|
|
- ⋮ unnamed: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "i32",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "String",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Fields::Unnamed {
|
|
+ unnamed: [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "i32",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ Field {
|
|
+ vis: Visibility::Public,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "String",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
let data = match input.data {
|
|
Data::Struct(data) => data,
|
|
_ => panic!("expected a struct"),
|
|
};
|
|
|
|
snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
|
|
- ⋮[
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "i32",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ Field {
|
|
- ⋮ vis: Visibility::Public,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "String",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮]
|
|
+ [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "i32",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ Field {
|
|
+ vis: Visibility::Public,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "String",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ]
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_ambiguous_crate() {
|
|
let input = quote! {
|
|
// The field type is `(crate::X)` not `crate (::X)`.
|
|
struct S(crate::X);
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics,
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Fields::Unnamed {
|
|
- ⋮ unnamed: [
|
|
- ⋮ Field {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "crate",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "X",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics,
|
|
+ data: Data::Struct {
|
|
+ fields: Fields::Unnamed {
|
|
+ unnamed: [
|
|
+ Field {
|
|
+ vis: Inherited,
|
|
+ ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "crate",
|
|
+ arguments: None,
|
|
+ },
|
|
+ PathSegment {
|
|
+ ident: "X",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
|
|
--- a/third_party/rust/syn/tests/test_expr.rs
|
|
+++ b/third_party/rust/syn/tests/test_expr.rs
|
|
@@ -1,40 +1,302 @@
|
|
-extern crate proc_macro2;
|
|
-extern crate syn;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
-use std::str::FromStr;
|
|
-
|
|
-use proc_macro2::TokenStream;
|
|
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
|
|
+use quote::quote;
|
|
+use std::iter::FromIterator;
|
|
use syn::{Expr, ExprRange};
|
|
|
|
#[test]
|
|
fn test_expr_parse() {
|
|
- let code = "..100u32";
|
|
- let tt = TokenStream::from_str(code).unwrap();
|
|
- let expr: Expr = syn::parse2(tt.clone()).unwrap();
|
|
- let expr_range: ExprRange = syn::parse2(tt).unwrap();
|
|
- assert_eq!(expr, Expr::Range(expr_range));
|
|
+ let tokens = quote!(..100u32);
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Range {
|
|
+ limits: HalfOpen,
|
|
+ to: Some(Expr::Lit {
|
|
+ lit: 100u32,
|
|
+ }),
|
|
+ }
|
|
+ "###);
|
|
+
|
|
+ let tokens = quote!(..100u32);
|
|
+ snapshot!(tokens as ExprRange, @r###"
|
|
+ ExprRange {
|
|
+ limits: HalfOpen,
|
|
+ to: Some(Expr::Lit {
|
|
+ lit: 100u32,
|
|
+ }),
|
|
+ }
|
|
+ "###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_await() {
|
|
// Must not parse as Expr::Field.
|
|
- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
|
|
+ let tokens = quote!(fut.await);
|
|
|
|
- snapshot!(expr, @r###"
|
|
- ⋮Expr::Await {
|
|
- ⋮ base: Expr::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "fut",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Await {
|
|
+ base: Expr::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "fut",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
+
|
|
+#[rustfmt::skip]
|
|
+#[test]
|
|
+fn test_tuple_multi_index() {
|
|
+ for &input in &[
|
|
+ "tuple.0.0",
|
|
+ "tuple .0.0",
|
|
+ "tuple. 0.0",
|
|
+ "tuple.0 .0",
|
|
+ "tuple.0. 0",
|
|
+ "tuple . 0 . 0",
|
|
+ ] {
|
|
+ snapshot!(input as Expr, @r###"
|
|
+ Expr::Field {
|
|
+ base: Expr::Field {
|
|
+ base: Expr::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "tuple",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ member: Unnamed(Index {
|
|
+ index: 0,
|
|
+ }),
|
|
+ },
|
|
+ member: Unnamed(Index {
|
|
+ index: 0,
|
|
+ }),
|
|
+ }
|
|
+ "###);
|
|
+ }
|
|
+
|
|
+ for tokens in vec![
|
|
+ quote!(tuple.0.0),
|
|
+ quote!(tuple .0.0),
|
|
+ quote!(tuple. 0.0),
|
|
+ quote!(tuple.0 .0),
|
|
+ quote!(tuple.0. 0),
|
|
+ quote!(tuple . 0 . 0),
|
|
+ ] {
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Field {
|
|
+ base: Expr::Field {
|
|
+ base: Expr::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "tuple",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ member: Unnamed(Index {
|
|
+ index: 0,
|
|
+ }),
|
|
+ },
|
|
+ member: Unnamed(Index {
|
|
+ index: 0,
|
|
+ }),
|
|
+ }
|
|
+ "###);
|
|
+ }
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn test_macro_variable_func() {
|
|
+ // mimics the token stream corresponding to `$fn()`
|
|
+ let tokens = TokenStream::from_iter(vec![
|
|
+ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
|
|
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
|
|
+ ]);
|
|
+
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Call {
|
|
+ func: Expr::Group {
|
|
+ expr: Expr::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "f",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ }
|
|
+ "###);
|
|
+
|
|
+ let tokens = TokenStream::from_iter(vec![
|
|
+ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
|
|
+ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
|
|
+ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
|
|
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
|
|
+ ]);
|
|
+
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Call {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "outside",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ ],
|
|
+ func: Expr::Group {
|
|
+ expr: Expr::Path {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "inside",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ ],
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "f",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ }
|
|
+ "###);
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn test_macro_variable_macro() {
|
|
+ // mimics the token stream corresponding to `$macro!()`
|
|
+ let tokens = TokenStream::from_iter(vec![
|
|
+ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
|
|
+ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
|
|
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
|
|
+ ]);
|
|
+
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Macro {
|
|
+ mac: Macro {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "m",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ delimiter: Paren,
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ }
|
|
+ "###);
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn test_macro_variable_struct() {
|
|
+ // mimics the token stream corresponding to `$struct {}`
|
|
+ let tokens = TokenStream::from_iter(vec![
|
|
+ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
|
|
+ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
|
|
+ ]);
|
|
+
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Struct {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "S",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ }
|
|
+ "###);
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn test_macro_variable_match_arm() {
|
|
+ // mimics the token stream corresponding to `match v { _ => $expr }`
|
|
+ let tokens = TokenStream::from_iter(vec![
|
|
+ TokenTree::Ident(Ident::new("match", Span::call_site())),
|
|
+ TokenTree::Ident(Ident::new("v", Span::call_site())),
|
|
+ TokenTree::Group(Group::new(
|
|
+ Delimiter::Brace,
|
|
+ TokenStream::from_iter(vec![
|
|
+ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
|
|
+ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
|
|
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
|
|
+ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
|
|
+ ]),
|
|
+ )),
|
|
+ ]);
|
|
+
|
|
+ snapshot!(tokens as Expr, @r###"
|
|
+ Expr::Match {
|
|
+ expr: Expr::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "v",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ arms: [
|
|
+ Arm {
|
|
+ pat: Pat::Wild,
|
|
+ body: Expr::Group {
|
|
+ expr: Expr::Tuple {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "a",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ }
|
|
+ "###);
|
|
+}
|
|
diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
|
|
--- a/third_party/rust/syn/tests/test_generics.rs
|
|
+++ b/third_party/rust/syn/tests/test_generics.rs
|
|
@@ -1,110 +1,105 @@
|
|
-extern crate quote;
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
use quote::quote;
|
|
use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
|
|
|
|
#[test]
|
|
fn test_split_for_impl() {
|
|
let input = quote! {
|
|
struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
|
|
};
|
|
|
|
snapshot!(input as DeriveInput, @r###"
|
|
- ⋮DeriveInput {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ ident: "S",
|
|
- ⋮ generics: Generics {
|
|
- ⋮ lt_token: Some,
|
|
- ⋮ params: [
|
|
- ⋮ Lifetime(LifetimeDef {
|
|
- ⋮ lifetime: Lifetime {
|
|
- ⋮ ident: "a",
|
|
- ⋮ },
|
|
- ⋮ }),
|
|
- ⋮ Lifetime(LifetimeDef {
|
|
- ⋮ lifetime: Lifetime {
|
|
- ⋮ ident: "b",
|
|
- ⋮ },
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ bounds: [
|
|
- ⋮ Lifetime {
|
|
- ⋮ ident: "a",
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ Type(TypeParam {
|
|
- ⋮ attrs: [
|
|
- ⋮ Attribute {
|
|
- ⋮ style: Outer,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "may_dangle",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ tokens: ``,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ ident: "T",
|
|
- ⋮ colon_token: Some,
|
|
- ⋮ bounds: [
|
|
- ⋮ Lifetime(Lifetime {
|
|
- ⋮ ident: "a",
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ eq_token: Some,
|
|
- ⋮ default: Some(Type::Tuple),
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ gt_token: Some,
|
|
- ⋮ where_clause: Some(WhereClause {
|
|
- ⋮ predicates: [
|
|
- ⋮ Type(PredicateType {
|
|
- ⋮ bounded_ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "T",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ bounds: [
|
|
- ⋮ Trait(TraitBound {
|
|
- ⋮ modifier: None,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Debug",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ },
|
|
- ⋮ data: Data::Struct {
|
|
- ⋮ fields: Unit,
|
|
- ⋮ semi_token: Some,
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ DeriveInput {
|
|
+ vis: Inherited,
|
|
+ ident: "S",
|
|
+ generics: Generics {
|
|
+ lt_token: Some,
|
|
+ params: [
|
|
+ Lifetime(LifetimeDef {
|
|
+ lifetime: Lifetime {
|
|
+ ident: "a",
|
|
+ },
|
|
+ }),
|
|
+ Lifetime(LifetimeDef {
|
|
+ lifetime: Lifetime {
|
|
+ ident: "b",
|
|
+ },
|
|
+ colon_token: Some,
|
|
+ bounds: [
|
|
+ Lifetime {
|
|
+ ident: "a",
|
|
+ },
|
|
+ ],
|
|
+ }),
|
|
+ Type(TypeParam {
|
|
+ attrs: [
|
|
+ Attribute {
|
|
+ style: Outer,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "may_dangle",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ tokens: TokenStream(``),
|
|
+ },
|
|
+ ],
|
|
+ ident: "T",
|
|
+ colon_token: Some,
|
|
+ bounds: [
|
|
+ Lifetime(Lifetime {
|
|
+ ident: "a",
|
|
+ }),
|
|
+ ],
|
|
+ eq_token: Some,
|
|
+ default: Some(Type::Tuple),
|
|
+ }),
|
|
+ ],
|
|
+ gt_token: Some,
|
|
+ where_clause: Some(WhereClause {
|
|
+ predicates: [
|
|
+ Type(PredicateType {
|
|
+ bounded_ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "T",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ bounds: [
|
|
+ Trait(TraitBound {
|
|
+ modifier: None,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Debug",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ }),
|
|
+ ],
|
|
+ }),
|
|
+ ],
|
|
+ }),
|
|
+ },
|
|
+ data: Data::Struct {
|
|
+ fields: Unit,
|
|
+ semi_token: Some,
|
|
+ },
|
|
+ }
|
|
"###);
|
|
|
|
let generics = input.generics;
|
|
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
|
|
|
|
let generated = quote! {
|
|
impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
|
|
};
|
|
@@ -126,156 +121,156 @@ fn test_split_for_impl() {
|
|
};
|
|
assert_eq!(generated.to_string(), expected.to_string());
|
|
}
|
|
|
|
#[test]
|
|
fn test_ty_param_bound() {
|
|
let tokens = quote!('a);
|
|
snapshot!(tokens as TypeParamBound, @r###"
|
|
- ⋮Lifetime(Lifetime {
|
|
- ⋮ ident: "a",
|
|
- ⋮})
|
|
+ Lifetime(Lifetime {
|
|
+ ident: "a",
|
|
+ })
|
|
"###);
|
|
|
|
let tokens = quote!('_);
|
|
snapshot!(tokens as TypeParamBound, @r###"
|
|
- ⋮Lifetime(Lifetime {
|
|
- ⋮ ident: "_",
|
|
- ⋮})
|
|
+ Lifetime(Lifetime {
|
|
+ ident: "_",
|
|
+ })
|
|
"###);
|
|
|
|
let tokens = quote!(Debug);
|
|
snapshot!(tokens as TypeParamBound, @r###"
|
|
- ⋮Trait(TraitBound {
|
|
- ⋮ modifier: None,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Debug",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮})
|
|
+ Trait(TraitBound {
|
|
+ modifier: None,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Debug",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ })
|
|
"###);
|
|
|
|
let tokens = quote!(?Sized);
|
|
snapshot!(tokens as TypeParamBound, @r###"
|
|
- ⋮Trait(TraitBound {
|
|
- ⋮ modifier: Maybe,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Sized",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮})
|
|
+ Trait(TraitBound {
|
|
+ modifier: Maybe,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Sized",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ })
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_fn_precedence_in_where_clause() {
|
|
// This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
|
|
// `FnOnce() -> (i32 + Send)`.
|
|
let input = quote! {
|
|
fn f<G>()
|
|
where
|
|
G: FnOnce() -> i32 + Send,
|
|
{
|
|
}
|
|
};
|
|
|
|
snapshot!(input as ItemFn, @r###"
|
|
- ⋮ItemFn {
|
|
- ⋮ vis: Inherited,
|
|
- ⋮ sig: Signature {
|
|
- ⋮ ident: "f",
|
|
- ⋮ generics: Generics {
|
|
- ⋮ lt_token: Some,
|
|
- ⋮ params: [
|
|
- ⋮ Type(TypeParam {
|
|
- ⋮ ident: "G",
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ gt_token: Some,
|
|
- ⋮ where_clause: Some(WhereClause {
|
|
- ⋮ predicates: [
|
|
- ⋮ Type(PredicateType {
|
|
- ⋮ bounded_ty: Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "G",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ bounds: [
|
|
- ⋮ Trait(TraitBound {
|
|
- ⋮ modifier: None,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "FnOnce",
|
|
- ⋮ arguments: PathArguments::Parenthesized {
|
|
- ⋮ output: Type(
|
|
- ⋮ Type::Path {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "i32",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ),
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ }),
|
|
- ⋮ Trait(TraitBound {
|
|
- ⋮ modifier: None,
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "Send",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ },
|
|
- ⋮ output: Default,
|
|
- ⋮ },
|
|
- ⋮ block: Block,
|
|
- ⋮}
|
|
+ ItemFn {
|
|
+ vis: Inherited,
|
|
+ sig: Signature {
|
|
+ ident: "f",
|
|
+ generics: Generics {
|
|
+ lt_token: Some,
|
|
+ params: [
|
|
+ Type(TypeParam {
|
|
+ ident: "G",
|
|
+ }),
|
|
+ ],
|
|
+ gt_token: Some,
|
|
+ where_clause: Some(WhereClause {
|
|
+ predicates: [
|
|
+ Type(PredicateType {
|
|
+ bounded_ty: Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "G",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ bounds: [
|
|
+ Trait(TraitBound {
|
|
+ modifier: None,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "FnOnce",
|
|
+ arguments: PathArguments::Parenthesized {
|
|
+ output: Type(
|
|
+ Type::Path {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "i32",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ },
|
|
+ ),
|
|
+ },
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ }),
|
|
+ Trait(TraitBound {
|
|
+ modifier: None,
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "Send",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ }),
|
|
+ ],
|
|
+ }),
|
|
+ ],
|
|
+ }),
|
|
+ },
|
|
+ output: Default,
|
|
+ },
|
|
+ block: Block,
|
|
+ }
|
|
"###);
|
|
|
|
let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
|
|
assert_eq!(where_clause.predicates.len(), 1);
|
|
|
|
let predicate = match &where_clause.predicates[0] {
|
|
WherePredicate::Type(pred) => pred,
|
|
_ => panic!("wrong predicate kind"),
|
|
};
|
|
|
|
assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
|
|
|
|
let first_bound = &predicate.bounds[0];
|
|
- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
|
|
+ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
|
|
|
|
let second_bound = &predicate.bounds[1];
|
|
assert_eq!(quote!(#second_bound).to_string(), "Send");
|
|
}
|
|
|
|
#[test]
|
|
fn test_where_clause_at_end_of_input() {
|
|
let input = quote! {
|
|
diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
|
|
--- a/third_party/rust/syn/tests/test_grouping.rs
|
|
+++ b/third_party/rust/syn/tests/test_grouping.rs
|
|
@@ -1,13 +1,8 @@
|
|
-extern crate proc_macro2;
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
|
|
use syn::Expr;
|
|
|
|
use std::iter::FromIterator;
|
|
|
|
@@ -23,36 +18,36 @@ fn test_grouping() {
|
|
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
|
|
TokenTree::Literal(Literal::i32_suffixed(3)),
|
|
]),
|
|
)),
|
|
TokenTree::Punct(Punct::new('*', Spacing::Alone)),
|
|
TokenTree::Literal(Literal::i32_suffixed(4)),
|
|
]);
|
|
|
|
- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
|
|
+ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
|
|
|
|
snapshot!(tokens as Expr, @r###"
|
|
- ⋮Expr::Binary {
|
|
- ⋮ left: Expr::Lit {
|
|
- ⋮ lit: 1i32,
|
|
- ⋮ },
|
|
- ⋮ op: Add,
|
|
- ⋮ right: Expr::Binary {
|
|
- ⋮ left: Expr::Group {
|
|
- ⋮ expr: Expr::Binary {
|
|
- ⋮ left: Expr::Lit {
|
|
- ⋮ lit: 2i32,
|
|
- ⋮ },
|
|
- ⋮ op: Add,
|
|
- ⋮ right: Expr::Lit {
|
|
- ⋮ lit: 3i32,
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮ op: Mul,
|
|
- ⋮ right: Expr::Lit {
|
|
- ⋮ lit: 4i32,
|
|
- ⋮ },
|
|
- ⋮ },
|
|
- ⋮}
|
|
+ Expr::Binary {
|
|
+ left: Expr::Lit {
|
|
+ lit: 1i32,
|
|
+ },
|
|
+ op: Add,
|
|
+ right: Expr::Binary {
|
|
+ left: Expr::Group {
|
|
+ expr: Expr::Binary {
|
|
+ left: Expr::Lit {
|
|
+ lit: 2i32,
|
|
+ },
|
|
+ op: Add,
|
|
+ right: Expr::Lit {
|
|
+ lit: 3i32,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+ op: Mul,
|
|
+ right: Expr::Lit {
|
|
+ lit: 4i32,
|
|
+ },
|
|
+ },
|
|
+ }
|
|
"###);
|
|
}
|
|
diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
--- a/third_party/rust/syn/tests/test_ident.rs
+++ b/third_party/rust/syn/tests/test_ident.rs
@@ -1,13 +1,8 @@
-extern crate proc_macro2;
-extern crate syn;
-
-mod features;
-
use proc_macro2::{Ident, Span, TokenStream};
use std::str::FromStr;
use syn::Result;
fn parse(s: &str) -> Result<Ident> {
syn::parse2(TokenStream::from_str(s).unwrap())
}
diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_item.rs
@@ -0,0 +1,45 @@
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+use quote::quote;
+use std::iter::FromIterator;
+use syn::Item;
+
+#[test]
+fn test_macro_variable_attr() {
+ // mimics the token stream corresponding to `$attr fn f() {}`
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
+ TokenTree::Ident(Ident::new("fn", Span::call_site())),
+ TokenTree::Ident(Ident::new("f", Span::call_site())),
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
+ ]);
+
+ snapshot!(tokens as Item, @r###"
+ Item::Fn {
+ attrs: [
+ Attribute {
+ style: Outer,
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "test",
+ arguments: None,
+ },
+ ],
+ },
+ tokens: TokenStream(``),
+ },
+ ],
+ vis: Inherited,
+ sig: Signature {
+ ident: "f",
+ generics: Generics,
+ output: Default,
+ },
+ block: Block,
+ }
+ "###);
+}
diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
|
|
--- a/third_party/rust/syn/tests/test_iterators.rs
|
|
+++ b/third_party/rust/syn/tests/test_iterators.rs
|
|
@@ -1,15 +1,10 @@
|
|
use syn::punctuated::{Pair, Punctuated};
|
|
-
|
|
-extern crate quote;
|
|
-#[macro_use]
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
+use syn::Token;
|
|
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
macro_rules! check_exact_size_iterator {
|
|
($iter:expr) => {{
|
|
let iter = $iter;
|
|
let size_hint = iter.size_hint();
|
|
diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
|
|
--- a/third_party/rust/syn/tests/test_lit.rs
|
|
+++ b/third_party/rust/syn/tests/test_lit.rs
|
|
@@ -1,18 +1,16 @@
|
|
-extern crate proc_macro2;
|
|
-extern crate quote;
|
|
-extern crate syn;
|
|
+#[macro_use]
|
|
+mod macros;
|
|
|
|
-mod features;
|
|
-
|
|
-use proc_macro2::{TokenStream, TokenTree};
|
|
+use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
|
|
use quote::ToTokens;
|
|
+use std::iter::FromIterator;
|
|
use std::str::FromStr;
|
|
-use syn::Lit;
|
|
+use syn::{Lit, LitFloat, LitInt};
|
|
|
|
fn lit(s: &str) -> Lit {
|
|
match TokenStream::from_str(s)
|
|
.unwrap()
|
|
.into_iter()
|
|
.next()
|
|
.unwrap()
|
|
{
|
|
@@ -45,16 +43,19 @@ fn strings() {
|
|
test_string("\"'\"", "'");
|
|
test_string("\"\"", "");
|
|
test_string("\"\\u{1F415}\"", "\u{1F415}");
|
|
test_string(
|
|
"\"contains\nnewlines\\\nescaped newlines\"",
|
|
"contains\nnewlinesescaped newlines",
|
|
);
|
|
test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
|
|
+ test_string("\"...\"q", "...");
|
|
+ test_string("r\"...\"q", "...");
|
|
+ test_string("r##\"...\"##q", "...");
|
|
}
|
|
|
|
#[test]
|
|
fn byte_strings() {
|
|
fn test_byte_string(s: &str, value: &[u8]) {
|
|
match lit(s) {
|
|
Lit::ByteStr(lit) => {
|
|
assert_eq!(lit.value(), value);
|
|
@@ -74,16 +75,19 @@ fn byte_strings() {
|
|
test_byte_string("b\"\\\"\"", b"\"");
|
|
test_byte_string("b\"'\"", b"'");
|
|
test_byte_string("b\"\"", b"");
|
|
test_byte_string(
|
|
"b\"contains\nnewlines\\\nescaped newlines\"",
|
|
b"contains\nnewlinesescaped newlines",
|
|
);
|
|
test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
|
|
+ test_byte_string("b\"...\"q", b"...");
|
|
+ test_byte_string("br\"...\"q", b"...");
|
|
+ test_byte_string("br##\"...\"##q", b"...");
|
|
}
|
|
|
|
#[test]
|
|
fn bytes() {
|
|
fn test_byte(s: &str, value: u8) {
|
|
match lit(s) {
|
|
Lit::Byte(lit) => {
|
|
assert_eq!(lit.value(), value);
|
|
@@ -95,16 +99,17 @@ fn bytes() {
|
|
}
|
|
|
|
test_byte("b'a'", b'a');
|
|
test_byte("b'\\n'", b'\n');
|
|
test_byte("b'\\r'", b'\r');
|
|
test_byte("b'\\t'", b'\t');
|
|
test_byte("b'\\''", b'\'');
|
|
test_byte("b'\"'", b'"');
|
|
+ test_byte("b'a'q", b'a');
|
|
}
|
|
|
|
#[test]
|
|
fn chars() {
|
|
fn test_char(s: &str, value: char) {
|
|
match lit(s) {
|
|
Lit::Char(lit) => {
|
|
assert_eq!(lit.value(), value);
|
|
@@ -120,16 +125,17 @@ fn chars() {
|
|
test_char("'a'", 'a');
|
|
test_char("'\\n'", '\n');
|
|
test_char("'\\r'", '\r');
|
|
test_char("'\\t'", '\t');
|
|
test_char("'🐕'", '🐕'); // NOTE: This is an emoji
|
|
test_char("'\\''", '\'');
|
|
test_char("'\"'", '"');
|
|
test_char("'\\u{1F415}'", '\u{1F415}');
|
|
+ test_char("'a'q", 'a');
|
|
}
|
|
|
|
#[test]
|
|
fn ints() {
|
|
fn test_int(s: &str, value: u64, suffix: &str) {
|
|
match lit(s) {
|
|
Lit::Int(lit) => {
|
|
assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
|
|
@@ -180,9 +186,64 @@ fn floats() {
|
|
}
|
|
}
|
|
|
|
test_float("5.5", 5.5, "");
|
|
test_float("5.5E12", 5.5e12, "");
|
|
test_float("5.5e12", 5.5e12, "");
|
|
test_float("1.0__3e-12", 1.03e-12, "");
|
|
test_float("1.03e+12", 1.03e12, "");
|
|
+ test_float("9e99e99", 9e99, "e99");
|
|
}
|
|
+
|
|
+#[test]
|
|
+fn negative() {
|
|
+ let span = Span::call_site();
|
|
+ assert_eq!("-1", LitInt::new("-1", span).to_string());
|
|
+ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
|
|
+ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
|
|
+ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
|
|
+ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
|
|
+ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
|
|
+ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
|
|
+ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn suffix() {
|
|
+ fn get_suffix(token: &str) -> String {
|
|
+ let lit = syn::parse_str::<Lit>(token).unwrap();
|
|
+ match lit {
|
|
+ Lit::Str(lit) => lit.suffix().to_owned(),
|
|
+ Lit::ByteStr(lit) => lit.suffix().to_owned(),
|
|
+ Lit::Byte(lit) => lit.suffix().to_owned(),
|
|
+ Lit::Char(lit) => lit.suffix().to_owned(),
|
|
+ Lit::Int(lit) => lit.suffix().to_owned(),
|
|
+ Lit::Float(lit) => lit.suffix().to_owned(),
|
|
+ _ => unimplemented!(),
|
|
+ }
|
|
+ }
|
|
+
|
|
+ assert_eq!(get_suffix("\"\"s"), "s");
|
|
+ assert_eq!(get_suffix("r\"\"r"), "r");
|
|
+ assert_eq!(get_suffix("b\"\"b"), "b");
|
|
+ assert_eq!(get_suffix("br\"\"br"), "br");
|
|
+ assert_eq!(get_suffix("r#\"\"#r"), "r");
|
|
+ assert_eq!(get_suffix("'c'c"), "c");
|
|
+ assert_eq!(get_suffix("b'b'b"), "b");
|
|
+ assert_eq!(get_suffix("1i32"), "i32");
|
|
+ assert_eq!(get_suffix("1_i32"), "i32");
|
|
+ assert_eq!(get_suffix("1.0f32"), "f32");
|
|
+ assert_eq!(get_suffix("1.0_f32"), "f32");
|
|
+}
|
|
+
|
|
+#[test]
|
|
+fn test_deep_group_empty() {
|
|
+ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
|
|
+ Delimiter::None,
|
|
+ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
|
|
+ Delimiter::None,
|
|
+ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
|
|
+ ))]),
|
|
+ ))]);
|
|
+
|
|
+ snapshot!(tokens as Lit, @r#""hi""# );
|
|
+}
|
|
diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
|
|
--- a/third_party/rust/syn/tests/test_meta.rs
|
|
+++ b/third_party/rust/syn/tests/test_meta.rs
|
|
@@ -1,343 +1,339 @@
|
|
-extern crate syn;
|
|
-
|
|
-mod features;
|
|
-
|
|
#[macro_use]
|
|
mod macros;
|
|
|
|
use syn::{Meta, MetaList, MetaNameValue, NestedMeta};
|
|
|
|
#[test]
|
|
fn test_parse_meta_item_word() {
|
|
let input = "hello";
|
|
|
|
snapshot!(input as Meta, @r###"
|
|
- ⋮Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "hello",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮})
|
|
+ Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "hello",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })
|
|
"###);
|
|
}
|
|
|
|
#[test]
|
|
fn test_parse_meta_name_value() {
|
|
let input = "foo = 5";
|
|
let (inner, meta) = (input, input);
|
|
|
|
snapshot!(inner as MetaNameValue, @r###"
|
|
- ⋮MetaNameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ MetaNameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
|
|
snapshot!(meta as Meta, @r###"
|
|
- ⋮Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
|
|
assert_eq!(meta, inner.into());
|
|
}
|
|
|
|
#[test]
|
|
fn test_parse_meta_name_value_with_keyword() {
|
|
let input = "static = 5";
|
|
let (inner, meta) = (input, input);
|
|
|
|
snapshot!(inner as MetaNameValue, @r###"
|
|
- ⋮MetaNameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "static",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ MetaNameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "static",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
|
|
snapshot!(meta as Meta, @r###"
|
|
- ⋮Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "static",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "static",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
|
|
assert_eq!(meta, inner.into());
|
|
}
|
|
|
|
#[test]
|
|
fn test_parse_meta_name_value_with_bool() {
|
|
let input = "true = 5";
|
|
let (inner, meta) = (input, input);
|
|
|
|
snapshot!(inner as MetaNameValue, @r###"
|
|
- ⋮MetaNameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "true",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ MetaNameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "true",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
|
|
snapshot!(meta as Meta, @r###"
|
|
- ⋮Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "true",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮}
|
|
+ Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "true",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ lit: 5,
|
|
+ }
|
|
"###);
|
|
|
|
assert_eq!(meta, inner.into());
|
|
}
|
|
|
|
#[test]
|
|
fn test_parse_meta_item_list_lit() {
|
|
let input = "foo(5)";
|
|
let (inner, meta) = (input, input);
|
|
|
|
snapshot!(inner as MetaList, @r###"
|
|
- ⋮MetaList {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Lit(5),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ MetaList {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Lit(5),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
|
|
snapshot!(meta as Meta, @r###"
|
|
- ⋮Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Lit(5),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ Meta::List {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Lit(5),
|
|
+ ],
|
|
+ }
|
|
"###);
|
|
|
|
assert_eq!(meta, inner.into());
|
|
}
|
|
|
|
#[test]
|
|
fn test_parse_meta_item_multiple() {
|
|
let input = "foo(word, name = 5, list(name2 = 6), word2)";
|
|
let (inner, meta) = (input, input);
|
|
|
|
snapshot!(inner as MetaList, @r###"
|
|
- ⋮MetaList {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "foo",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "word",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ Meta(Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "name",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 5,
|
|
- ⋮ }),
|
|
- ⋮ Meta(Meta::List {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "list",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ nested: [
|
|
- ⋮ Meta(Meta::NameValue {
|
|
- ⋮ path: Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "name2",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ },
|
|
- ⋮ lit: 6,
|
|
- ⋮ }),
|
|
- ⋮ ],
|
|
- ⋮ }),
|
|
- ⋮ Meta(Path(Path {
|
|
- ⋮ segments: [
|
|
- ⋮ PathSegment {
|
|
- ⋮ ident: "word2",
|
|
- ⋮ arguments: None,
|
|
- ⋮ },
|
|
- ⋮ ],
|
|
- ⋮ })),
|
|
- ⋮ ],
|
|
- ⋮}
|
|
+ MetaList {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "foo",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
|
|
+ nested: [
|
|
+ Meta(Path(Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "word",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ })),
|
|
+ Meta(Meta::NameValue {
|
|
+ path: Path {
|
|
+ segments: [
|
|
+ PathSegment {
|
|
+ ident: "name",
|
|
+ arguments: None,
|
|
+ },
|
|
+ ],
|
|
+ },
+ lit: 5,
+ }),
+ Meta(Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "list",
+ arguments: None,
+ },
+ ],
+ },
+ nested: [
+ Meta(Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "name2",
+ arguments: None,
+ },
+ ],
+ },
+ lit: 6,
+ }),
+ ],
+ }),
+ Meta(Path(Path {
+ segments: [
+ PathSegment {
+ ident: "word2",
+ arguments: None,
+ },
+ ],
+ })),
+ ],
+ }
 "###);

 snapshot!(meta as Meta, @r###"
- ⋮Meta::List {
- ⋮    path: Path {
- ⋮        segments: [
- ⋮            PathSegment {
- ⋮                ident: "foo",
- ⋮                arguments: None,
- ⋮            },
- ⋮        ],
- ⋮    },
- ⋮    nested: [
- ⋮        Meta(Path(Path {
- ⋮            segments: [
- ⋮                PathSegment {
- ⋮                    ident: "word",
- ⋮                    arguments: None,
- ⋮                },
- ⋮            ],
- ⋮        })),
- ⋮        Meta(Meta::NameValue {
- ⋮            path: Path {
- ⋮                segments: [
- ⋮                    PathSegment {
- ⋮                        ident: "name",
- ⋮                        arguments: None,
- ⋮                    },
- ⋮                ],
- ⋮            },
- ⋮            lit: 5,
- ⋮        }),
- ⋮        Meta(Meta::List {
- ⋮            path: Path {
- ⋮                segments: [
- ⋮                    PathSegment {
- ⋮                        ident: "list",
- ⋮                        arguments: None,
- ⋮                    },
- ⋮                ],
- ⋮            },
- ⋮            nested: [
- ⋮                Meta(Meta::NameValue {
- ⋮                    path: Path {
- ⋮                        segments: [
- ⋮                            PathSegment {
- ⋮                                ident: "name2",
- ⋮                                arguments: None,
- ⋮                            },
- ⋮                        ],
- ⋮                    },
- ⋮                    lit: 6,
- ⋮                }),
- ⋮            ],
- ⋮        }),
- ⋮        Meta(Path(Path {
- ⋮            segments: [
- ⋮                PathSegment {
- ⋮                    ident: "word2",
- ⋮                    arguments: None,
- ⋮                },
- ⋮            ],
- ⋮        })),
- ⋮    ],
- ⋮}
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ arguments: None,
+ },
+ ],
+ },
+ nested: [
+ Meta(Path(Path {
+ segments: [
+ PathSegment {
+ ident: "word",
+ arguments: None,
+ },
+ ],
+ })),
+ Meta(Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "name",
+ arguments: None,
+ },
+ ],
+ },
+ lit: 5,
+ }),
+ Meta(Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "list",
+ arguments: None,
+ },
+ ],
+ },
+ nested: [
+ Meta(Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "name2",
+ arguments: None,
+ },
+ ],
+ },
+ lit: 6,
+ }),
+ ],
+ }),
+ Meta(Path(Path {
+ segments: [
+ PathSegment {
+ ident: "word2",
+ arguments: None,
+ },
+ ],
+ })),
+ ],
+ }
 "###);

 assert_eq!(meta, inner.into());
 }

 #[test]
 fn test_parse_nested_meta() {
 let input = "5";
 snapshot!(input as NestedMeta, @"Lit(5)");

 let input = "list(name2 = 6)";
 snapshot!(input as NestedMeta, @r###"
- ⋮Meta(Meta::List {
- ⋮    path: Path {
- ⋮        segments: [
- ⋮            PathSegment {
- ⋮                ident: "list",
- ⋮                arguments: None,
- ⋮            },
- ⋮        ],
- ⋮    },
- ⋮    nested: [
- ⋮        Meta(Meta::NameValue {
- ⋮            path: Path {
- ⋮                segments: [
- ⋮                    PathSegment {
- ⋮                        ident: "name2",
- ⋮                        arguments: None,
- ⋮                    },
- ⋮                ],
- ⋮            },
- ⋮            lit: 6,
- ⋮        }),
- ⋮    ],
- ⋮})
+ Meta(Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "list",
+ arguments: None,
+ },
+ ],
+ },
+ nested: [
+ Meta(Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "name2",
+ arguments: None,
+ },
+ ],
+ },
+ lit: 6,
+ }),
+ ],
+ })
 "###);
 }
diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
--- a/third_party/rust/syn/tests/test_parse_buffer.rs
+++ b/third_party/rust/syn/tests/test_parse_buffer.rs
@@ -1,12 +1,12 @@
-#[macro_use]
-extern crate syn;
-
+use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
+use std::iter::FromIterator;
 use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
+use syn::{parenthesized, Token};

 #[test]
 #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
 fn smuggled_speculative_cursor_between_sources() {
 struct BreakRules;
 impl Parse for BreakRules {
 fn parse(input1: ParseStream) -> Result<Self> {
 let nested = |input2: ParseStream| {
@@ -48,8 +48,43 @@ fn smuggled_speculative_cursor_into_brac
 parenthesized!(a in input);
 input.advance_to(&a);
 Ok(Self)
 }
 }

 syn::parse_str::<BreakRules>("()").unwrap();
 }
+
+#[test]
+fn trailing_empty_none_group() {
+ fn parse(input: ParseStream) -> Result<()> {
+ input.parse::<Token![+]>()?;
+
+ let content;
+ parenthesized!(content in input);
+ content.parse::<Token![+]>()?;
+
+ Ok(())
+ }
+
+ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+ TokenTree::Group(Group::new(
+ Delimiter::Parenthesis,
+ TokenStream::from_iter(vec![
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ ]),
+ )),
+ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::new(),
+ ))]),
+ )),
+ ]);
+
+ parse.parse2(tokens).unwrap();
+}
diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_parse_stream.rs
@@ -0,0 +1,12 @@
+use syn::ext::IdentExt;
+use syn::parse::ParseStream;
+use syn::{Ident, Token};
+
+#[test]
+fn test_peek() {
+ let _ = |input: ParseStream| {
+ let _ = input.peek(Ident);
+ let _ = input.peek(Ident::peek_any);
+ let _ = input.peek(Token![::]);
+ };
+}
diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
--- a/third_party/rust/syn/tests/test_pat.rs
+++ b/third_party/rust/syn/tests/test_pat.rs
@@ -1,23 +1,38 @@
-extern crate quote;
-extern crate syn;
-
-mod features;
-
 use quote::quote;
-use syn::Pat;
+use syn::{Item, Pat, Stmt};

 #[test]
 fn test_pat_ident() {
 match syn::parse2(quote!(self)).unwrap() {
 Pat::Ident(_) => (),
 value => panic!("expected PatIdent, got {:?}", value),
 }
 }

 #[test]
 fn test_pat_path() {
 match syn::parse2(quote!(self::CONST)).unwrap() {
 Pat::Path(_) => (),
 value => panic!("expected PatPath, got {:?}", value),
 }
 }
+
+#[test]
+fn test_leading_vert() {
+ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
+
+ syn::parse_str::<Item>("fn f() {}").unwrap();
+ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
+ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
+
+ syn::parse_str::<Stmt>("let | () = ();").unwrap();
+ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
+ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
+ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
+ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
+ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
+ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
+ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
+ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
+ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
+}
diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_path.rs
@@ -0,0 +1,52 @@
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::quote;
+use std::iter::FromIterator;
+use syn::{Expr, Type};
+
+#[test]
+fn parse_interpolated_leading_component() {
+ // mimics the token stream corresponding to `$mod::rest`
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
+ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("rest", Span::call_site())),
+ ]);
+
+ snapshot!(tokens.clone() as Expr, @r###"
+ Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "first",
+ arguments: None,
+ },
+ PathSegment {
+ ident: "rest",
+ arguments: None,
+ },
+ ],
+ },
+ }
+ "###);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "first",
+ arguments: None,
+ },
+ PathSegment {
+ ident: "rest",
+ arguments: None,
+ },
+ ],
+ },
+ }
+ "###);
+}
diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
--- a/third_party/rust/syn/tests/test_precedence.rs
+++ b/third_party/rust/syn/tests/test_precedence.rs
@@ -1,43 +1,34 @@
 #![cfg(not(syn_disable_nightly_tests))]
 #![recursion_limit = "1024"]
 #![feature(rustc_private)]

 //! The tests in this module do the following:
 //!
-//! 1. Parse a given expression in both `syn` and `libsyntax`.
+//! 1. Parse a given expression in both `syn` and `librustc`.
 //! 2. Fold over the expression adding brackets around each subexpression (with
-//!    some complications - see the `syn_brackets` and `libsyntax_brackets`
+//!    some complications - see the `syn_brackets` and `librustc_brackets`
 //!    methods).
 //! 3. Serialize the `syn` expression back into a string, and re-parse it with
-//!    `libsyntax`.
+//!    `librustc`.
 //! 4. Respan all of the expressions, replacing the spans with the default
 //!    spans.
 //! 5. Compare the expressions with one another, if they are not equal fail.

-extern crate quote;
-extern crate rayon;
-extern crate regex;
+extern crate rustc_ast;
 extern crate rustc_data_structures;
-extern crate smallvec;
-extern crate syn;
-extern crate syntax;
-extern crate syntax_pos;
-extern crate walkdir;
-
-mod features;
+extern crate rustc_span;

 use quote::quote;
 use rayon::iter::{IntoParallelIterator, ParallelIterator};
 use regex::Regex;
-use smallvec::smallvec;
-use syntax::ast;
-use syntax::ptr::P;
-use syntax_pos::edition::Edition;
+use rustc_ast::ast;
+use rustc_ast::ptr::P;
+use rustc_span::edition::Edition;
 use walkdir::{DirEntry, WalkDir};

 use std::fs::File;
 use std::io::Read;
 use std::process;
 use std::sync::atomic::{AtomicUsize, Ordering};

 use common::eq::SpanlessEq;
@@ -68,17 +59,17 @@ fn test_simple_precedence() {
 for input in EXPRS {
 let expr = if let Some(expr) = parse::syn_expr(input) {
 expr
 } else {
 failed += 1;
 continue;
 };

- let pf = match test_expressions(vec![expr]) {
+ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
 (1, 0) => "passed",
 (0, 1) => {
 failed += 1;
 "failed"
 }
 _ => unreachable!(),
 };
 errorf!("=== {}: {}\n", input, pf);
@@ -86,18 +77,18 @@ fn test_simple_precedence() {

 if failed > 0 {
 panic!("Failed {} tests", failed);
 }
 }

 /// Test expressions from rustc, like in `test_round_trip`.
 #[test]
-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
 fn test_rustc_precedence() {
+ common::rayon_init();
 repo::clone_rust();
 let abort_after = common::abort_after();
 if abort_after == 0 {
 panic!("Skipping all precedence tests");
 }

 let passed = AtomicUsize::new(0);
 let failed = AtomicUsize::new(0);
@@ -113,34 +104,26 @@ fn test_rustc_precedence() {
 .unwrap()
 .into_par_iter()
 .for_each(|entry| {
 let path = entry.path();
 if path.is_dir() {
 return;
 }

- // Our version of `libsyntax` can't parse this tests
- if path
- .to_str()
- .unwrap()
- .ends_with("optional_comma_in_match_arm.rs")
- {
- return;
- }
-
 let mut file = File::open(path).unwrap();
 let mut content = String::new();
 file.read_to_string(&mut content).unwrap();
 let content = edition_regex.replace_all(&content, "_$0");

 let (l_passed, l_failed) = match syn::parse_file(&content) {
 Ok(file) => {
+ let edition = repo::edition(path).parse().unwrap();
 let exprs = collect_exprs(file);
- test_expressions(exprs)
+ test_expressions(edition, exprs)
 }
 Err(msg) => {
 errorf!("syn failed to parse\n{:?}\n", msg);
 (0, 1)
 }
 };

 errorf!(
@@ -164,147 +147,182 @@ fn test_rustc_precedence() {
 errorf!("\n===== Precedence Test Results =====\n");
 errorf!("{} passed | {} failed\n", passed, failed);

 if failed > 0 {
 panic!("{} failures", failed);
 }
 }

-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
+fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
 let mut passed = 0;
 let mut failed = 0;

- syntax::with_globals(Edition::Edition2018, || {
+ rustc_span::with_session_globals(edition, || {
 for expr in exprs {
 let raw = quote!(#expr).to_string();

- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
+ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
 e
 } else {
 failed += 1;
- errorf!("\nFAIL - libsyntax failed to parse raw\n");
+ errorf!("\nFAIL - librustc failed to parse raw\n");
 continue;
 };

 let syn_expr = syn_brackets(expr);
- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
+ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
 e
 } else {
 failed += 1;
- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
+ errorf!("\nFAIL - librustc failed to parse bracketed\n");
 continue;
 };

- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
+ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
 passed += 1;
 } else {
 failed += 1;
- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
+ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
 }
 }
 });

 (passed, failed)
 }

-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
+fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+ parse::librustc_expr(input).and_then(librustc_brackets)
 }

 /// Wrap every expression which is not already wrapped in parens with parens, to
 /// reveal the precidence of the parsed expressions, and produce a stringified
 /// form of the resulting expression.
 ///
-/// This method operates on libsyntax objects.
-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+/// This method operates on librustc objects.
+fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+ use rustc_ast::ast::{
+ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
+ };
+ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
+ use rustc_data_structures::map_in_place::MapInPlace;
 use rustc_data_structures::thin_vec::ThinVec;
- use smallvec::SmallVec;
+ use rustc_span::DUMMY_SP;
 use std::mem;
- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
- use syntax_pos::DUMMY_SP;

 struct BracketsVisitor {
 failed: bool,
 };

+ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
+ if f.is_shorthand {
+ noop_visit_expr(&mut f.expr, vis);
+ } else {
+ vis.visit_expr(&mut f.expr);
+ }
+ vec![f]
+ }
+
+ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
+ let kind = match stmt.kind {
+ // Don't wrap toplevel expressions in statements.
+ StmtKind::Expr(mut e) => {
+ noop_visit_expr(&mut e, vis);
+ StmtKind::Expr(e)
+ }
+ StmtKind::Semi(mut e) => {
+ noop_visit_expr(&mut e, vis);
+ StmtKind::Semi(e)
+ }
+ s => s,
+ };
+
+ vec![Stmt { kind, ..stmt }]
+ }
+
+ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
+ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
+ match &mut e.kind {
+ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
+ ExprKind::Struct(path, fields, expr) => {
+ vis.visit_path(path);
+ fields.flat_map_in_place(|field| flat_map_field(field, vis));
+ visit_opt(expr, |expr| vis.visit_expr(expr));
+ vis.visit_id(&mut e.id);
+ vis.visit_span(&mut e.span);
+ visit_thin_attrs(&mut e.attrs, vis);
+ }
+ _ => noop_visit_expr(e, vis),
+ }
+ }
+
 impl MutVisitor for BracketsVisitor {
 fn visit_expr(&mut self, e: &mut P<Expr>) {
 noop_visit_expr(e, self);
- match e.node {
+ match e.kind {
 ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
 _ => {
 let inner = mem::replace(
 e,
 P(Expr {
 id: ast::DUMMY_NODE_ID,
- node: ExprKind::Err,
+ kind: ExprKind::Err,
 span: DUMMY_SP,
 attrs: ThinVec::new(),
+ tokens: None,
 }),
 );
- e.node = ExprKind::Paren(inner);
+ e.kind = ExprKind::Paren(inner);
 }
 }
 }

- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
- if f.is_shorthand {
- noop_visit_expr(&mut f.expr, self);
- } else {
- self.visit_expr(&mut f.expr);
+ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
+ match arg {
+ // Don't wrap const generic arg as that's invalid syntax.
+ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
+ _ => noop_visit_generic_arg(arg, self),
 }
- SmallVec::from([f])
+ }
+
+ fn visit_block(&mut self, block: &mut P<Block>) {
+ self.visit_id(&mut block.id);
+ block
+ .stmts
+ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
+ self.visit_span(&mut block.span);
 }

 // We don't want to look at expressions that might appear in patterns or
 // types yet. We'll look into comparing those in the future. For now
 // focus on expressions appearing in other places.
 fn visit_pat(&mut self, pat: &mut P<Pat>) {
 let _ = pat;
 }

 fn visit_ty(&mut self, ty: &mut P<Ty>) {
 let _ = ty;
 }

- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
- let node = match stmt.node {
- // Don't wrap toplevel expressions in statements.
- StmtKind::Expr(mut e) => {
- noop_visit_expr(&mut e, self);
- StmtKind::Expr(e)
- }
- StmtKind::Semi(mut e) => {
- noop_visit_expr(&mut e, self);
- StmtKind::Semi(e)
- }
- s => s,
- };
-
- smallvec![Stmt { node, ..stmt }]
- }
-
- fn visit_mac(&mut self, mac: &mut Mac) {
- // By default when folding over macros, libsyntax panics. This is
+ fn visit_mac(&mut self, mac: &mut MacCall) {
+ // By default when folding over macros, librustc panics. This is
 // because it's usually not what you want, you want to run after
 // macro expansion. We do want to do that (syn doesn't do macro
 // expansion), so we implement visit_mac to just return the macro
 // unchanged.
 let _ = mac;
 }
 }

 let mut folder = BracketsVisitor { failed: false };
- folder.visit_expr(&mut libsyntax_expr);
+ folder.visit_expr(&mut librustc_expr);
 if folder.failed {
 None
 } else {
- Some(libsyntax_expr)
+ Some(librustc_expr)
 }
 }

 /// Wrap every expression which is not already wrapped in parens with parens, to
 /// reveal the precedence of the parsed expressions, and produce a stringified
 /// form of the resulting expression.
 fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
 use syn::fold::*;
@@ -313,24 +331,43 @@ fn syn_brackets(syn_expr: syn::Expr) ->
 struct ParenthesizeEveryExpr;
 impl Fold for ParenthesizeEveryExpr {
 fn fold_expr(&mut self, expr: Expr) -> Expr {
 match expr {
 Expr::Group(_) => unreachable!(),
 Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
 fold_expr(self, expr)
 }
- node => Expr::Paren(ExprParen {
+ _ => Expr::Paren(ExprParen {
 attrs: Vec::new(),
- expr: Box::new(fold_expr(self, node)),
+ expr: Box::new(fold_expr(self, expr)),
 paren_token: token::Paren::default(),
 }),
 }
 }

+ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
+ match arg {
+ // Don't wrap const generic arg as that's invalid syntax.
+ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
+ _ => fold_generic_argument(self, arg),
+ }
+ }
+
+ fn fold_generic_method_argument(
+ &mut self,
+ arg: GenericMethodArgument,
+ ) -> GenericMethodArgument {
+ match arg {
+ // Don't wrap const generic arg as that's invalid syntax.
+ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
+ _ => fold_generic_method_argument(self, arg),
+ }
+ }
+
 fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
 match stmt {
 // Don't wrap toplevel expressions in statements.
 Stmt::Expr(e) => Stmt::Expr(fold_expr(self, e)),
 Stmt::Semi(e, semi) => Stmt::Semi(fold_expr(self, e), semi),
 s => s,
 }
 }
@@ -355,17 +392,20 @@ fn syn_brackets(syn_expr: syn::Expr) ->
 fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
 use syn::fold::*;
 use syn::punctuated::Punctuated;
 use syn::*;

 struct CollectExprs(Vec<Expr>);
 impl Fold for CollectExprs {
 fn fold_expr(&mut self, expr: Expr) -> Expr {
- self.0.push(expr);
+ match expr {
+ Expr::Verbatim(tokens) if tokens.is_empty() => {}
+ _ => self.0.push(expr),
+ }

 Expr::Tuple(ExprTuple {
 attrs: vec![],
 elems: Punctuated::new(),
 paren_token: token::Paren::default(),
 })
 }
 }
diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_receiver.rs
@@ -0,0 +1,127 @@
+use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
+
+#[test]
+fn test_by_value() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn by_value(self: Self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Typed(_)) => (),
+ value => panic!("expected FnArg::Typed, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_by_mut_value() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn by_mut(mut self: Self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Typed(_)) => (),
+ value => panic!("expected FnArg::Typed, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_by_ref() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn by_ref(self: &Self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Typed(_)) => (),
+ value => panic!("expected FnArg::Typed, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_by_box() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn by_box(self: Box<Self>);
+ };
+ match sig.receiver() {
+ Some(FnArg::Typed(_)) => (),
+ value => panic!("expected FnArg::Typed, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_by_pin() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn by_pin(self: Pin<Self>);
+ };
+ match sig.receiver() {
+ Some(FnArg::Typed(_)) => (),
+ value => panic!("expected FnArg::Typed, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_explicit_type() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn explicit_type(self: Pin<MyType>);
+ };
+ match sig.receiver() {
+ Some(FnArg::Typed(_)) => (),
+ value => panic!("expected FnArg::Typed, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_value_shorthand() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn value_shorthand(self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Receiver(Receiver {
+ reference: None,
+ mutability: None,
+ ..
+ })) => (),
+ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_mut_value_shorthand() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn mut_value_shorthand(mut self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Receiver(Receiver {
+ reference: None,
+ mutability: Some(_),
+ ..
+ })) => (),
+ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_ref_shorthand() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn ref_shorthand(&self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Receiver(Receiver {
+ reference: Some(_),
+ mutability: None,
+ ..
+ })) => (),
+ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_ref_mut_shorthand() {
+ let TraitItemMethod { sig, .. } = parse_quote! {
+ fn ref_mut_shorthand(&mut self);
+ };
+ match sig.receiver() {
+ Some(FnArg::Receiver(Receiver {
+ reference: Some(_),
+ mutability: Some(_),
+ ..
+ })) => (),
+ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
+ }
+}
diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
--- a/third_party/rust/syn/tests/test_round_trip.rs
+++ b/third_party/rust/syn/tests/test_round_trip.rs
@@ -1,28 +1,26 @@
 #![cfg(not(syn_disable_nightly_tests))]
 #![recursion_limit = "1024"]
 #![feature(rustc_private)]

-extern crate quote;
-extern crate rayon;
-extern crate syn;
-extern crate syntax;
-extern crate syntax_pos;
-extern crate walkdir;
-
-mod features;
+extern crate rustc_ast;
+extern crate rustc_errors;
+extern crate rustc_expand;
+extern crate rustc_parse as parse;
+extern crate rustc_session;
+extern crate rustc_span;

 use quote::quote;
 use rayon::iter::{IntoParallelIterator, ParallelIterator};
-use syntax::ast;
-use syntax::parse::{self, PResult, ParseSess};
-use syntax::source_map::FilePathMapping;
-use syntax_pos::edition::Edition;
-use syntax_pos::FileName;
+use rustc_ast::ast;
+use rustc_errors::PResult;
+use rustc_session::parse::ParseSess;
+use rustc_span::source_map::FilePathMapping;
+use rustc_span::FileName;
 use walkdir::{DirEntry, WalkDir};

 use std::fs::File;
 use std::io::Read;
 use std::panic;
 use std::process;
 use std::sync::atomic::{AtomicUsize, Ordering};
 use std::time::Instant;
@@ -33,18 +31,18 @@ mod macros;
 #[allow(dead_code)]
 mod common;

 mod repo;

 use common::eq::SpanlessEq;

 #[test]
-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
 fn test_round_trip() {
+ common::rayon_init();
 repo::clone_rust();
 let abort_after = common::abort_after();
 if abort_after == 0 {
 panic!("Skipping all round_trip tests");
 }

 let failed = AtomicUsize::new(0);

@@ -73,43 +71,44 @@ fn test_round_trip() {
 let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
 if prev_failed + 1 >= abort_after {
 process::exit(1);
 }
 return;
 }
 };
 let back = quote!(#krate).to_string();
+ let edition = repo::edition(path).parse().unwrap();

 let equal = panic::catch_unwind(|| {
- syntax::with_globals(Edition::Edition2018, || {
+ rustc_span::with_session_globals(edition, || {
 let sess = ParseSess::new(FilePathMapping::empty());
- let before = match libsyntax_parse(content, &sess) {
+ let before = match librustc_parse(content, &sess) {
 Ok(before) => before,
 Err(mut diagnostic) => {
 diagnostic.cancel();
 if diagnostic
 .message()
 .starts_with("file not found for module")
 {
 errorf!("=== {}: ignore\n", path.display());
 } else {
 errorf!(
- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
+ "=== {}: ignore - librustc failed to parse original content: {}\n",
 path.display(),
 diagnostic.message()
 );
 }
 return true;
 }
 };
- let after = match libsyntax_parse(back, &sess) {
+ let after = match librustc_parse(back, &sess) {
 Ok(after) => after,
 Err(mut diagnostic) => {
- errorf!("=== {}: libsyntax failed to parse", path.display());
+ errorf!("=== {}: librustc failed to parse", path.display());
 diagnostic.emit();
 return false;
 }
 };

 if SpanlessEq::eq(&before, &after) {
 errorf!(
 "=== {}: pass in {}ms\n",
@@ -125,29 +124,29 @@ fn test_round_trip() {
 before,
 after,
 );
 false
 }
 })
 });
 match equal {
- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
+ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
 Ok(true) => {}
 Ok(false) => {
 let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
 if prev_failed + 1 >= abort_after {
 process::exit(1);
 }
 }
 }
 });

 let failed = failed.load(Ordering::SeqCst);
 if failed > 0 {
 panic!("{} failures", failed);
 }
 }

-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
 let name = FileName::Custom("test_round_trip".to_string());
 parse::parse_crate_from_source_str(name, content, sess)
 }
diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_shebang.rs
@@ -0,0 +1,59 @@
+#[macro_use]
+mod macros;
+
+#[test]
+fn test_basic() {
+ let content = "#!/usr/bin/env rustx\nfn main() {}";
+ let file = syn::parse_file(content).unwrap();
+ snapshot!(file, @r###"
+ File {
+ shebang: Some("#!/usr/bin/env rustx"),
+ items: [
+ Item::Fn {
+ vis: Inherited,
+ sig: Signature {
+ ident: "main",
+ generics: Generics,
+ output: Default,
+ },
+ block: Block,
+ },
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_comment() {
+ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
+ let file = syn::parse_file(content).unwrap();
+ snapshot!(file, @r###"
+ File {
+ attrs: [
+ Attribute {
+ style: Inner,
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "allow",
+ arguments: None,
+ },
+ ],
+ },
+ tokens: TokenStream(`(dead_code)`),
+ },
+ ],
+ items: [
+ Item::Fn {
+ vis: Inherited,
+ sig: Signature {
+ ident: "main",
+ generics: Generics,
+ output: Default,
+ },
+ block: Block,
+ },
+ ],
+ }
+ "###);
+}
diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
--- a/third_party/rust/syn/tests/test_should_parse.rs
+++ b/third_party/rust/syn/tests/test_should_parse.rs
@@ -1,12 +1,8 @@
-extern crate syn;
-
-mod features;
-
 macro_rules! should_parse {
 ($name:ident, { $($in:tt)* }) => {
 #[test]
 fn $name() {
 // Make sure we can parse the file!
 syn::parse_file(stringify!($($in)*)).unwrap();
 }
 }
diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
--- a/third_party/rust/syn/tests/test_size.rs
+++ b/third_party/rust/syn/tests/test_size.rs
@@ -1,12 +1,10 @@
 #![cfg(target_pointer_width = "64")]

-mod features;
-
 use std::mem;
 use syn::*;

 #[test]
 fn test_expr_size() {
 assert_eq!(mem::size_of::<Expr>(), 280);
 }

diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_stmt.rs
@@ -0,0 +1,44 @@
+#[macro_use]
+mod macros;
+
+use syn::Stmt;
+
+#[test]
+fn test_raw_operator() {
+ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
+
+ snapshot!(stmt, @r###"
+ Local(Local {
+ pat: Pat::Wild,
+ init: Some(Verbatim(`& raw const x`)),
+ })
+ "###);
+}
+
+#[test]
+fn test_raw_variable() {
+ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
+
+ snapshot!(stmt, @r###"
+ Local(Local {
+ pat: Pat::Wild,
+ init: Some(Expr::Reference {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "raw",
+ arguments: None,
+ },
+ ],
+ },
+ },
+ }),
+ })
+ "###);
+}
+
+#[test]
+fn test_raw_invalid() {
+ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
+}
diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
--- a/third_party/rust/syn/tests/test_token_trees.rs
+++ b/third_party/rust/syn/tests/test_token_trees.rs
@@ -1,14 +1,8 @@
-extern crate proc_macro2;
-extern crate quote;
-extern crate syn;
-
-mod features;
-
 #[macro_use]
 mod macros;

 use proc_macro2::TokenStream;
 use quote::quote;
 use syn::Lit;

 #[test]
@@ -16,17 +10,21 @@ fn test_struct() {
 let input = "
 #[derive(Debug, Clone)]
 pub struct Item {
 pub ident: Ident,
 pub attrs: Vec<Attribute>,
 }
 ";

- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
+ snapshot!(input as TokenStream, @r###"
+ TokenStream(
+ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
+ )
+ "###);
 }

 #[test]
 fn test_literal_mangling() {
 let code = "0_4";
 let parsed: Lit = syn::parse_str(code).unwrap();
 assert_eq!(code, quote!(#parsed).to_string());
 }
diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_ty.rs
@@ -0,0 +1,53 @@
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::quote;
+use std::iter::FromIterator;
+use syn::Type;
+
+#[test]
+fn test_mut_self() {
+ syn::parse_str::<Type>("fn(mut self)").unwrap();
+ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
+ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
+ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
+ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
+}
+
+#[test]
+fn test_macro_variable_type() {
+ // mimics the token stream corresponding to `$ty<T>`
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
+ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("T", Span::call_site())),
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "ty",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ arguments: None,
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }
+ "###);
+}
diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn/tests/test_visibility.rs
@@ -0,0 +1,145 @@
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::iter::FromIterator;
+use syn::parse::{Parse, ParseStream};
+use syn::{DeriveInput, Result, Visibility};
+
+#[derive(Debug)]
+struct VisRest {
+ vis: Visibility,
+ rest: TokenStream,
+}
+
+impl Parse for VisRest {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(VisRest {
+ vis: input.parse()?,
+ rest: input.parse()?,
+ })
+ }
+}
+
+macro_rules! assert_vis_parse {
+ ($input:expr, Ok($p:pat)) => {
+ assert_vis_parse!($input, Ok($p) + "");
+ };
+
+ ($input:expr, Ok($p:pat) + $rest:expr) => {
+ let expected = $rest.parse::<TokenStream>().unwrap();
+ let parse: VisRest = syn::parse_str($input).unwrap();
+
+ match parse.vis {
+ $p => {}
+ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
+ }
+
+ // NOTE: Round-trips through `to_string` to avoid potential whitespace
+ // diffs.
+ assert_eq!(parse.rest.to_string(), expected.to_string());
+ };
+
+ ($input:expr, Err) => {
+ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
+ };
+}
+
+#[test]
+fn test_pub() {
+ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
+}
+
+#[test]
+fn test_crate() {
+ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
+}
+
+#[test]
+fn test_inherited() {
+ assert_vis_parse!("", Ok(Visibility::Inherited));
+}
+
+#[test]
+fn test_in() {
+ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_pub_crate() {
+ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_pub_self() {
+ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_pub_super() {
+ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_missing_in() {
+ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
+}
+
+#[test]
+fn test_missing_in_path() {
+ assert_vis_parse!("pub(in)", Err);
+}
+
+#[test]
+fn test_crate_path() {
+ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
+}
+
+#[test]
+fn test_junk_after_in() {
+ assert_vis_parse!("pub(in some::path @@garbage)", Err);
+}
+
+#[test]
+fn test_empty_group_vis() {
+ // mimics `struct S { $vis $field: () }` where $vis is empty
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Ident(Ident::new("struct", Span::call_site())),
+ TokenTree::Ident(Ident::new("S", Span::call_site())),
+ TokenTree::Group(Group::new(
+ Delimiter::Brace,
+ TokenStream::from_iter(vec![
+ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
+ "f",
+ Span::call_site(),
+ ))]),
+ )),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+ ]),
+ )),
+ ]);
+
+ snapshot!(tokens as DeriveInput, @r###"
+ DeriveInput {
+ vis: Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Named {
+ named: [
+ Field {
+ vis: Inherited,
+ ident: Some("f"),
+ colon_token: Some,
+ ty: Type::Tuple,
+ },
+ ],
+ },
+ },
+ }
+ "###);
+}
diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
--- a/third_party/rust/syn/tests/zzz_stable.rs
+++ b/third_party/rust/syn/tests/zzz_stable.rs
@@ -1,21 +1,19 @@
 #![cfg(syn_disable_nightly_tests)]

-extern crate termcolor;
-
 use std::io::{self, Write};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

 const MSG: &str = "\
 ‖
 ‖   WARNING:
 ‖   This is not a nightly compiler so not all tests were able to
 ‖   run. Syn includes tests that compare Syn's parser against the
-‖   compiler's parser, which requires access to unstable libsyntax
+‖   compiler's parser, which requires access to unstable librustc
 ‖   data structures and a nightly compiler.
 ‖
 ";

 #[test]
 fn notice() -> io::Result<()> {
 let header = "WARNING";
 let index_of_header = MSG.find(header).unwrap();