diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index de44a2031ab82..6b9c01fce90f9 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -905,26 +905,6 @@ pub struct Stmt { } impl Stmt { - pub fn tokens(&self) -> Option<&LazyTokenStream> { - match self.kind { - StmtKind::Local(ref local) => local.tokens.as_ref(), - StmtKind::Item(ref item) => item.tokens.as_ref(), - StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(), - StmtKind::Empty => None, - StmtKind::MacCall(ref mac) => mac.tokens.as_ref(), - } - } - - pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> { - match self.kind { - StmtKind::Local(ref mut local) => local.tokens.as_mut(), - StmtKind::Item(ref mut item) => item.tokens.as_mut(), - StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(), - StmtKind::Empty => None, - StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(), - } - } - pub fn has_trailing_semicolon(&self) -> bool { match &self.kind { StmtKind::Semi(_) => true, @@ -979,6 +959,38 @@ pub enum StmtKind { MacCall(P), } +impl StmtKind { + pub fn tokens(&self) -> Option<&LazyTokenStream> { + match self { + StmtKind::Local(ref local) => local.tokens.as_ref(), + StmtKind::Item(ref item) => item.tokens.as_ref(), + StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(), + StmtKind::Empty => None, + StmtKind::MacCall(ref mac) => mac.tokens.as_ref(), + } + } + + pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> { + match self { + StmtKind::Local(ref mut local) => local.tokens.as_mut(), + StmtKind::Item(ref mut item) => item.tokens.as_mut(), + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(), + StmtKind::Empty => None, + StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(), + } + } + + pub fn set_tokens(&mut self, tokens: Option) { + match self { + StmtKind::Local(ref mut local) => local.tokens = tokens, + 
StmtKind::Item(ref mut item) => item.tokens = tokens, + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens, + StmtKind::Empty => {} + StmtKind::MacCall(ref mut mac) => mac.tokens = tokens, + } + } +} + #[derive(Clone, Encodable, Decodable, Debug)] pub struct MacCallStmt { pub mac: MacCall, diff --git a/compiler/rustc_ast/src/ast_like.rs b/compiler/rustc_ast/src/ast_like.rs index 6649cda69a0b0..3671a013de53d 100644 --- a/compiler/rustc_ast/src/ast_like.rs +++ b/compiler/rustc_ast/src/ast_like.rs @@ -1,35 +1,42 @@ use super::ptr::P; -use super::tokenstream::LazyTokenStream; +use super::tokenstream::{AttributesData, LazyTokenStream}; use super::{Arm, Field, FieldPat, GenericParam, Param, StructField, Variant}; -use super::{AssocItem, Expr, ForeignItem, Item, Local}; +use super::{AssocItem, Expr, ForeignItem, Item, Local, MacCallStmt}; use super::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility}; use super::{AttrVec, Attribute, Stmt, StmtKind}; +use rustc_span::sym; /// An `AstLike` represents an AST node (or some wrapper around /// and AST node) which stores some combination of attributes /// and tokens. 
pub trait AstLike: Sized { + const SUPPORTS_INNER_ATTRS: bool; fn attrs(&self) -> &[Attribute]; fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec)); /// Called by `Parser::collect_tokens` to store the collected /// tokens inside an AST node - fn finalize_tokens(&mut self, _tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, _tokens: LazyTokenStream) -> Option { // This default impl makes this trait easier to implement // in tools like `rust-analyzer` panic!("`finalize_tokens` is not supported!") } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)); } impl AstLike for P { + const SUPPORTS_INNER_ATTRS: bool = T::SUPPORTS_INNER_ATTRS; fn attrs(&self) -> &[Attribute] { (**self).attrs() } fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec)) { (**self).visit_attrs(f); } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { (**self).finalize_tokens(tokens) } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + (**self).visit_tokens(f); + } } fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec)) { @@ -41,6 +48,10 @@ fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec)) { } impl AstLike for StmtKind { + // This might be an `StmtKind::Item`, which contains + // an item that supports inner attrs + const SUPPORTS_INNER_ATTRS: bool = true; + fn attrs(&self) -> &[Attribute] { match *self { StmtKind::Local(ref local) => local.attrs(), @@ -60,21 +71,34 @@ impl AstLike for StmtKind { StmtKind::MacCall(mac) => visit_attrvec(&mut mac.attrs, f), } } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { - let stmt_tokens = match self { - StmtKind::Local(ref mut local) => &mut local.tokens, - StmtKind::Item(ref mut item) => &mut item.tokens, - StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => &mut expr.tokens, - StmtKind::Empty => return, - StmtKind::MacCall(ref mut mac) => &mut mac.tokens, + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> 
Option { + match self { + StmtKind::Local(ref mut local) => local.finalize_tokens(tokens), + StmtKind::MacCall(ref mut mac) => mac.finalize_tokens(tokens), + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => { + expr.finalize_tokens(tokens) + } + StmtKind::Item(ref mut item) => item.finalize_tokens(tokens), + StmtKind::Empty => None, + } + } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + let tokens = match self { + StmtKind::Local(ref mut local) => Some(&mut local.tokens), + StmtKind::Item(ref mut item) => Some(&mut item.tokens), + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => Some(&mut expr.tokens), + StmtKind::Empty => None, + StmtKind::MacCall(ref mut mac) => Some(&mut mac.tokens), }; - if stmt_tokens.is_none() { - *stmt_tokens = Some(tokens); + if let Some(tokens) = tokens { + f(tokens); } } } impl AstLike for Stmt { + const SUPPORTS_INNER_ATTRS: bool = StmtKind::SUPPORTS_INNER_ATTRS; + fn attrs(&self) -> &[Attribute] { self.kind.attrs() } @@ -82,31 +106,49 @@ impl AstLike for Stmt { fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec)) { self.kind.visit_attrs(f); } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { self.kind.finalize_tokens(tokens) } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + self.kind.visit_tokens(f) + } } impl AstLike for Attribute { + const SUPPORTS_INNER_ATTRS: bool = false; + fn attrs(&self) -> &[Attribute] { &[] } fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec)) {} - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { match &mut self.kind { AttrKind::Normal(_, attr_tokens) => { if attr_tokens.is_none() { *attr_tokens = Some(tokens); } + None } AttrKind::DocComment(..) 
=> { panic!("Called finalize_tokens on doc comment attr {:?}", self) } } } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + match &mut self.kind { + AttrKind::Normal(_, attr_tokens) => { + f(attr_tokens); + } + AttrKind::DocComment(..) => { + panic!("Called visit_tokens on doc comment attr {:?}", self) + } + } + } } impl AstLike for Option { + const SUPPORTS_INNER_ATTRS: bool = T::SUPPORTS_INNER_ATTRS; + fn attrs(&self) -> &[Attribute] { self.as_ref().map(|inner| inner.attrs()).unwrap_or(&[]) } @@ -115,13 +157,25 @@ impl AstLike for Option { inner.visit_attrs(f); } } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { + self.as_mut().and_then(|inner| inner.finalize_tokens(tokens)) + } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { if let Some(inner) = self { - inner.finalize_tokens(tokens); + inner.visit_tokens(f); } } } +// NOTE: Builtin attributes like `cfg` and `cfg_attr` cannot be renamed via imports. +// Therefore, the absence of a literal `cfg` or `cfg_attr` guarantees that +// we don't need to do any eager expansion. +pub fn has_cfg_or_cfg_any(attrs: &[Attribute]) -> bool { + attrs.iter().any(|attr| { + attr.ident().map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr) + }) +} + /// Helper trait for the macros below. Abstracts over /// the two types of attribute fields that AST nodes /// may have (`Vec` or `AttrVec`) @@ -142,8 +196,13 @@ impl VecOrAttrVec for AttrVec { } macro_rules! derive_has_tokens_and_attrs { - ($($ty:path),*) => { $( + ( + const SUPPORTS_INNER_ATTRS: bool = $inner_attrs:literal; + $($ty:path),* + ) => { $( impl AstLike for $ty { + const SUPPORTS_INNER_ATTRS: bool = $inner_attrs; + fn attrs(&self) -> &[Attribute] { &self.attrs } @@ -152,12 +211,22 @@ macro_rules! 
derive_has_tokens_and_attrs { VecOrAttrVec::visit(&mut self.attrs, f) } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { if self.tokens.is_none() { self.tokens = Some(tokens); } + if has_cfg_or_cfg_any(&self.attrs) { + Some(AttributesData { attrs: self.attrs.clone().into(), tokens: self.tokens.clone().unwrap() }) + } else { + None + } } + + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + f(&mut self.tokens) + } + } )* } } @@ -165,6 +234,8 @@ macro_rules! derive_has_tokens_and_attrs { macro_rules! derive_has_attrs_no_tokens { ($($ty:path),*) => { $( impl AstLike for $ty { + const SUPPORTS_INNER_ATTRS: bool = false; + fn attrs(&self) -> &[Attribute] { &self.attrs } @@ -173,7 +244,15 @@ macro_rules! derive_has_attrs_no_tokens { VecOrAttrVec::visit(&mut self.attrs, f) } - fn finalize_tokens(&mut self, _tokens: LazyTokenStream) {} + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { + if has_cfg_or_cfg_any(&self.attrs) { + Some(AttributesData { attrs: self.attrs.clone().into(), tokens }) + } else { + None + } + } + fn visit_tokens(&mut self, _f: impl FnOnce(&mut Option)) {} + } )* } } @@ -181,6 +260,8 @@ macro_rules! derive_has_attrs_no_tokens { macro_rules! derive_has_tokens_no_attrs { ($($ty:path),*) => { $( impl AstLike for $ty { + const SUPPORTS_INNER_ATTRS: bool = false; + fn attrs(&self) -> &[Attribute] { &[] } @@ -188,20 +269,31 @@ macro_rules! derive_has_tokens_no_attrs { fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec)) { } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { if self.tokens.is_none() { self.tokens = Some(tokens); } - + None + } + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + f(&mut self.tokens) } } )* } } -// These AST nodes support both inert and active -// attributes, so they also have tokens. 
+// These ast nodes support both active and inert attributes, +// so they have tokens collected to pass to proc macros +derive_has_tokens_and_attrs! { + // Both `Item` and `AssocItem` can have bodies, which + // can contain inner attributes + const SUPPORTS_INNER_ATTRS: bool = true; + Item, AssocItem +} + derive_has_tokens_and_attrs! { - Item, Expr, Local, AssocItem, ForeignItem + const SUPPORTS_INNER_ATTRS: bool = false; + ForeignItem, Expr, Local, MacCallStmt } // These ast nodes only support inert attributes, so they don't diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 52ac7540f6943..ee5643cb2d0cf 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -6,7 +6,10 @@ use crate::ast::{Lit, LitKind}; use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem}; use crate::ast::{Path, PathSegment}; use crate::token::{self, CommentKind, Token}; -use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree, TreeAndSpacing}; +use crate::tokenstream::{ + DelimSpan, LazyTokenStream, PreexpTokenStream, PreexpTokenTree, Spacing, TokenStream, + TokenTree, TreeAndSpacing, +}; use rustc_index::bit_set::GrowableBitSet; use rustc_span::source_map::BytePos; @@ -309,14 +312,18 @@ impl Attribute { } } - pub fn tokens(&self) -> TokenStream { + pub fn tokens(&self) -> PreexpTokenStream { match self.kind { AttrKind::Normal(_, ref tokens) => tokens .as_ref() .unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self)) .create_token_stream(), - AttrKind::DocComment(comment_kind, data) => TokenStream::from(TokenTree::Token( - Token::new(token::DocComment(comment_kind, self.style, data), self.span), + AttrKind::DocComment(comment_kind, data) => PreexpTokenStream::from(( + PreexpTokenTree::Token(Token::new( + token::DocComment(comment_kind, self.style, data), + self.span, + )), + Spacing::Alone, )), } } diff --git a/compiler/rustc_ast/src/mut_visit.rs 
b/compiler/rustc_ast/src/mut_visit.rs index c286738811ca1..f9ac9437d0673 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -630,6 +630,33 @@ pub fn noop_flat_map_param(mut param: Param, vis: &mut T) -> Smal smallvec![param] } +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +pub fn visit_preexp_tt(tt: &mut PreexpTokenTree, vis: &mut T) { + match tt { + PreexpTokenTree::Token(token) => { + visit_token(token, vis); + } + PreexpTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => { + vis.visit_span(open); + vis.visit_span(close); + visit_preexp_tts(tts, vis); + } + PreexpTokenTree::Attributes(data) => { + for attr in &mut *data.attrs { + match &mut attr.kind { + AttrKind::Normal(_, attr_tokens) => { + visit_lazy_tts(attr_tokens, vis); + } + AttrKind::DocComment(..) => { + // FIXME: Do we really need to visit doc comments? + } + } + } + visit_lazy_tts_opt_mut(Some(&mut data.tokens), vis); + } + } +} + // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. 
pub fn visit_tt(tt: &mut TokenTree, vis: &mut T) { match tt { @@ -652,16 +679,30 @@ pub fn visit_tts(TokenStream(tts): &mut TokenStream, vis: &mut T) } } -pub fn visit_lazy_tts(lazy_tts: &mut Option, vis: &mut T) { +pub fn visit_preexp_tts( + PreexpTokenStream(tts): &mut PreexpTokenStream, + vis: &mut T, +) { + if vis.token_visiting_enabled() && !tts.is_empty() { + let tts = Lrc::make_mut(tts); + visit_vec(tts, |(tree, _is_joint)| visit_preexp_tt(tree, vis)); + } +} + +pub fn visit_lazy_tts_opt_mut(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) { if vis.token_visiting_enabled() { - visit_opt(lazy_tts, |lazy_tts| { + if let Some(lazy_tts) = lazy_tts { let mut tts = lazy_tts.create_token_stream(); - visit_tts(&mut tts, vis); + visit_preexp_tts(&mut tts, vis); *lazy_tts = LazyTokenStream::new(tts); - }) + } } } +pub fn visit_lazy_tts(lazy_tts: &mut Option, vis: &mut T) { + visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis); +} + // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. // Applies ident visitor if it's an ident; applies other visits to interpolated nodes. 
// In practice the ident part is not actually used by specific visitors right now, diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 9ac05f316f034..8787695bac427 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -22,6 +22,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::{Span, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; +use crate::AttrVec; use std::{fmt, iter, mem}; /// When the main Rust parser encounters a syntax-extension invocation, it @@ -127,11 +128,11 @@ where } pub trait CreateTokenStream: sync::Send + sync::Sync { - fn create_token_stream(&self) -> TokenStream; + fn create_token_stream(&self) -> PreexpTokenStream; } -impl CreateTokenStream for TokenStream { - fn create_token_stream(&self) -> TokenStream { +impl CreateTokenStream for PreexpTokenStream { + fn create_token_stream(&self) -> PreexpTokenStream { self.clone() } } @@ -147,14 +148,14 @@ impl LazyTokenStream { LazyTokenStream(Lrc::new(Box::new(inner))) } - pub fn create_token_stream(&self) -> TokenStream { + pub fn create_token_stream(&self) -> PreexpTokenStream { self.0.create_token_stream() } } impl fmt::Debug for LazyTokenStream { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt("LazyTokenStream", f) + write!(f, "LazyTokenStream({:?})", self.create_token_stream()) } } @@ -177,6 +178,135 @@ impl HashStable for LazyTokenStream { } } +#[derive(Clone, Debug, Default, Encodable, Decodable)] +pub struct PreexpTokenStream(pub Lrc>); + +#[derive(Clone, Debug, Encodable, Decodable)] +pub enum PreexpTokenTree { + Token(Token), + Delimited(DelimSpan, DelimToken, PreexpTokenStream), + Attributes(AttributesData), +} + +impl PreexpTokenStream { + pub fn new(tokens: Vec<(PreexpTokenTree, Spacing)>) -> PreexpTokenStream { + PreexpTokenStream(Lrc::new(tokens)) + } + + pub fn to_tokenstream(&self) -> TokenStream { + let trees: Vec<_> = self + .0 + .iter() 
+ .flat_map(|tree| match &tree.0 { + PreexpTokenTree::Token(inner) => { + smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter() + } + PreexpTokenTree::Delimited(span, delim, stream) => smallvec![( + TokenTree::Delimited( + *span, + *delim, + stream.create_token_stream().to_tokenstream() + ), + tree.1, + )] + .into_iter(), + PreexpTokenTree::Attributes(data) => { + let mut outer_attrs = Vec::new(); + let mut inner_attrs = Vec::new(); + let attrs: Vec<_> = data.attrs.clone().into(); + for attr in attrs { + match attr.style { + crate::AttrStyle::Outer => { + assert!( + inner_attrs.len() == 0, + "Found outer attribute {:?} after inner attrs {:?}", + attr, + inner_attrs + ); + outer_attrs.push(attr); + } + crate::AttrStyle::Inner => { + inner_attrs.push(attr); + } + } + } + + let mut target_tokens: Vec<_> = data + .tokens + .create_token_stream() + .to_tokenstream() + .0 + .iter() + .cloned() + .collect(); + if !inner_attrs.is_empty() { + // Inner attributes are only supported on extern blocks, functions, impls, + // and modules. All of these have their inner attributes placed at + // the beginning of the rightmost outermost braced group: + // e.g. fn foo() { #![my_attr} } + // + // Therefore, we can insert them back into the right location + // without needing to do any extra position tracking. + // + // Note: Outline modules are an exception - they can + // have attributes like `#![my_attr]` at the start of a file. + // Support for custom attributes in this position is not + // properly implemented - we always synthesize fake tokens, + // so we never reach this code. 
+ let mut last_braced = target_tokens + .iter() + .rev() + .position(|(t, _)| { + matches!(t, TokenTree::Delimited(_, DelimToken::Brace, _)) + }) + .expect("Missing final brace-delimited group"); + last_braced = target_tokens.len() - 1 - last_braced; + + let mut builder = TokenStreamBuilder::new(); + for inner_attr in inner_attrs { + builder.push(inner_attr.tokens().to_tokenstream()); + } + match &target_tokens[last_braced] { + ( + TokenTree::Delimited(span, DelimToken::Brace, delim_tokens), + spacing, + ) => { + builder.push(delim_tokens.clone()); + target_tokens[last_braced] = ( + TokenTree::Delimited(*span, DelimToken::Brace, builder.build()), + *spacing, + ); + } + _ => unreachable!(), + } + } + + let flat: SmallVec<[_; 1]> = data + .attrs + .iter() + .filter(|attr| attr.style == crate::AttrStyle::Outer) + .flat_map(|attr| { + // FIXME: Make this more efficient + let tokens: Vec<_> = + attr.tokens().to_tokenstream().0.clone().iter().cloned().collect(); + tokens.into_iter() + }) + .chain(target_tokens.into_iter()) + .collect(); + flat.into_iter() + } + }) + .collect(); + TokenStream::new(trees) + } +} + +#[derive(Clone, Debug, Encodable, Decodable)] +pub struct AttributesData { + pub attrs: AttrVec, + pub tokens: LazyTokenStream, +} + /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s. 
/// /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s @@ -239,6 +369,12 @@ impl TokenStream { } } +impl From<(PreexpTokenTree, Spacing)> for PreexpTokenStream { + fn from((tree, spacing): (PreexpTokenTree, Spacing)) -> PreexpTokenStream { + PreexpTokenStream::new(vec![(tree, spacing)]) + } +} + impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { TokenStream::new(vec![(tree, Spacing::Alone)]) diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs index fad64858ce3ff..d608d5d817095 100644 --- a/compiler/rustc_builtin_macros/src/derive.rs +++ b/compiler/rustc_builtin_macros/src/derive.rs @@ -52,23 +52,10 @@ impl MultiItemModifier for Expander { // FIXME: Try to cache intermediate results to avoid collecting same paths multiple times. match ecx.resolver.resolve_derives(ecx.current_expansion.id, derives, ecx.force_mode) { Ok(()) => { + // Cfg-strip the tokens, since we will be invoking a proc-macro with them let mut visitor = - StripUnconfigured { sess, features: ecx.ecfg.features, modified: false }; - let mut item = visitor.fully_configure(item); - if visitor.modified { - // Erase the tokens if cfg-stripping modified the item - // This will cause us to synthesize fake tokens - // when `nt_to_tokenstream` is called on this item. 
- match &mut item { - Annotatable::Item(item) => item, - Annotatable::Stmt(stmt) => match &mut stmt.kind { - StmtKind::Item(item) => item, - _ => unreachable!(), - }, - _ => unreachable!(), - } - .tokens = None; - } + StripUnconfigured { sess, features: ecx.ecfg.features, config_tokens: true }; + let item = visitor.fully_configure(item); ExpandResult::Ready(vec![item]) } Err(Indeterminate) => ExpandResult::Retry(item), diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index ce8103c0f850d..b26d55679782a 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -3,7 +3,9 @@ use crate::module::DirectoryOwnership; use rustc_ast::ptr::P; use rustc_ast::token::{self, Nonterminal}; -use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, LazyTokenStream, TokenStream}; +use rustc_ast::tokenstream::{ + AttributesData, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream, +}; use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_ast::{self as ast, AstLike, Attribute, NodeId, PatKind}; use rustc_attr::{self as attr, Deprecation, Stability}; @@ -45,6 +47,7 @@ pub enum Annotatable { } impl AstLike for Annotatable { + const SUPPORTS_INNER_ATTRS: bool = true; fn attrs(&self) -> &[Attribute] { match *self { Annotatable::Item(ref item) => &item.attrs, @@ -81,9 +84,27 @@ impl AstLike for Annotatable { } } - fn finalize_tokens(&mut self, tokens: LazyTokenStream) { + fn finalize_tokens(&mut self, tokens: LazyTokenStream) -> Option { panic!("Called finalize_tokens on an Annotatable: {:?}", tokens); } + + fn visit_tokens(&mut self, f: impl FnOnce(&mut Option)) { + match self { + Annotatable::Item(item) => item.visit_tokens(f), + Annotatable::TraitItem(trait_item) => trait_item.visit_tokens(f), + Annotatable::ImplItem(impl_item) => impl_item.visit_tokens(f), + Annotatable::ForeignItem(foreign_item) => foreign_item.visit_tokens(f), + Annotatable::Stmt(stmt) => stmt.visit_tokens(f), + Annotatable::Expr(expr) => 
expr.visit_tokens(f), + Annotatable::Arm(arm) => arm.visit_tokens(f), + Annotatable::Field(field) => field.visit_tokens(f), + Annotatable::FieldPat(fp) => fp.visit_tokens(f), + Annotatable::GenericParam(gp) => gp.visit_tokens(f), + Annotatable::Param(p) => p.visit_tokens(f), + Annotatable::StructField(sf) => sf.visit_tokens(f), + Annotatable::Variant(v) => v.visit_tokens(f), + } + } } impl Annotatable { @@ -145,10 +166,7 @@ impl Annotatable { } crate fn into_tokens(self, sess: &ParseSess) -> TokenStream { - // Tokens of an attribute target may be invalidated by some outer `#[derive]` performing - // "full configuration" (attributes following derives on the same item should be the most - // common case), that's why synthesizing tokens is allowed. - nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::Yes) + nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::No) } pub fn expect_item(self) -> P { diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 7d0becf1f5d85..a7cca49d5843b 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -5,8 +5,10 @@ use crate::base::Annotatable; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; use rustc_ast::token::{DelimToken, Token, TokenKind}; -use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree}; -use rustc_ast::{self as ast, AstLike, AttrItem, Attribute, MetaItem}; +use rustc_ast::tokenstream::{ + DelimSpan, LazyTokenStream, PreexpTokenStream, PreexpTokenTree, Spacing, TokenTree, +}; +use rustc_ast::{self as ast, AstLike, AttrItem, AttrStyle, Attribute, MetaItem}; use rustc_attr as attr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::map_in_place::MapInPlace; @@ -28,7 +30,10 @@ use smallvec::SmallVec; pub struct StripUnconfigured<'a> { pub sess: &'a Session, pub features: Option<&'a Features>, - pub modified: bool, + /// If `true`, perform 
cfg-stripping on attached tokens. + /// This is only used for the input to derive macros, + /// which needs eager expansion of `cfg` and `cfg_attr` + pub config_tokens: bool, } fn get_features( @@ -199,7 +204,7 @@ fn get_features( // `cfg_attr`-process the crate's attributes and compute the crate's features. pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) { - let mut strip_unconfigured = StripUnconfigured { sess, features: None, modified: false }; + let mut strip_unconfigured = StripUnconfigured { sess, features: None, config_tokens: false }; let unconfigured_attrs = krate.attrs.clone(); let diag = &sess.parse_sess.span_diagnostic; @@ -246,9 +251,16 @@ impl<'a> StripUnconfigured<'a> { pub fn configure(&mut self, mut node: T) -> Option { self.process_cfg_attrs(&mut node); if self.in_cfg(node.attrs()) { + if self.config_tokens { + node.visit_tokens(|tokens| { + if let Some(tokens) = tokens { + let preexp_tokens = tokens.create_token_stream(); + *tokens = LazyTokenStream::new(self.configure_tokens(&preexp_tokens)); + } + }); + } Some(node) } else { - self.modified = true; None } } @@ -258,12 +270,37 @@ impl<'a> StripUnconfigured<'a> { mut attrs: Vec, ) -> Option> { attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr)); - if self.in_cfg(&attrs) { - Some(attrs) - } else { - self.modified = true; - None - } + if self.in_cfg(&attrs) { Some(attrs) } else { None } + } + + fn configure_tokens(&mut self, stream: &PreexpTokenStream) -> PreexpTokenStream { + let trees: Vec<_> = stream + .0 + .iter() + .flat_map(|tree| match tree.0.clone() { + PreexpTokenTree::Attributes(mut data) => { + let mut attrs: Vec<_> = std::mem::take(&mut data.attrs).into(); + attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr)); + data.attrs = attrs.into(); + + if self.in_cfg(&data.attrs) { + data.tokens = LazyTokenStream::new( + self.configure_tokens(&data.tokens.create_token_stream()), + ); + Some((PreexpTokenTree::Attributes(data), tree.1)).into_iter() + 
} else { + None.into_iter() + } + } + PreexpTokenTree::Delimited(sp, delim, mut inner) => { + inner = self.configure_tokens(&inner); + Some((PreexpTokenTree::Delimited(sp, delim, inner), tree.1)) + } + .into_iter(), + token_tree @ PreexpTokenTree::Token(_) => Some((token_tree, tree.1)).into_iter(), + }) + .collect(); + PreexpTokenStream::new(trees) } /// Parse and expand all `cfg_attr` attributes into a list of attributes @@ -290,9 +327,6 @@ impl<'a> StripUnconfigured<'a> { return vec![attr]; } - // A `#[cfg_attr]` either gets removed, or replaced with a new attribute - self.modified = true; - let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) { None => return vec![], Some(r) => r, @@ -316,7 +350,7 @@ impl<'a> StripUnconfigured<'a> { expanded_attrs .into_iter() .flat_map(|(item, span)| { - let orig_tokens = attr.tokens(); + let orig_tokens = attr.tokens().to_tokenstream(); // We are taking an attribute of the form `#[cfg_attr(pred, attr)]` // and producing an attribute of the form `#[attr]`. We @@ -326,25 +360,34 @@ impl<'a> StripUnconfigured<'a> { // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token // for `attr` when we expand it to `#[attr]` - let pound_token = orig_tokens.trees().next().unwrap(); - if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) { - panic!("Bad tokens for attribute {:?}", attr); + let mut orig_trees = orig_tokens.trees(); + let pound_token = match orig_trees.next().unwrap() { + TokenTree::Token(token @ Token { kind: TokenKind::Pound, .. }) => token, + _ => panic!("Bad tokens for attribute {:?}", attr), + }; + let pound_span = pound_token.span; + + let mut trees = vec![(PreexpTokenTree::Token(pound_token), Spacing::Alone)]; + if attr.style == AttrStyle::Inner { + // For inner attributes, we do the same thing for the `!` in `#![some_attr]` + let bang_token = match orig_trees.next().unwrap() { + TokenTree::Token(token @ Token { kind: TokenKind::Not, .. 
}) => token, + _ => panic!("Bad tokens for attribute {:?}", attr), + }; + trees.push((PreexpTokenTree::Token(bang_token), Spacing::Alone)); } // We don't really have a good span to use for the syntheized `[]` // in `#[attr]`, so just use the span of the `#` token. - let bracket_group = TokenTree::Delimited( - DelimSpan::from_single(pound_token.span()), + let bracket_group = PreexpTokenTree::Delimited( + DelimSpan::from_single(pound_span), DelimToken::Bracket, item.tokens .as_ref() .unwrap_or_else(|| panic!("Missing tokens for {:?}", item)) .create_token_stream(), ); - let tokens = Some(LazyTokenStream::new(TokenStream::new(vec![ - (pound_token, Spacing::Alone), - (bracket_group, Spacing::Alone), - ]))); - + trees.push((bracket_group, Spacing::Alone)); + let tokens = Some(LazyTokenStream::new(PreexpTokenStream::new(trees))); self.process_cfg_attr(attr::mk_attr_from_item(item, tokens, attr.style, span)) }) .collect() diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index b474cad1242e8..07248c3fbbba0 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -12,7 +12,7 @@ use rustc_ast::ptr::P; use rustc_ast::token; use rustc_ast::tokenstream::TokenStream; use rustc_ast::visit::{self, AssocCtxt, Visitor}; -use rustc_ast::{AstLike, AttrItem, AttrStyle, Block, Inline, ItemKind, LitKind, MacArgs}; +use rustc_ast::{AstLike, AttrItem, Block, Inline, ItemKind, LitKind, MacArgs}; use rustc_ast::{MacCallStmt, MacStmtStyle, MetaItemKind, ModKind, NestedMetaItem}; use rustc_ast::{NodeId, PatKind, Path, StmtKind, Unsafe}; use rustc_ast_pretty::pprust; @@ -598,10 +598,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let invocations = { let mut collector = InvocationCollector { + // Non-derive macro invocations cannot see the results of cfg expansion - they + // will either be removed along with the item, or invoked before the cfg/cfg_attr + // attribute is expanded. 
Therefore, we don't need to configure the tokens + // Derive macros *can* see the results of cfg-expansion - they are handled + // specially in `fully_expand_fragment` cfg: StripUnconfigured { sess: &self.cx.sess, features: self.cx.ecfg.features, - modified: false, + config_tokens: false, }, cx: self.cx, invocations: Vec::new(), @@ -696,13 +701,24 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtensionKind::Attr(expander) => { self.gate_proc_macro_input(&item); self.gate_proc_macro_attr_item(span, &item); - let tokens = match attr.style { - AttrStyle::Outer => item.into_tokens(&self.cx.sess.parse_sess), - // FIXME: Properly collect tokens for inner attributes - AttrStyle::Inner => rustc_parse::fake_token_stream( + let mut fake_tokens = false; + if let Annotatable::Item(item_inner) = &item { + if let ItemKind::Mod(..) = &item_inner.kind { + // We are invoking an inner attribute + // at the crate/module root. + // FIXME: Collect tokens and use them instead of generating + // fake ones. These are unstable, so it needs to be + // fixed prior to stabilization + fake_tokens = item_inner.ident.name.is_empty(); + } + } + let tokens = if fake_tokens { + rustc_parse::fake_token_stream( &self.cx.sess.parse_sess, &item.into_nonterminal(), - ), + ) + } else { + item.into_tokens(&self.cx.sess.parse_sess) }; let attr_item = attr.unwrap_normal_item(); if let MacArgs::Eq(..) 
= attr_item.args { diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs index c5d8ff25ea94b..bd162de198360 100644 --- a/compiler/rustc_expand/src/lib.rs +++ b/compiler/rustc_expand/src/lib.rs @@ -5,6 +5,7 @@ #![feature(proc_macro_internals)] #![feature(proc_macro_span)] #![feature(try_blocks)] +#![recursion_limit = "256"] #[macro_use] extern crate rustc_macros; diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs index 8cbaa7c945a81..72830b6cf1d29 100644 --- a/compiler/rustc_expand/src/proc_macro.rs +++ b/compiler/rustc_expand/src/proc_macro.rs @@ -93,7 +93,7 @@ impl MultiItemModifier for ProcMacroDerive { let input = if item.pretty_printing_compatibility_hack() { TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into() } else { - nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::Yes) + nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::No) }; let server = proc_macro_server::Rustc::new(ecx); diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index cea4de72df549..27ccfd50bc769 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -8,9 +8,13 @@ #![feature(box_patterns)] use rustc_ast as ast; -use rustc_ast::token::{self, Nonterminal}; -use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream}; +use rustc_ast::token::{self, Nonterminal, Token, TokenKind}; +use rustc_ast::tokenstream::{ + self, AttributesData, CanSynthesizeMissingTokens, LazyTokenStream, PreexpTokenStream, + PreexpTokenTree, Spacing, TokenStream, +}; use rustc_ast::AstLike; +use rustc_ast::Attribute; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; use rustc_errors::{Diagnostic, FatalError, Level, PResult}; @@ -20,8 +24,6 @@ use rustc_span::{FileName, SourceFile, Span}; use std::path::Path; use std::str; -use tracing::debug; - pub const MACRO_ARGUMENTS: 
Option<&str> = Some("macro arguments"); #[macro_use] @@ -254,19 +256,20 @@ pub fn nt_to_tokenstream( // before we fall back to the stringification. let convert_tokens = - |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream()); + |tokens: Option<&LazyTokenStream>| Some(tokens?.create_token_stream().to_tokenstream()); let tokens = match *nt { - Nonterminal::NtItem(ref item) => prepend_attrs(sess, &item.attrs, nt, item.tokens.as_ref()), + Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()), Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()), Nonterminal::NtStmt(ref stmt) => { - let do_prepend = |tokens| prepend_attrs(sess, stmt.attrs(), nt, tokens); if let ast::StmtKind::Empty = stmt.kind { - let tokens: TokenStream = - tokenstream::TokenTree::token(token::Semi, stmt.span).into(); - do_prepend(Some(&LazyTokenStream::new(tokens))) + let tokens = PreexpTokenStream::new(vec![( + tokenstream::PreexpTokenTree::Token(Token::new(TokenKind::Semi, stmt.span)), + Spacing::Alone, + )]); + prepend_attrs(&stmt.attrs(), Some(&LazyTokenStream::new(tokens))) } else { - do_prepend(stmt.tokens()) + prepend_attrs(&stmt.attrs(), stmt.kind.tokens()) } } Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()), @@ -282,10 +285,7 @@ pub fn nt_to_tokenstream( Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()), Nonterminal::NtTT(ref tt) => Some(tt.clone().into()), Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => { - if expr.tokens.is_none() { - debug!("missing tokens for expr {:?}", expr); - } - prepend_attrs(sess, &expr.attrs, nt, expr.tokens.as_ref()) + prepend_attrs(&expr.attrs, expr.tokens.as_ref()) } }; @@ -298,30 +298,19 @@ pub fn nt_to_tokenstream( } } +fn prepend_attrs(attrs: &[Attribute], tokens: Option<&LazyTokenStream>) -> Option { + let tokens = tokens?; + if attrs.is_empty() { + return Some(tokens.create_token_stream().to_tokenstream()); + } + let 
attr_data = AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() }; + let wrapped = + PreexpTokenStream::new(vec![(PreexpTokenTree::Attributes(attr_data), Spacing::Alone)]); + Some(wrapped.to_tokenstream()) +} + pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream { let source = pprust::nonterminal_to_string(nt); let filename = FileName::macro_expansion_source_code(&source); parse_stream_from_source_str(filename, source, sess, Some(nt.span())) } - -fn prepend_attrs( - sess: &ParseSess, - attrs: &[ast::Attribute], - nt: &Nonterminal, - tokens: Option<&tokenstream::LazyTokenStream>, -) -> Option { - if attrs.is_empty() { - return Some(tokens?.create_token_stream()); - } - let mut builder = tokenstream::TokenStreamBuilder::new(); - for attr in attrs { - // FIXME: Correctly handle tokens for inner attributes. - // For now, we fall back to reparsing the original AST node - if attr.style == ast::AttrStyle::Inner { - return Some(fake_token_stream(sess, nt)); - } - builder.push(attr.tokens()); - } - builder.push(tokens?.create_token_stream()); - Some(builder.build()) -} diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 95d4a48b845ef..4d54ca7f4b0fa 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -1,10 +1,11 @@ -use super::{AttrWrapper, Parser, PathStyle}; +use super::{Capturing, Parser, PathStyle}; use rustc_ast as ast; use rustc_ast::attr; use rustc_ast::token::{self, Nonterminal}; use rustc_ast_pretty::pprust; -use rustc_errors::{error_code, PResult}; +use rustc_errors::PResult; use rustc_span::{sym, Span}; +use std::convert::TryInto; use tracing::debug; @@ -15,7 +16,7 @@ pub enum InnerAttrPolicy<'a> { Forbidden { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option }, } -const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ +pub const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner 
attribute is not \ permitted in this context"; pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPolicy::Forbidden { @@ -25,58 +26,6 @@ pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPo }; impl<'a> Parser<'a> { - /// Parses attributes that appear before an item. - pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> { - let mut attrs: Vec = Vec::new(); - let mut just_parsed_doc_comment = false; - loop { - debug!("parse_outer_attributes: self.token={:?}", self.token); - let attr = if self.check(&token::Pound) { - let inner_error_reason = if just_parsed_doc_comment { - "an inner attribute is not permitted following an outer doc comment" - } else if !attrs.is_empty() { - "an inner attribute is not permitted following an outer attribute" - } else { - DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG - }; - let inner_parse_policy = InnerAttrPolicy::Forbidden { - reason: inner_error_reason, - saw_doc_comment: just_parsed_doc_comment, - prev_attr_sp: attrs.last().map(|a| a.span), - }; - just_parsed_doc_comment = false; - Some(self.parse_attribute(inner_parse_policy)?) - } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - if attr_style != ast::AttrStyle::Outer { - self.sess - .span_diagnostic - .struct_span_err_with_code( - self.token.span, - "expected outer doc comment", - error_code!(E0753), - ) - .note( - "inner doc comments like this (starting with \ - `//!` or `/*!`) can only appear before items", - ) - .emit(); - } - self.bump(); - just_parsed_doc_comment = true; - Some(attr::mk_doc_comment(comment_kind, attr_style, data, self.prev_token.span)) - } else { - None - }; - - if let Some(attr) = attr { - attrs.push(attr); - } else { - break; - } - } - Ok(AttrWrapper::new(attrs)) - } - /// Matches `attribute = # ! [ meta_item ]`. /// `inner_parse_policy` prescribes how to handle inner attributes. // Public for rustfmt usage. 
@@ -177,6 +126,7 @@ impl<'a> Parser<'a> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec> { let mut attrs: Vec = vec![]; loop { + let start_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap(); // Only try to parse if it is an inner attribute (has `!`). let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { Some(self.parse_attribute(InnerAttrPolicy::Permitted)?) @@ -191,6 +141,11 @@ impl<'a> Parser<'a> { None }; if let Some(attr) = attr { + let end_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap(); + if let Capturing::Yes { tokens_for_attrs } = &mut self.capture_state.capturing { + self.capture_state.replace_ranges.push((start_pos..end_pos, vec![])); + *tokens_for_attrs |= maybe_needs_tokens(std::slice::from_ref(&attr)); + } attrs.push(attr); } else { break; @@ -311,6 +266,9 @@ pub fn maybe_needs_tokens(attrs: &[ast::Attribute]) -> bool { // One of the attributes may either itself be a macro, // or expand to macro attributes (`cfg_attr`). 
attrs.iter().any(|attr| { + if attr.is_doc_comment() { + return false; + } attr.ident().map_or(true, |ident| { ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name) }) diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index f45d8d6c7a00e..71e7ede78459e 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,13 +1,21 @@ -use super::attr; -use super::{ForceCollect, Parser, TokenCursor, TrailingToken}; -use rustc_ast::token::{self, Token, TokenKind}; -use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing}; +use super::attr::{InnerAttrPolicy, DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG}; +use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; +use rustc_ast::token::{self, DelimToken, Token, TokenKind}; +use rustc_ast::tokenstream::{ + AttributesData, CreateTokenStream, PreexpTokenStream, PreexpTokenTree, +}; use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing}; use rustc_ast::AstLike; +use rustc_ast::AttrVec; use rustc_ast::{self as ast}; -use rustc_errors::PResult; +use rustc_errors::{error_code, PResult}; use rustc_span::{Span, DUMMY_SP}; +use std::convert::TryInto; +use std::ops::Range; + +use tracing::debug; + /// A wrapper type to ensure that the parser handles outer attributes correctly. /// When we parse outer attributes, we need to ensure that we capture tokens /// for the attribute target. 
This allows us to perform cfg-expansion on @@ -23,26 +31,169 @@ use rustc_span::{Span, DUMMY_SP}; /// cannot directly access the `attrs` field #[derive(Debug, Clone)] pub struct AttrWrapper { - attrs: Vec, + attrs: AttrVec, + start_pos: usize, } +// This struct is passed around very frequently, +// so make sure it doesn't accidentally get larger +#[cfg(target_arch = "x86_64")] +rustc_data_structures::static_assert_size!(AttrWrapper, 16); + impl AttrWrapper { pub fn empty() -> AttrWrapper { - AttrWrapper { attrs: vec![] } - } - pub fn new(attrs: Vec) -> AttrWrapper { - AttrWrapper { attrs } + AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX } } // FIXME: Delay span bug here? - pub(crate) fn take_for_recovery(self) -> Vec { + pub(crate) fn take_for_recovery(self) -> AttrVec { self.attrs } pub fn is_empty(&self) -> bool { self.attrs.is_empty() } + + pub fn maybe_needs_tokens(&self) -> bool { + crate::parser::attr::maybe_needs_tokens(&self.attrs) + } +} + +// Produces a `TokenStream` on-demand. Using `cursor_snapshot` +// and `num_calls`, we can reconstruct the `TokenStream` seen +// by the callback. This allows us to avoid producing a `TokenStream` +// if it is never needed - for example, a captured `macro_rules!` +// argument that is never passed to a proc macro. +// In practice token stream creation happens rarely compared to +// calls to `collect_tokens` (see some statistics in #78736), +// so we are doing as little up-front work as possible. +// +// This also makes `Parser` very cheap to clone, since +// there is no intermediate collection buffer to clone. 
+#[derive(Clone)] +struct LazyTokenStreamImpl { + start_token: (Token, Spacing), + cursor_snapshot: TokenCursor, + num_calls: u32, + desugar_doc_comments: bool, + append_unglued_token: Option<(Token, Spacing)>, + replace_ranges: Box<[ReplaceRange]>, +} + +impl CreateTokenStream for LazyTokenStreamImpl { + fn create_token_stream(&self) -> PreexpTokenStream { + let num_calls = self.num_calls; + // The token produced by the final call to `next` or `next_desugared` + // was not actually consumed by the callback. The combination + // of chaining the initial token and using `take` produces the desired + // result - we produce an empty `TokenStream` if no calls were made, + // and omit the final token otherwise. + let mut cursor_snapshot = self.cursor_snapshot.clone(); + let tokens = std::iter::once(self.start_token.clone()) + .chain((0..self.num_calls).map(|_| { + if self.desugar_doc_comments { + cursor_snapshot.next_desugared() + } else { + cursor_snapshot.next() + } + })) + .take(num_calls as usize) + .map(|(token, spacing)| (FlatToken::Token(token), spacing)); + + if !self.replace_ranges.is_empty() { + let mut tokens: Vec<_> = tokens.collect(); + + let mut replace_ranges = self.replace_ranges.clone(); + replace_ranges.sort_by_key(|(range, _)| range.start); + replace_ranges.reverse(); + + for (range, new_tokens) in replace_ranges.iter() { + assert!(!range.is_empty(), "Cannot replace an empty range: {:?}", range); + // Replace ranges are only allowed to decrease the number of tokens. + assert!( + range.len() >= new_tokens.len(), + "Range {:?} has greater len than {:?}", + range, + new_tokens + ); + + // Replace any removed tokens with `FlatToken::Empty`. + // This keeps the total length of `tokens` constant throughout the + // replacement process, allowing us to use all of the `ReplaceRanges` entries + // without adjusting indices. 
+ let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone)) + .take(range.len() - new_tokens.len()); + + tokens.splice( + (range.start as usize)..(range.end as usize), + new_tokens.clone().into_iter().chain(filler), + ); + } + + make_token_stream(tokens.into_iter(), self.append_unglued_token.clone()) + } else { + make_token_stream(tokens, self.append_unglued_token.clone()) + } + } } impl<'a> Parser<'a> { + /// Parses attributes that appear before an item. + pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> { + let mut attrs: Vec = Vec::new(); + let mut just_parsed_doc_comment = false; + let start_pos = self.token_cursor.num_next_calls; + loop { + debug!("parse_outer_attributes: self.token={:?}", self.token); + let attr = if self.check(&token::Pound) { + let inner_error_reason = if just_parsed_doc_comment { + "an inner attribute is not permitted following an outer doc comment" + } else if !attrs.is_empty() { + "an inner attribute is not permitted following an outer attribute" + } else { + DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG + }; + let inner_parse_policy = InnerAttrPolicy::Forbidden { + reason: inner_error_reason, + saw_doc_comment: just_parsed_doc_comment, + prev_attr_sp: attrs.last().map(|a| a.span), + }; + just_parsed_doc_comment = false; + Some(self.parse_attribute(inner_parse_policy)?) 
+ } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { + if attr_style != ast::AttrStyle::Outer { + self.sess + .span_diagnostic + .struct_span_err_with_code( + self.token.span, + "expected outer doc comment", + error_code!(E0753), + ) + .note( + "inner doc comments like this (starting with \ + `//!` or `/*!`) can only appear before items", + ) + .emit(); + } + self.bump(); + just_parsed_doc_comment = true; + Some(rustc_ast::attr::mk_doc_comment( + comment_kind, + attr_style, + data, + self.prev_token.span, + )) + } else { + None + }; + + if let Some(attr) = attr { + attrs.push(attr); + } else { + break; + } + } + Ok(AttrWrapper { attrs: attrs.into(), start_pos }) + } + /// Records all tokens consumed by the provided callback, /// including the current token. These tokens are collected /// into a `LazyTokenStream`, and returned along with the result @@ -65,77 +216,119 @@ impl<'a> Parser<'a> { force_collect: ForceCollect, f: impl FnOnce(&mut Self, Vec) -> PResult<'a, (R, TrailingToken)>, ) -> PResult<'a, R> { - if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) { - return Ok(f(self, attrs.attrs)?.0); + // We have no attributes that could observe the tokens, and there + are no enclosing `capture_tokens` calls that need our tokens for + eager expansion of attributes. + if matches!(force_collect, ForceCollect::No) + && !attrs.maybe_needs_tokens() + && !R::SUPPORTS_INNER_ATTRS + && !(matches!(self.capture_state.capturing, Capturing::Yes { tokens_for_attrs: true }) + && ast::ast_like::has_cfg_or_cfg_any(&attrs.attrs)) + { + return Ok(f(self, attrs.attrs.into())?.0); } + let start_token = (self.token.clone(), self.token_spacing); let cursor_snapshot = self.token_cursor.clone(); - let (mut ret, trailing_token) = f(self, attrs.attrs)?; - - // Produces a `TokenStream` on-demand. Using `cursor_snapshot` - // and `num_calls`, we can reconstruct the `TokenStream` seen - // by the callback.
This allows us to avoid producing a `TokenStream` - // if it is never needed - for example, a captured `macro_rules!` - // argument that is never passed to a proc macro. - // In practice token stream creation happens rarely compared to - // calls to `collect_tokens` (see some statistics in #78736), - // so we are doing as little up-front work as possible. - // - // This also makes `Parser` very cheap to clone, since - // there is no intermediate collection buffer to clone. - #[derive(Clone)] - struct LazyTokenStreamImpl { - start_token: (Token, Spacing), - cursor_snapshot: TokenCursor, - num_calls: usize, - desugar_doc_comments: bool, - append_unglued_token: Option, - } - impl CreateTokenStream for LazyTokenStreamImpl { - fn create_token_stream(&self) -> TokenStream { - // The token produced by the final call to `next` or `next_desugared` - // was not actually consumed by the callback. The combination - // of chaining the initial token and using `take` produces the desired - // result - we produce an empty `TokenStream` if no calls were made, - // and omit the final token otherwise. 
- let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = std::iter::once(self.start_token.clone()) - .chain((0..self.num_calls).map(|_| { - if self.desugar_doc_comments { - cursor_snapshot.next_desugared() - } else { - cursor_snapshot.next() - } - })) - .take(self.num_calls); - - make_token_stream(tokens, self.append_unglued_token.clone()) + let has_outer_attrs = !attrs.attrs.is_empty(); + let prev_capturing = self.capture_state.capturing; + let outer_attrs_needs_tokens = super::attr::maybe_needs_tokens(&attrs.attrs); + self.capture_state.capturing = match prev_capturing { + Capturing::No => Capturing::Yes { tokens_for_attrs: outer_attrs_needs_tokens }, + Capturing::Yes { tokens_for_attrs } => { + Capturing::Yes { tokens_for_attrs: tokens_for_attrs || outer_attrs_needs_tokens } } + }; + let replace_ranges_start = self.capture_state.replace_ranges.len(); + + let ret = f(self, attrs.attrs.into()); + + let replace_ranges_end = self.capture_state.replace_ranges.len(); + self.capture_state.capturing = prev_capturing; + + let (mut ret, trailing) = ret?; + + // We have no attributes that could observe the tokens, and there + are no enclosing `capture_tokens` calls that need our tokens for + eager expansion of attributes. + if matches!(force_collect, ForceCollect::No) + && !crate::parser::attr::maybe_needs_tokens(ret.attrs()) + // Subtle: We call `has_cfg_or_cfg_any` with the attrs from `ret`. + // This ensures that we consider inner attributes (e.g.
`#![cfg]`), + // which require us to have tokens available + // We also call `has_cfg_or_cfg_any` at the beginning of this function, + // but we only bail out if there's no possibility of inner attributes + // (!R::SUPPORTS_INNER_ATTRS) + && !(matches!(self.capture_state.capturing, Capturing:: Yes { tokens_for_attrs: true }) + && ast::ast_like::has_cfg_or_cfg_any(ret.attrs())) + { + return Ok(ret); } - let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls; - match trailing_token { + let cursor_snapshot_next_calls = cursor_snapshot.num_next_calls; + let mut end_pos = self.token_cursor.num_next_calls; + + match trailing { TrailingToken::None => {} TrailingToken::Semi => { assert_eq!(self.token.kind, token::Semi); - num_calls += 1; + end_pos += 1; } TrailingToken::MaybeComma => { if self.token.kind == token::Comma { - num_calls += 1; + end_pos += 1; } } } - let lazy_impl = LazyTokenStreamImpl { + let num_calls = end_pos - cursor_snapshot_next_calls; + + // Handle previous replace ranges + let replace_ranges: Box<[ReplaceRange]> = if ret.attrs().is_empty() { + Box::new([]) + } else { + let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap(); + self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end] + .iter() + .cloned() + .map(|(range, tokens)| { + ((range.start - start_calls)..(range.end - start_calls), tokens) + }) + .collect() + }; + + let tokens = LazyTokenStream::new(LazyTokenStreamImpl { start_token, - num_calls, + num_calls: num_calls.try_into().unwrap(), cursor_snapshot, desugar_doc_comments: self.desugar_doc_comments, append_unglued_token: self.token_cursor.append_unglued_token.clone(), - }; - ret.finalize_tokens(LazyTokenStream::new(lazy_impl)); + replace_ranges: replace_ranges.into(), + }); + + let final_attrs: Option = ret.finalize_tokens(tokens); + if let Some(final_attrs) = final_attrs { + if matches!(self.capture_state.capturing, Capturing::Yes { tokens_for_attrs: true }) { + let start_pos 
= + if has_outer_attrs { attrs.start_pos } else { cursor_snapshot_next_calls }; + let mut new_tokens = vec![(FlatToken::AttrTarget(final_attrs), Spacing::Alone)]; + if let Some((unglued, spacing)) = self.token_cursor.append_unglued_token.clone() { + end_pos += 1; + new_tokens.push((FlatToken::Token(unglued), spacing)); + } + let range: Range = + (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap()); + self.capture_state.replace_ranges.push((range, new_tokens)); + } + } + + // We only need replace ranges to handle `#[derive]`. If all of + // the outer calls to `capture_tokens` had no outer attributes, + // then we can't possibly have a `derive` + if !matches!(self.capture_state.capturing, Capturing::Yes { tokens_for_attrs: true }) { + self.capture_state.replace_ranges.clear(); + } Ok(ret) } } @@ -144,42 +337,61 @@ impl<'a> Parser<'a> { /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair /// of open and close delims. fn make_token_stream( - tokens: impl Iterator, - append_unglued_token: Option, -) -> TokenStream { + tokens: impl Iterator, + append_unglued_token: Option<(Token, Spacing)>, +) -> PreexpTokenStream { + //let orig_tokens = tokens.clone(); #[derive(Debug)] struct FrameData { open: Span, - inner: Vec<(TokenTree, Spacing)>, + open_delim: DelimToken, + inner: Vec<(PreexpTokenTree, Spacing)>, } - let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }]; + let mut stack = + vec![FrameData { open: DUMMY_SP, open_delim: DelimToken::NoDelim, inner: vec![] }]; for (token, spacing) in tokens { match token { - Token { kind: TokenKind::OpenDelim(_), span } => { - stack.push(FrameData { open: span, inner: vec![] }); + FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => { + stack.push(FrameData { open: span, open_delim: delim, inner: vec![] }); } - Token { kind: TokenKind::CloseDelim(delim), span } => { + FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => { let frame_data = 
stack.pop().expect("Token stack was empty!"); + if stack.is_empty() { + panic!("Popped token {:?} for last frame {:?}", token, frame_data); + } + assert_eq!( + frame_data.open_delim, delim, + "Mismatched open/close delims: open={:?} close={:?}", + frame_data.open, span + ); let dspan = DelimSpan::from_pair(frame_data.open, span); - let stream = TokenStream::new(frame_data.inner); - let delimited = TokenTree::Delimited(dspan, delim, stream); + let stream = PreexpTokenStream::new(frame_data.inner); + let delimited = PreexpTokenTree::Delimited(dspan, delim, stream); stack .last_mut() - .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!")) + .unwrap_or_else(|| { + panic!("Bottom token frame is missing for token: {:?}", token) + }) .inner .push((delimited, Spacing::Alone)); } - token => { - stack - .last_mut() - .expect("Bottom token frame is missing!") - .inner - .push((TokenTree::Token(token), spacing)); - } + FlatToken::Token(token) => stack + .last_mut() + .expect("Bottom token frame is missing!") + .inner + .push((PreexpTokenTree::Token(token), spacing)), + FlatToken::AttrTarget(data) => stack + .last_mut() + .expect("Bottom token frame is missing!") + .inner + .push((PreexpTokenTree::Attributes(data), spacing)), + FlatToken::Empty => {} } } let mut final_buf = stack.pop().expect("Missing final buf!"); - final_buf.inner.extend(append_unglued_token); + if let Some((append_unglued_token, spacing)) = append_unglued_token { + final_buf.inner.push((PreexpTokenTree::Token(append_unglued_token), spacing)); + } assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack); - TokenStream::new(final_buf.inner) + PreexpTokenStream::new(final_buf.inner) } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 4cc2224d27e42..d7155aa34f23e 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -19,8 +19,9 @@ pub use path::PathStyle; use 
rustc_ast::ptr::P; use rustc_ast::token::{self, DelimToken, Token, TokenKind}; +use rustc_ast::tokenstream::AttributesData; use rustc_ast::tokenstream::{self, DelimSpan, Spacing}; -use rustc_ast::tokenstream::{TokenStream, TokenTree, TreeAndSpacing}; +use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::DUMMY_NODE_ID; use rustc_ast::{self as ast, AnonConst, AstLike, AttrStyle, AttrVec, Const, CrateSugar, Extern}; use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe}; @@ -34,6 +35,7 @@ use rustc_span::source_map::{Span, DUMMY_SP}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use tracing::debug; +use std::ops::Range; use std::{cmp, mem, slice}; bitflags::bitflags! { @@ -134,6 +136,54 @@ pub struct Parser<'a> { pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, /// If present, this `Parser` is not parsing Rust code but rather a macro call. subparser_name: Option<&'static str>, + capture_state: CaptureState, +} + +pub type ReplaceRange = (Range, Vec<(FlatToken, Spacing)>); + +/// Controls how we capture tokens. Capturing can be expensive, +/// so we try to avoid performing capturing in cases where +/// we will never need a `PreexpTokenStream` +#[derive(Copy, Clone)] +pub enum Capturing { + /// We aren't performing any capturing - this is the default mode. + No, + /// We are capturing tokens + Yes { + /// 1. We are forcing collection of tokens for a macro_rules! matcher + + /// If `true`, we are capturing tokens because we've encountered + /// an attribute that may need them. In this mode, we will also + /// capture tokens for 'nested' items that have `#[cfg]` or `#[cfg_attr]`. + /// For example: + /// + /// ```rust + /// #[some_attr] + /// struct Foo { + /// val: u8, + /// #[cfg(FALSE)] removed: u8 + /// } + /// ``` + /// + /// Here, `some_attr` could be `derive` (imported under a different name). 
+ /// Therefore, we need to separately capture the tokens for `removed: u8` + /// to allow eager expansion. + /// + /// If `false`, we are capturing tokens for some other reason: + /// * We are forcing collection of tokens for a `macro_rules!` matcher + /// We are parsing something that might have inner attributes + /// (e.g. a function), and need to start capturing in case + /// we end up parsing custom inner attributes + /// + /// In that case, we don't need special handling for `#[cfg]` and `#[cfg_attr]` + tokens_for_attrs: bool, + }, +} + +#[derive(Clone)] +struct CaptureState { + capturing: Capturing, + replace_ranges: Vec, } impl<'a> Drop for Parser<'a> { @@ -171,7 +221,7 @@ struct TokenCursor { // field is used to track this token - it gets // appended to the captured stream when // we evaluate a `LazyTokenStream` - append_unglued_token: Option, + append_unglued_token: Option<(Token, Spacing)>, } #[derive(Clone)] @@ -385,6 +435,7 @@ impl<'a> Parser<'a> { last_unexpected_token_span: None, last_type_ascription: None, subparser_name, + capture_state: CaptureState { capturing: Capturing::No, replace_ranges: Vec::new() }, }; // Make parser point to the first token. @@ -606,8 +657,7 @@ impl<'a> Parser<'a> { // If we consume any additional tokens, then this token // is not needed (we'll capture the entire 'glued' token), // and `next_tok` will set this field to `None` - self.token_cursor.append_unglued_token = - Some((TokenTree::Token(self.token.clone()), Spacing::Alone)); + self.token_cursor.append_unglued_token = Some((self.token.clone(), Spacing::Alone)); // Use the spacing of the glued token as the spacing // of the unglued second token. 
self.bump_with((Token::new(second, second_span), self.token_spacing)); @@ -1287,3 +1337,10 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, sess: &Pa } } } + +#[derive(Debug, Clone)] +pub enum FlatToken { + Token(Token), + AttrTarget(AttributesData), + Empty, +} diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 07746f2390dd9..62e93cfae5e72 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -66,20 +66,15 @@ impl<'a> Parser<'a> { Ok(Some(if self.token.is_keyword(kw::Let) { self.parse_local_mk(lo, attrs, capture_semi, force_collect)? } else if self.is_kw_followed_by_ident(kw::Mut) { - self.recover_stmt_local( - lo, - attrs.take_for_recovery().into(), - "missing keyword", - "let mut", - )? + self.recover_stmt_local(lo, attrs, "missing keyword", "let mut")? } else if self.is_kw_followed_by_ident(kw::Auto) { self.bump(); // `auto` let msg = "write `let` instead of `auto` to introduce a new variable"; - self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")? + self.recover_stmt_local(lo, attrs, msg, "let")? } else if self.is_kw_followed_by_ident(sym::var) { self.bump(); // `var` let msg = "write `let` instead of `var` to introduce a new variable"; - self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")? + self.recover_stmt_local(lo, attrs, msg, "let")? } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() { // We have avoided contextual keywords like `union`, items with `crate` visibility, // or `auto trait` items. 
We aim to parse an arbitrary path `a::b` but not something @@ -182,7 +177,7 @@ impl<'a> Parser<'a> { fn recover_stmt_local( &mut self, lo: Span, - attrs: AttrVec, + attrs: AttrWrapper, msg: &str, sugg: &str, ) -> PResult<'a, Stmt> { @@ -212,9 +207,15 @@ impl<'a> Parser<'a> { }) } - fn recover_local_after_let(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, Stmt> { - let local = self.parse_local(attrs)?; - Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Local(local))) + fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> { + self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + let local = this.parse_local(attrs.into())?; + // FIXME - maybe capture semicolon in recovery? + Ok(( + this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)), + TrailingToken::None, + )) + }) } /// Parses a local variable declaration. diff --git a/src/test/ui/proc-macro/attr-complex-fn.rs b/src/test/ui/proc-macro/attr-complex-fn.rs new file mode 100644 index 0000000000000..3c6cff6bb3841 --- /dev/null +++ b/src/test/ui/proc-macro/attr-complex-fn.rs @@ -0,0 +1,23 @@ +// check-pass +// compile-flags: -Z span-debug --error-format human +// aux-build:test-macros.rs + +#![feature(stmt_expr_attributes)] +#![feature(custom_inner_attributes)] +#![feature(rustc_attrs)] + +#[macro_use] +extern crate test_macros; + +trait MyTrait {} +struct MyStruct; + +#[print_attr] +fn foo>>() {} + +impl MyTrait for MyStruct<{true}> { + #![print_attr] + #![rustc_dummy] +} + +fn main() {} diff --git a/src/test/ui/proc-macro/attr-complex-fn.stdout b/src/test/ui/proc-macro/attr-complex-fn.stdout new file mode 100644 index 0000000000000..f54a6ef41d414 --- /dev/null +++ b/src/test/ui/proc-macro/attr-complex-fn.stdout @@ -0,0 +1,171 @@ +PRINT-ATTR INPUT (DISPLAY): fn foo < T : MyTrait < MyStruct < { true } >> > () { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "fn", + span: $DIR/attr-complex-fn.rs:16:1: 16:3 (#0), + }, + Ident 
{ + ident: "foo", + span: $DIR/attr-complex-fn.rs:16:4: 16:7 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:16:7: 16:8 (#0), + }, + Ident { + ident: "T", + span: $DIR/attr-complex-fn.rs:16:8: 16:9 (#0), + }, + Punct { + ch: ':', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:16:9: 16:10 (#0), + }, + Ident { + ident: "MyTrait", + span: $DIR/attr-complex-fn.rs:16:11: 16:18 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:16:18: 16:19 (#0), + }, + Ident { + ident: "MyStruct", + span: $DIR/attr-complex-fn.rs:16:19: 16:27 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:16:27: 16:28 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "true", + span: $DIR/attr-complex-fn.rs:16:30: 16:34 (#0), + }, + ], + span: $DIR/attr-complex-fn.rs:16:28: 16:36 (#0), + }, + Punct { + ch: '>', + spacing: Joint, + span: $DIR/attr-complex-fn.rs:16:36: 16:38 (#0), + }, + Punct { + ch: '>', + spacing: Joint, + span: $DIR/attr-complex-fn.rs:16:36: 16:38 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:16:38: 16:39 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/attr-complex-fn.rs:16:39: 16:41 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/attr-complex-fn.rs:16:42: 16:44 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > { # ! 
[rustc_dummy] } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "impl", + span: $DIR/attr-complex-fn.rs:18:1: 18:5 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:18:5: 18:6 (#0), + }, + Ident { + ident: "T", + span: $DIR/attr-complex-fn.rs:18:6: 18:7 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:18:7: 18:8 (#0), + }, + Ident { + ident: "MyTrait", + span: $DIR/attr-complex-fn.rs:18:9: 18:16 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:18:16: 18:17 (#0), + }, + Ident { + ident: "T", + span: $DIR/attr-complex-fn.rs:18:17: 18:18 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:18:18: 18:19 (#0), + }, + Ident { + ident: "for", + span: $DIR/attr-complex-fn.rs:18:20: 18:23 (#0), + }, + Ident { + ident: "MyStruct", + span: $DIR/attr-complex-fn.rs:18:24: 18:32 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:18:32: 18:33 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "true", + span: $DIR/attr-complex-fn.rs:18:34: 18:38 (#0), + }, + ], + span: $DIR/attr-complex-fn.rs:18:33: 18:39 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:18:39: 18:40 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/attr-complex-fn.rs:20:5: 20:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/attr-complex-fn.rs:20:6: 20:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/attr-complex-fn.rs:20:8: 20:19 (#0), + }, + ], + span: $DIR/attr-complex-fn.rs:20:7: 20:20 (#0), + }, + ], + span: $DIR/attr-complex-fn.rs:18:41: 21:2 (#0), + }, +] diff --git a/src/test/ui/proc-macro/attribute-after-derive.stdout b/src/test/ui/proc-macro/attribute-after-derive.stdout index 11f492353271a..3255cab54ce0d 100644 --- 
a/src/test/ui/proc-macro/attribute-after-derive.stdout +++ b/src/test/ui/proc-macro/attribute-after-derive.stdout @@ -87,32 +87,32 @@ PRINT-DERIVE INPUT (DISPLAY): struct AttributeDerive { } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0), + span: $DIR/attribute-after-derive.rs:18:1: 18:7 (#0), }, Ident { ident: "AttributeDerive", - span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0), + span: $DIR/attribute-after-derive.rs:18:8: 18:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0), + span: $DIR/attribute-after-derive.rs:18:24: 21:2 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): struct DeriveAttribute { } PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:25:1: 25:7 (#0), }, Ident { ident: "DeriveAttribute", - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:25:8: 25:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:25:24: 28:2 (#0), }, ] PRINT-DERIVE INPUT (DISPLAY): #[print_attr] struct DeriveAttribute { } @@ -120,29 +120,29 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:24:1: 24:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_attr", - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:24:3: 24:13 (#0), }, ], - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:24:2: 24:14 (#0), }, Ident { ident: "struct", - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:25:1: 25:7 (#0), }, Ident { ident: 
"DeriveAttribute", - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:25:8: 25:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0), + span: $DIR/attribute-after-derive.rs:25:24: 28:2 (#0), }, ] diff --git a/src/test/ui/proc-macro/auxiliary/test-macros.rs b/src/test/ui/proc-macro/auxiliary/test-macros.rs index a7ed4bc88250d..c7daefe528f09 100644 --- a/src/test/ui/proc-macro/auxiliary/test-macros.rs +++ b/src/test/ui/proc-macro/auxiliary/test-macros.rs @@ -128,6 +128,13 @@ pub fn print_attr_args(args: TokenStream, input: TokenStream) -> TokenStream { input } +#[proc_macro_attribute] +pub fn print_target_and_args(args: TokenStream, input: TokenStream) -> TokenStream { + print_helper(args, "ATTR_ARGS"); + print_helper(input.clone(), "ATTR"); + input +} + #[proc_macro_derive(Print, attributes(print_helper))] pub fn print_derive(input: TokenStream) -> TokenStream { print_helper(input, "DERIVE"); diff --git a/src/test/ui/proc-macro/inner-attrs.rs b/src/test/ui/proc-macro/inner-attrs.rs new file mode 100644 index 0000000000000..0240ebc7b6e5d --- /dev/null +++ b/src/test/ui/proc-macro/inner-attrs.rs @@ -0,0 +1,17 @@ +// check-pass +// compile-flags: -Z span-debug --error-format human +// aux-build:test-macros.rs + +#![feature(custom_inner_attributes)] + +#[macro_use] +extern crate test_macros; + +#[print_target_and_args(first)] +#[print_target_and_args(second)] +fn foo() { + #![print_target_and_args(third)] + #![print_target_and_args(fourth)] +} + +fn main() {} diff --git a/src/test/ui/proc-macro/inner-attrs.stdout b/src/test/ui/proc-macro/inner-attrs.stdout new file mode 100644 index 0000000000000..2bd9818b13be1 --- /dev/null +++ b/src/test/ui/proc-macro/inner-attrs.stdout @@ -0,0 +1,292 @@ +PRINT-ATTR_ARGS INPUT (DISPLAY): first +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "first", + span: $DIR/inner-attrs.rs:10:25: 10:30 (#0), + }, +] 
+PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second)] fn foo() +{ # ! [print_target_and_args(third)] # ! [print_target_and_args(fourth)] } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/inner-attrs.rs:11:1: 11:2 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/inner-attrs.rs:11:3: 11:24 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "second", + span: $DIR/inner-attrs.rs:11:25: 11:31 (#0), + }, + ], + span: $DIR/inner-attrs.rs:11:24: 11:32 (#0), + }, + ], + span: $DIR/inner-attrs.rs:11:2: 11:33 (#0), + }, + Ident { + ident: "fn", + span: $DIR/inner-attrs.rs:12:1: 12:3 (#0), + }, + Ident { + ident: "foo", + span: $DIR/inner-attrs.rs:12:4: 12:7 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/inner-attrs.rs:12:7: 12:9 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/inner-attrs.rs:13:5: 13:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/inner-attrs.rs:13:6: 13:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/inner-attrs.rs:13:8: 13:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "third", + span: $DIR/inner-attrs.rs:13:30: 13:35 (#0), + }, + ], + span: $DIR/inner-attrs.rs:13:29: 13:36 (#0), + }, + ], + span: $DIR/inner-attrs.rs:13:7: 13:37 (#0), + }, + Punct { + ch: '#', + spacing: Joint, + span: $DIR/inner-attrs.rs:14:5: 14:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/inner-attrs.rs:14:6: 14:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/inner-attrs.rs:14:8: 14:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "fourth", 
+ span: $DIR/inner-attrs.rs:14:30: 14:36 (#0), + }, + ], + span: $DIR/inner-attrs.rs:14:29: 14:37 (#0), + }, + ], + span: $DIR/inner-attrs.rs:14:7: 14:38 (#0), + }, + ], + span: $DIR/inner-attrs.rs:12:10: 15:2 (#0), + }, +] +PRINT-ATTR_ARGS INPUT (DISPLAY): second +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "second", + span: $DIR/inner-attrs.rs:11:25: 11:31 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): fn foo() +{ # ! [print_target_and_args(third)] # ! [print_target_and_args(fourth)] } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "fn", + span: $DIR/inner-attrs.rs:12:1: 12:3 (#0), + }, + Ident { + ident: "foo", + span: $DIR/inner-attrs.rs:12:4: 12:7 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/inner-attrs.rs:12:7: 12:9 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/inner-attrs.rs:13:5: 13:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/inner-attrs.rs:13:6: 13:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/inner-attrs.rs:13:8: 13:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "third", + span: $DIR/inner-attrs.rs:13:30: 13:35 (#0), + }, + ], + span: $DIR/inner-attrs.rs:13:29: 13:36 (#0), + }, + ], + span: $DIR/inner-attrs.rs:13:7: 13:37 (#0), + }, + Punct { + ch: '#', + spacing: Joint, + span: $DIR/inner-attrs.rs:14:5: 14:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/inner-attrs.rs:14:6: 14:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/inner-attrs.rs:14:8: 14:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "fourth", + span: $DIR/inner-attrs.rs:14:30: 14:36 (#0), + }, + ], + span: $DIR/inner-attrs.rs:14:29: 14:37 (#0), + }, + ], + span: 
$DIR/inner-attrs.rs:14:7: 14:38 (#0), + }, + ], + span: $DIR/inner-attrs.rs:12:10: 15:2 (#0), + }, +] +PRINT-ATTR_ARGS INPUT (DISPLAY): third +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "third", + span: $DIR/inner-attrs.rs:13:30: 13:35 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): fn foo() { # ! [print_target_and_args(fourth)] } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "fn", + span: $DIR/inner-attrs.rs:12:1: 12:3 (#0), + }, + Ident { + ident: "foo", + span: $DIR/inner-attrs.rs:12:4: 12:7 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/inner-attrs.rs:12:7: 12:9 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/inner-attrs.rs:14:5: 14:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/inner-attrs.rs:14:6: 14:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/inner-attrs.rs:14:8: 14:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "fourth", + span: $DIR/inner-attrs.rs:14:30: 14:36 (#0), + }, + ], + span: $DIR/inner-attrs.rs:14:29: 14:37 (#0), + }, + ], + span: $DIR/inner-attrs.rs:14:7: 14:38 (#0), + }, + ], + span: $DIR/inner-attrs.rs:12:10: 15:2 (#0), + }, +] +PRINT-ATTR_ARGS INPUT (DISPLAY): fourth +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "fourth", + span: $DIR/inner-attrs.rs:14:30: 14:36 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): fn foo() { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "fn", + span: $DIR/inner-attrs.rs:12:1: 12:3 (#0), + }, + Ident { + ident: "foo", + span: $DIR/inner-attrs.rs:12:4: 12:7 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/inner-attrs.rs:12:7: 12:9 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/inner-attrs.rs:12:10: 15:2 (#0), + }, +] diff --git 
a/src/test/ui/proc-macro/issue-75930-derive-cfg.rs b/src/test/ui/proc-macro/issue-75930-derive-cfg.rs index 649e731840337..53ecaf5bc2e0e 100644 --- a/src/test/ui/proc-macro/issue-75930-derive-cfg.rs +++ b/src/test/ui/proc-macro/issue-75930-derive-cfg.rs @@ -8,7 +8,6 @@ // We need '--error-format human' to stop compiletest from // trying to interpret proc-macro output as JSON messages // (a pretty-printed struct may cause a line to start with '{' ) -// FIXME: We currently lose spans here (see issue #43081) #[macro_use] extern crate test_macros; @@ -58,6 +57,10 @@ struct Foo<#[cfg(FALSE)] A, B> { u8 ); + fn plain_removed_fn() { + #![cfg_attr(not(FALSE), cfg(FALSE))] + } + 0 }], #[print_helper(d)] diff --git a/src/test/ui/proc-macro/issue-75930-derive-cfg.stderr b/src/test/ui/proc-macro/issue-75930-derive-cfg.stderr index 5227da7d76677..74923ba85b879 100644 --- a/src/test/ui/proc-macro/issue-75930-derive-cfg.stderr +++ b/src/test/ui/proc-macro/issue-75930-derive-cfg.stderr @@ -1,5 +1,5 @@ warning: derive helper attribute is used before it is introduced - --> $DIR/issue-75930-derive-cfg.rs:16:3 + --> $DIR/issue-75930-derive-cfg.rs:15:3 | LL | #[print_helper(a)] | ^^^^^^^^^^^^ diff --git a/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout b/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout index 15e63c20eb9bc..3c07d63dcb5cd 100644 --- a/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout +++ b/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout @@ -19,165 +19,166 @@ struct Foo < #[cfg(FALSE)] A, B > #[cfg(FALSE)] String, u8) } struct TupleStruct(#[cfg(FALSE)] String, #[cfg(not(FALSE))] i32, - #[cfg(FALSE)] bool, u8) ; 0 + #[cfg(FALSE)] bool, u8) ; fn plain_removed_fn() + { # ! 
[cfg_attr(not(FALSE), cfg(FALSE))] } 0 }], #[print_helper(d)] fourth : B } PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:1: 15:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:16:3: 16:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:3: 15:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "a", - span: $DIR/issue-75930-derive-cfg.rs:16:16: 16:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:16: 15:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:16:15: 16:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:15: 15:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:16:2: 16:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:2: 15:19 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:18:1: 18:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:18:24: 18:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:24: 17:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "dead_code", - span: $DIR/issue-75930-derive-cfg.rs:18:30: 18:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:30: 17:39 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:18:29: 18:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:29: 17:40 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:18:1: 18:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "derive", - span: $DIR/issue-75930-derive-cfg.rs:20:3: 20:9 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:19:3: 19:9 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "Print", - span: $DIR/issue-75930-derive-cfg.rs:20:10: 20:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:19:10: 19:15 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:20:9: 20:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:19:9: 19:16 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:20:2: 20:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:19:2: 19:17 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:21:3: 21:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:3: 20:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "b", - span: $DIR/issue-75930-derive-cfg.rs:21:16: 21:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:16: 20:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:21:15: 21:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:15: 20:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:21:2: 21:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:2: 20:19 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 22:7 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:7 (#0), }, Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:22:8: 22:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:8: 21:11 (#0), }, Punct { ch: '<', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:22:11: 22:12 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:11: 21:12 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:12: 22:13 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:12: 21:13 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:22:14: 22:17 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:21:14: 21:17 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:22:18: 22:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:18: 21:23 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:17: 22:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:17: 21:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:13: 22:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:13: 21:25 (#0), }, Ident { ident: "A", - span: $DIR/issue-75930-derive-cfg.rs:22:26: 22:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:26: 21:27 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:27: 22:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:27: 21:28 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:22:29: 22:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:29: 21:30 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:30: 22:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:30: 21:31 (#0), }, Group { delimiter: Brace, @@ -185,128 +186,128 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:23:5: 23:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:5: 22:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:23:7: 23:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:7: 22:10 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:23:11: 23:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:11: 22:16 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:23:10: 23:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:10: 22:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:23:6: 23:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:6: 22:18 (#0), }, Ident { ident: "first", - span: 
$DIR/issue-75930-derive-cfg.rs:23:19: 23:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:19: 22:24 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:23:24: 23:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:24: 22:25 (#0), }, Ident { ident: "String", - span: $DIR/issue-75930-derive-cfg.rs:23:26: 23:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:26: 22:32 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:23:32: 23:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:22:32: 22:33 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:24:5: 24:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:5: 23:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/issue-75930-derive-cfg.rs:24:7: 24:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:7: 23:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:24:16: 24:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:16: 23:21 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:24:21: 24:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:21: 23:22 (#0), }, Ident { ident: "deny", - span: $DIR/issue-75930-derive-cfg.rs:24:23: 24:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:23: 23:27 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "warnings", - span: $DIR/issue-75930-derive-cfg.rs:24:28: 24:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:28: 23:36 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:24:27: 24:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:27: 23:37 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:24:15: 24:38 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:15: 23:38 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:24:6: 24:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:6: 23:39 (#0), }, Ident { ident: "second", - span: 
$DIR/issue-75930-derive-cfg.rs:24:40: 24:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:40: 23:46 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:24:46: 24:47 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:46: 23:47 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:24:48: 24:52 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:48: 23:52 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:24:52: 24:53 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:52: 23:53 (#0), }, Ident { ident: "third", - span: $DIR/issue-75930-derive-cfg.rs:25:5: 25:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:5: 24:10 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:25:10: 25:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:10: 24:11 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:25:13: 25:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:13: 24:15 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:25:15: 25:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:15: 24:16 (#0), }, Group { delimiter: Brace, @@ -314,145 +315,145 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:26:9: 26:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:9: 25:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:26:11: 26:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:11: 25:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:26:15: 26:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:15: 25:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:26:14: 26:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:14: 25:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:26:10: 26:22 
(#0), + span: $DIR/issue-75930-derive-cfg.rs:25:10: 25:22 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:26:23: 26:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:23: 25:29 (#0), }, Ident { ident: "Bar", - span: $DIR/issue-75930-derive-cfg.rs:26:30: 26:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:30: 25:33 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:26:33: 26:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:25:33: 25:34 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:27:9: 27:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:9: 26:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:27:11: 27:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:11: 26:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:27:15: 27:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:15: 26:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:27:19: 27:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:19: 26:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:27:18: 27:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:18: 26:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:27:14: 27:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:14: 26:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:27:10: 27:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:10: 26:27 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:27:28: 27:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:28: 26:34 (#0), }, Ident { ident: "Inner", - span: $DIR/issue-75930-derive-cfg.rs:27:35: 27:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:35: 26:40 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:27:40: 27:41 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:26:40: 26:41 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:28:9: 28:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:9: 27:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:28:11: 28:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:11: 27:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:28:15: 28:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:15: 27:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:28:14: 28:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:14: 27:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:28:10: 28:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:10: 27:22 (#0), }, Ident { ident: "let", - span: $DIR/issue-75930-derive-cfg.rs:28:23: 28:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:23: 27:26 (#0), }, Ident { ident: "a", - span: $DIR/issue-75930-derive-cfg.rs:28:27: 28:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:27: 27:28 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:28:29: 28:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:29: 27:30 (#0), }, Literal { kind: Integer, symbol: "25", suffix: None, - span: $DIR/issue-75930-derive-cfg.rs:28:31: 28:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:31: 27:33 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:28:33: 28:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:27:33: 27:34 (#0), }, Ident { ident: "match", - span: $DIR/issue-75930-derive-cfg.rs:29:9: 29:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:28:9: 28:14 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:29:15: 29:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:28:15: 28:19 (#0), }, Group { delimiter: Brace, @@ -460,194 +461,194 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - 
span: $DIR/issue-75930-derive-cfg.rs:30:13: 30:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:13: 29:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:30:15: 30:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:15: 29:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:30:19: 30:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:19: 29:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:30:18: 30:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:18: 29:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:30:14: 30:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:14: 29:26 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:30:27: 30:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:27: 29:31 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:30:32: 30:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:32: 29:34 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:30:32: 30:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:32: 29:34 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:30:35: 30:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:35: 29:37 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:30:37: 30:38 (#0), + span: $DIR/issue-75930-derive-cfg.rs:29:37: 29:38 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:31:13: 31:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:13: 30:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/issue-75930-derive-cfg.rs:31:15: 31:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:15: 30:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:31:24: 31:27 (#0), + 
span: $DIR/issue-75930-derive-cfg.rs:30:24: 30:27 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:31:28: 31:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:28: 30:33 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:31:27: 31:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:27: 30:34 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:31:34: 31:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:34: 30:35 (#0), }, Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:31:36: 31:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:36: 30:41 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "warnings", - span: $DIR/issue-75930-derive-cfg.rs:31:42: 31:50 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:42: 30:50 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:31:41: 31:51 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:41: 30:51 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:31:23: 31:52 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:23: 30:52 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:31:14: 31:53 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:14: 30:53 (#0), }, Ident { ident: "false", - span: $DIR/issue-75930-derive-cfg.rs:31:54: 31:59 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:54: 30:59 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:31:60: 31:62 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:60: 30:62 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:31:60: 31:62 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:60: 30:62 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:31:63: 31:65 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:63: 30:65 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:31:65: 31:66 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:65: 30:66 
(#0), }, Ident { ident: "_", - span: $DIR/issue-75930-derive-cfg.rs:32:13: 32:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:13: 31:14 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:32:15: 32:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:15: 31:17 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:32:15: 32:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:15: 31:17 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:32:18: 32:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:18: 31:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:29:20: 33:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:28:20: 32:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:33:10: 33:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:32:10: 32:11 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:35:9: 35:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:34:9: 34:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:35:11: 35:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:34:11: 34:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "should_be_removed", - span: $DIR/issue-75930-derive-cfg.rs:35:24: 35:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:34:24: 34:41 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:35:23: 35:42 (#0), + span: $DIR/issue-75930-derive-cfg.rs:34:23: 34:42 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:35:10: 35:43 (#0), + span: $DIR/issue-75930-derive-cfg.rs:34:10: 34:43 (#0), }, Ident { ident: "fn", - span: $DIR/issue-75930-derive-cfg.rs:36:9: 36:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:35:9: 35:11 (#0), }, Ident { ident: "removed_fn", - span: $DIR/issue-75930-derive-cfg.rs:36:12: 36:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:35:12: 35:22 (#0), }, Group { 
delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:36:22: 36:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:35:22: 35:24 (#0), }, Group { delimiter: Brace, @@ -655,108 +656,108 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:37:13: 37:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:36:13: 36:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:37:14: 37:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:36:14: 36:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:37:16: 37:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:36:16: 36:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:37:20: 37:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:36:20: 36:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:37:19: 37:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:36:19: 36:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:37:15: 37:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:36:15: 36:27 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:36:25: 38:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:35:25: 37:10 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:40:9: 40:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:9: 39:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:40:11: 40:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:11: 39:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "c", - span: $DIR/issue-75930-derive-cfg.rs:40:24: 40:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:24: 39:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:40:23: 40:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:23: 39:26 (#0), }, ], - span: 
$DIR/issue-75930-derive-cfg.rs:40:10: 40:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:10: 39:27 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:40:28: 40:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:28: 39:29 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:40:30: 40:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:30: 39:33 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:40:34: 40:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:34: 39:37 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:40:38: 40:43 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:38: 39:43 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:40:37: 40:44 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:37: 39:44 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:40:33: 40:45 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:33: 39:45 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:40:29: 40:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:29: 39:46 (#0), }, Ident { ident: "fn", - span: $DIR/issue-75930-derive-cfg.rs:40:47: 40:49 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:47: 39:49 (#0), }, Ident { ident: "kept_fn", - span: $DIR/issue-75930-derive-cfg.rs:40:50: 40:57 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:50: 39:57 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:40:57: 40:59 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:57: 39:59 (#0), }, Group { delimiter: Brace, @@ -764,82 +765,82 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:41:13: 41:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:13: 40:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:41:14: 41:15 
(#0), + span: $DIR/issue-75930-derive-cfg.rs:40:14: 40:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:41:16: 41:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:16: 40:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:41:20: 41:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:20: 40:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:41:24: 41:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:24: 40:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:41:23: 41:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:23: 40:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:41:19: 41:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:19: 40:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:41:15: 41:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:15: 40:32 (#0), }, Ident { ident: "let", - span: $DIR/issue-75930-derive-cfg.rs:42:13: 42:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:13: 41:16 (#0), }, Ident { ident: "my_val", - span: $DIR/issue-75930-derive-cfg.rs:42:17: 42:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:17: 41:23 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:42:24: 42:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:24: 41:25 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:42:26: 42:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:26: 41:30 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:42:30: 42:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:30: 41:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:40:60: 43:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:60: 42:10 (#0), }, Ident { ident: "enum", - span: $DIR/issue-75930-derive-cfg.rs:45:9: 45:13 (#0), + span: $DIR/issue-75930-derive-cfg.rs:44:9: 44:13 
(#0), }, Ident { ident: "TupleEnum", - span: $DIR/issue-75930-derive-cfg.rs:45:14: 45:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:44:14: 44:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:46:13: 46:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:45:13: 45:16 (#0), }, Group { delimiter: Parenthesis, @@ -847,166 +848,166 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:47:17: 47:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:17: 46:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:47:19: 47:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:19: 46:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:47:23: 47:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:23: 46:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:47:22: 47:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:22: 46:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:47:18: 47:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:18: 46:30 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:47:31: 47:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:31: 46:33 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:47:33: 47:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:33: 46:34 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:48:17: 48:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:47:17: 47:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:48:19: 48:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:47:19: 47:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:48:23: 48:28 (#0), + 
span: $DIR/issue-75930-derive-cfg.rs:47:23: 47:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:48:22: 48:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:47:22: 47:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:48:18: 48:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:47:18: 47:30 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:48:31: 48:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:47:31: 47:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:48:35: 48:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:47:35: 47:36 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:49:17: 49:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:17: 48:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:49:19: 49:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:19: 48:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:49:23: 49:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:23: 48:26 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:49:27: 49:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:27: 48:32 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:26: 49:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:26: 48:33 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:22: 49:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:22: 48:34 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:18: 49:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:18: 48:35 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:49:36: 49:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:36: 48:39 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:49:39: 49:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:39: 48:40 (#0), }, Punct { 
ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:50:17: 50:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:17: 49:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:50:19: 50:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:19: 49:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:50:23: 50:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:23: 49:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:50:22: 50:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:22: 49:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:50:18: 50:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:18: 49:30 (#0), }, Ident { ident: "String", - span: $DIR/issue-75930-derive-cfg.rs:50:31: 50:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:31: 49:37 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:50:37: 50:38 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:37: 49:38 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:50:39: 50:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:39: 49:41 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:46:16: 51:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:45:16: 50:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:45:24: 52:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:44:24: 51:10 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:54:9: 54:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:9: 53:15 (#0), }, Ident { ident: "TupleStruct", - span: $DIR/issue-75930-derive-cfg.rs:54:16: 54:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:16: 53:27 (#0), }, Group { delimiter: Parenthesis, @@ -1014,184 +1015,262 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:55:13: 55:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:13: 54:14 (#0), }, 
Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:55:15: 55:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:15: 54:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:55:19: 55:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:19: 54:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:55:18: 55:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:18: 54:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:55:14: 55:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:14: 54:26 (#0), }, Ident { ident: "String", - span: $DIR/issue-75930-derive-cfg.rs:55:27: 55:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:27: 54:33 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:55:33: 55:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:33: 54:34 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:56:13: 56:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:13: 55:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:56:15: 56:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:15: 55:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:56:19: 56:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:19: 55:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:56:23: 56:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:23: 55:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:56:22: 56:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:22: 55:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:56:18: 56:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:18: 55:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:56:14: 56:31 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:55:14: 55:31 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:56:32: 56:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:32: 55:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:56:35: 56:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:35: 55:36 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:13: 57:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:13: 56:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:57:15: 57:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:15: 56:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:57:19: 57:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:19: 56:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:57:18: 57:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:18: 56:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:57:14: 57:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:14: 56:26 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:57:27: 57:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:27: 56:31 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:31: 57:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:31: 56:32 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:58:13: 58:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:13: 57:15 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:54:27: 59:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:27: 58:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:59:10: 59:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:10: 58:11 (#0), + }, + Ident { + ident: "fn", + span: $DIR/issue-75930-derive-cfg.rs:60:9: 60:11 (#0), + }, + Ident { + ident: "plain_removed_fn", + span: 
$DIR/issue-75930-derive-cfg.rs:60:12: 60:28 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/issue-75930-derive-cfg.rs:60:28: 60:30 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/issue-75930-derive-cfg.rs:61:13: 61:14 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/issue-75930-derive-cfg.rs:61:14: 61:15 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "cfg_attr", + span: $DIR/issue-75930-derive-cfg.rs:61:16: 61:24 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "not", + span: $DIR/issue-75930-derive-cfg.rs:61:25: 61:28 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "FALSE", + span: $DIR/issue-75930-derive-cfg.rs:61:29: 61:34 (#0), + }, + ], + span: $DIR/issue-75930-derive-cfg.rs:61:28: 61:35 (#0), + }, + Punct { + ch: ',', + spacing: Alone, + span: $DIR/issue-75930-derive-cfg.rs:61:35: 61:36 (#0), + }, + Ident { + ident: "cfg", + span: $DIR/issue-75930-derive-cfg.rs:61:37: 61:40 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "FALSE", + span: $DIR/issue-75930-derive-cfg.rs:61:41: 61:46 (#0), + }, + ], + span: $DIR/issue-75930-derive-cfg.rs:61:40: 61:47 (#0), + }, + ], + span: $DIR/issue-75930-derive-cfg.rs:61:24: 61:48 (#0), + }, + ], + span: $DIR/issue-75930-derive-cfg.rs:61:15: 61:49 (#0), + }, + ], + span: $DIR/issue-75930-derive-cfg.rs:60:31: 62:10 (#0), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: $DIR/issue-75930-derive-cfg.rs:61:9: 61:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:9: 64:10 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:25:17: 62:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:17: 65:6 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:25:12: 62:7 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:12: 65:7 (#0), }, Punct { ch: ',', 
spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:62:7: 62:8 (#0), + span: $DIR/issue-75930-derive-cfg.rs:65:7: 65:8 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:63:5: 63:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:5: 66:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:63:7: 63:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:7: 66:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "d", - span: $DIR/issue-75930-derive-cfg.rs:63:20: 63:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:20: 66:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:63:19: 63:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:19: 66:22 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:63:6: 63:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:6: 66:23 (#0), }, Ident { ident: "fourth", - span: $DIR/issue-75930-derive-cfg.rs:64:5: 64:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:5: 67:11 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:11: 64:12 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:11: 67:12 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:64:13: 64:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:13: 67:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:32: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:32: 68:2 (#0), }, ] PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_helper(b)] struct Foo < B > @@ -1200,152 +1279,152 @@ PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_hel [u8 ; { #[cfg(not(FALSE))] struct Inner ; match true - { #[allow(warnings)] false => { } _ => { } } ; #[print_helper(c)] + { #[allow(warnings)] false => { }, _ => { } } ; #[print_helper(c)] #[cfg(not(FALSE))] fn kept_fn() { # ! 
[cfg(not(FALSE))] let my_val = true ; } enum TupleEnum - { Foo(#[cfg(not(FALSE))] i32, u8), } struct + { Foo(#[cfg(not(FALSE))] i32, u8) } struct TupleStruct(#[cfg(not(FALSE))] i32, u8) ; 0 - }], #[print_helper(d)] fourth : B, + }], #[print_helper(d)] fourth : B } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:1: 15:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:3: 15:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "a", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:16: 15:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:15: 15:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:15:2: 15:19 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:24: 17:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "dead_code", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:30: 17:39 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:29: 17:40 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:2 
(#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:3: 20:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "b", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:16: 20:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:15: 20:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:2: 20:19 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:7 (#0), }, Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:8: 21:11 (#0), }, Punct { ch: '<', - spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + spacing: Joint, + span: $DIR/issue-75930-derive-cfg.rs:21:11: 21:12 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:29: 21:30 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:30: 21:31 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "second", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:40: 23:46 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:46: 23:47 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:48: 23:52 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:23:52: 23:53 (#0), 
}, Ident { ident: "third", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:5: 24:10 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:10: 24:11 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:13: 24:15 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:15: 24:16 (#0), }, Group { delimiter: Brace, @@ -1353,58 +1432,58 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:9: 26:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:11: 26:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:15: 26:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:19: 26:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:18: 26:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:14: 26:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:10: 26:27 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:28: 26:34 (#0), }, Ident { ident: "Inner", - span: 
$DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:35: 26:40 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:26:40: 26:41 (#0), }, Ident { ident: "match", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:28:9: 28:14 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:28:15: 28:19 (#0), }, Group { delimiter: Brace, @@ -1412,146 +1491,151 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:13: 30:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:36: 30:41 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "warnings", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:42: 30:50 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:41: 30:51 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:13: 30:14 (#0), }, Ident { ident: "false", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:54: 30:59 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:60: 30:62 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:30:60: 30:62 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:30:63: 30:65 (#0), + }, + Punct { + ch: ',', + spacing: Alone, + span: $DIR/issue-75930-derive-cfg.rs:30:65: 30:66 (#0), }, Ident { ident: "_", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:13: 31:14 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:15: 31:17 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:15: 31:17 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:31:18: 31:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:28:20: 32:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:32:10: 32:11 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:9: 39:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:11: 39:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "c", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:24: 39:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:23: 39:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:10: 39:27 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:28: 39:29 (#0), }, Group { delimiter: Bracket, stream: 
TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:30: 39:33 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:34: 39:37 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:38: 39:43 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:37: 39:44 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:33: 39:45 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:29: 39:46 (#0), }, Ident { ident: "fn", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:47: 39:49 (#0), }, Ident { ident: "kept_fn", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:50: 39:57 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:57: 39:59 (#0), }, Group { delimiter: Brace, @@ -1559,82 +1643,82 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:13: 40:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:14: 40:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:16: 40:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { 
ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:20: 40:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:24: 40:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:23: 40:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:19: 40:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:40:15: 40:32 (#0), }, Ident { ident: "let", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:13: 41:16 (#0), }, Ident { ident: "my_val", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:17: 41:23 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:24: 41:25 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:26: 41:30 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:41:30: 41:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:39:60: 42:10 (#0), }, Ident { ident: "enum", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:44:9: 44:13 (#0), }, Ident { ident: "TupleEnum", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:44:14: 44:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:45:13: 45:16 (#0), }, 
Group { delimiter: Parenthesis, @@ -1642,69 +1726,64 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:17: 48:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:19: 48:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:23: 48:26 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:27: 48:32 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:26: 48:33 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:22: 48:34 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:18: 48:35 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:36: 48:39 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:48:39: 48:40 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:49:39: 49:41 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), - }, - Punct { - ch: ',', - spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:45:16: 50:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:44:24: 51:10 (#0), }, Ident { ident: "struct", - span: 
$DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:9: 53:15 (#0), }, Ident { ident: "TupleStruct", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:16: 53:27 (#0), }, Group { delimiter: Parenthesis, @@ -1712,120 +1791,115 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:13: 55:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:15: 55:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:19: 55:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:23: 55:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:22: 55:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:18: 55:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:14: 55:31 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:32: 55:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:35: 55:36 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:13: 57:15 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:27: 58:10 (#0), }, Punct { 
ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:10: 58:11 (#0), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:9: 64:10 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:17: 65:6 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:24:12: 65:7 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:65:7: 65:8 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:5: 66:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:7: 66:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "d", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:20: 66:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:19: 66:22 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:66:6: 66:23 (#0), }, Ident { ident: "fourth", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:5: 67:11 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:11: 67:12 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), - }, - Punct { - ch: ',', - spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:13: 67:14 (#0), }, 
], - span: $DIR/issue-75930-derive-cfg.rs:22:1: 65:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:21:32: 68:2 (#0), }, ] diff --git a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout index 40da5aa93bfa8..0faaa4261f6d1 100644 --- a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout +++ b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout @@ -34,48 +34,48 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ stream: TokenStream [ Ident { ident: "mod", - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 27:8 (#0), }, Ident { ident: "bar", - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:27:9: 27:12 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Punct { ch: '#', - spacing: Joint, - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + spacing: Alone, + span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "doc", - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0), }, Literal { kind: StrRaw(0), symbol: " Foo", suffix: None, - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0), }, ], - span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0), }, ], - span: 
$DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0), + span: $DIR/issue-78675-captured-inner-attrs.rs:27:13: 29:6 (#0), }, ], span: $DIR/issue-78675-captured-inner-attrs.rs:22:13: 22:18 (#4), diff --git a/src/test/ui/proc-macro/macro-rules-derive-cfg.rs b/src/test/ui/proc-macro/macro-rules-derive-cfg.rs new file mode 100644 index 0000000000000..ce0be174eb21f --- /dev/null +++ b/src/test/ui/proc-macro/macro-rules-derive-cfg.rs @@ -0,0 +1,28 @@ +// check-pass +// compile-flags: -Z span-debug --error-format human +// aux-build:test-macros.rs + +#![feature(rustc_attrs)] +#![feature(stmt_expr_attributes)] + +#[macro_use] +extern crate test_macros; + +macro_rules! produce_it { + ($expr:expr) => { + #[derive(Print)] + struct Foo { + val: [bool; { + let a = #[cfg_attr(not(FALSE), rustc_dummy(first))] $expr; + 0 + }] + } + } +} + +produce_it!(#[cfg_attr(not(FALSE), rustc_dummy(second))] { + #![cfg_attr(not(FALSE), allow(unused))] + 30 +}); + +fn main() {} diff --git a/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout b/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout new file mode 100644 index 0000000000000..bec5d2b7aa59c --- /dev/null +++ b/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout @@ -0,0 +1,171 @@ +PRINT-DERIVE INPUT (DISPLAY): struct Foo +{ + val : + [bool ; + { + let a = #[rustc_dummy(first)] #[rustc_dummy(second)] + { # ! 
[allow(unused)] 30 } ; 0 + }] +} +PRINT-DERIVE INPUT (DEBUG): TokenStream [ + Ident { + ident: "struct", + span: $DIR/macro-rules-derive-cfg.rs:14:9: 14:15 (#6), + }, + Ident { + ident: "Foo", + span: $DIR/macro-rules-derive-cfg.rs:14:16: 14:19 (#6), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "val", + span: $DIR/macro-rules-derive-cfg.rs:15:13: 15:16 (#6), + }, + Punct { + ch: ':', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:15:16: 15:17 (#6), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "bool", + span: $DIR/macro-rules-derive-cfg.rs:15:19: 15:23 (#6), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:15:23: 15:24 (#6), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "let", + span: $DIR/macro-rules-derive-cfg.rs:16:17: 16:20 (#6), + }, + Ident { + ident: "a", + span: $DIR/macro-rules-derive-cfg.rs:16:21: 16:22 (#6), + }, + Punct { + ch: '=', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:16:23: 16:24 (#6), + }, + Punct { + ch: '#', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:16:25: 16:26 (#6), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/macro-rules-derive-cfg.rs:16:48: 16:59 (#6), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "first", + span: $DIR/macro-rules-derive-cfg.rs:16:60: 16:65 (#6), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:16:59: 16:66 (#6), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:16:25: 16:26 (#6), + }, + Punct { + ch: '#', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:23:13: 23:14 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/macro-rules-derive-cfg.rs:23:36: 23:47 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "second", + span: 
$DIR/macro-rules-derive-cfg.rs:23:48: 23:54 (#0), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:23:47: 23:55 (#0), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:23:13: 23:14 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:24:5: 24:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:24:6: 24:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/macro-rules-derive-cfg.rs:24:29: 24:34 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "unused", + span: $DIR/macro-rules-derive-cfg.rs:24:35: 24:41 (#0), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:24:34: 24:42 (#0), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:24:5: 24:6 (#0), + }, + Literal { + kind: Integer, + symbol: "30", + suffix: None, + span: $DIR/macro-rules-derive-cfg.rs:25:5: 25:7 (#0), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:23:58: 26:2 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/macro-rules-derive-cfg.rs:16:74: 16:75 (#6), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: $DIR/macro-rules-derive-cfg.rs:17:17: 17:18 (#6), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:15:25: 18:14 (#6), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:15:18: 18:15 (#6), + }, + ], + span: $DIR/macro-rules-derive-cfg.rs:14:20: 19:10 (#6), + }, +] diff --git a/src/test/ui/proc-macro/nested-derive-cfg.rs b/src/test/ui/proc-macro/nested-derive-cfg.rs new file mode 100644 index 0000000000000..d5a0c366aaf99 --- /dev/null +++ b/src/test/ui/proc-macro/nested-derive-cfg.rs @@ -0,0 +1,24 @@ +// compile-flags: -Z span-debug --error-format human +// aux-build:test-macros.rs +// check-pass + +#![no_std] + +#[macro_use] +extern crate test_macros; + +extern crate std; + +#[derive(Print)] +struct Foo { + #[cfg(FALSE)] removed: bool, + my_array: [bool; 
{ + struct Inner { + #[cfg(FALSE)] removed_inner_field: u8, + non_removed_inner_field: usize + } + 0 + }] +} + +fn main() {} diff --git a/src/test/ui/proc-macro/nested-derive-cfg.stdout b/src/test/ui/proc-macro/nested-derive-cfg.stdout new file mode 100644 index 0000000000000..bf1717e3766f0 --- /dev/null +++ b/src/test/ui/proc-macro/nested-derive-cfg.stdout @@ -0,0 +1,81 @@ +PRINT-DERIVE INPUT (DISPLAY): struct Foo +{ my_array : [bool ; { struct Inner { non_removed_inner_field : usize } 0 }] } +PRINT-DERIVE INPUT (DEBUG): TokenStream [ + Ident { + ident: "struct", + span: $DIR/nested-derive-cfg.rs:13:1: 13:7 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/nested-derive-cfg.rs:13:8: 13:11 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "my_array", + span: $DIR/nested-derive-cfg.rs:15:5: 15:13 (#0), + }, + Punct { + ch: ':', + spacing: Alone, + span: $DIR/nested-derive-cfg.rs:15:13: 15:14 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "bool", + span: $DIR/nested-derive-cfg.rs:15:16: 15:20 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/nested-derive-cfg.rs:15:20: 15:21 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "struct", + span: $DIR/nested-derive-cfg.rs:16:9: 16:15 (#0), + }, + Ident { + ident: "Inner", + span: $DIR/nested-derive-cfg.rs:16:16: 16:21 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "non_removed_inner_field", + span: $DIR/nested-derive-cfg.rs:18:13: 18:36 (#0), + }, + Punct { + ch: ':', + spacing: Alone, + span: $DIR/nested-derive-cfg.rs:18:36: 18:37 (#0), + }, + Ident { + ident: "usize", + span: $DIR/nested-derive-cfg.rs:18:38: 18:43 (#0), + }, + ], + span: $DIR/nested-derive-cfg.rs:16:22: 19:10 (#0), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: $DIR/nested-derive-cfg.rs:20:9: 20:10 (#0), + }, + ], + span: $DIR/nested-derive-cfg.rs:15:22: 21:6 (#0), + }, + 
], + span: $DIR/nested-derive-cfg.rs:15:15: 21:7 (#0), + }, + ], + span: $DIR/nested-derive-cfg.rs:13:12: 22:2 (#0), + }, +] diff --git a/src/test/ui/proc-macro/weird-braces.rs b/src/test/ui/proc-macro/weird-braces.rs new file mode 100644 index 0000000000000..b9a7e08f99379 --- /dev/null +++ b/src/test/ui/proc-macro/weird-braces.rs @@ -0,0 +1,23 @@ +// aux-build:test-macros.rs +// check-pass +// compile-flags: -Z span-debug + +#![feature(custom_inner_attributes)] + +#![no_std] // Don't load unnecessary hygiene information from std +extern crate std; + +extern crate test_macros; +use test_macros::{print_target_and_args}; + +struct Foo; +trait Bar {} + +#[print_target_and_args(first_outer)] +#[print_target_and_args(second_outer)] +impl Bar<{1 > 0}> for Foo<{true}> { + #![print_target_and_args(first_inner)] + #![print_target_and_args(second_inner)] +} + +fn main() {} diff --git a/src/test/ui/proc-macro/weird-braces.stdout b/src/test/ui/proc-macro/weird-braces.stdout new file mode 100644 index 0000000000000..990829456e88a --- /dev/null +++ b/src/test/ui/proc-macro/weird-braces.stdout @@ -0,0 +1,524 @@ +PRINT-ATTR_ARGS INPUT (DISPLAY): first_outer +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "first_outer", + span: $DIR/weird-braces.rs:16:25: 16:36 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second_outer)] impl Bar < { 1 > 0 } > for Foo < +{ true } > +{ + # ! [print_target_and_args(first_inner)] # ! 
+ [print_target_and_args(second_inner)] +} +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/weird-braces.rs:17:1: 17:2 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/weird-braces.rs:17:3: 17:24 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "second_outer", + span: $DIR/weird-braces.rs:17:25: 17:37 (#0), + }, + ], + span: $DIR/weird-braces.rs:17:24: 17:38 (#0), + }, + ], + span: $DIR/weird-braces.rs:17:2: 17:39 (#0), + }, + Ident { + ident: "impl", + span: $DIR/weird-braces.rs:18:1: 18:5 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/weird-braces.rs:18:6: 18:9 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:9: 18:10 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/weird-braces.rs:18:11: 18:12 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:13: 18:14 (#0), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: $DIR/weird-braces.rs:18:15: 18:16 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:10: 18:17 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:17: 18:18 (#0), + }, + Ident { + ident: "for", + span: $DIR/weird-braces.rs:18:19: 18:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/weird-braces.rs:18:23: 18:26 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:26: 18:27 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "true", + span: $DIR/weird-braces.rs:18:28: 18:32 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:27: 18:33 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:33: 18:34 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: 
$DIR/weird-braces.rs:19:5: 19:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/weird-braces.rs:19:6: 19:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/weird-braces.rs:19:8: 19:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "first_inner", + span: $DIR/weird-braces.rs:19:30: 19:41 (#0), + }, + ], + span: $DIR/weird-braces.rs:19:29: 19:42 (#0), + }, + ], + span: $DIR/weird-braces.rs:19:7: 19:43 (#0), + }, + Punct { + ch: '#', + spacing: Joint, + span: $DIR/weird-braces.rs:20:5: 20:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/weird-braces.rs:20:6: 20:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/weird-braces.rs:20:8: 20:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "second_inner", + span: $DIR/weird-braces.rs:20:30: 20:42 (#0), + }, + ], + span: $DIR/weird-braces.rs:20:29: 20:43 (#0), + }, + ], + span: $DIR/weird-braces.rs:20:7: 20:44 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:35: 21:2 (#0), + }, +] +PRINT-ATTR_ARGS INPUT (DISPLAY): second_outer +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "second_outer", + span: $DIR/weird-braces.rs:17:25: 17:37 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > +{ + # ! [print_target_and_args(first_inner)] # ! 
+ [print_target_and_args(second_inner)] +} +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "impl", + span: $DIR/weird-braces.rs:18:1: 18:5 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/weird-braces.rs:18:6: 18:9 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:9: 18:10 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/weird-braces.rs:18:11: 18:12 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:13: 18:14 (#0), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: $DIR/weird-braces.rs:18:15: 18:16 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:10: 18:17 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:17: 18:18 (#0), + }, + Ident { + ident: "for", + span: $DIR/weird-braces.rs:18:19: 18:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/weird-braces.rs:18:23: 18:26 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:26: 18:27 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "true", + span: $DIR/weird-braces.rs:18:28: 18:32 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:27: 18:33 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:33: 18:34 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/weird-braces.rs:19:5: 19:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/weird-braces.rs:19:6: 19:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/weird-braces.rs:19:8: 19:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "first_inner", + span: $DIR/weird-braces.rs:19:30: 19:41 (#0), + }, + ], + span: $DIR/weird-braces.rs:19:29: 19:42 (#0), + }, + ], + span: 
$DIR/weird-braces.rs:19:7: 19:43 (#0), + }, + Punct { + ch: '#', + spacing: Joint, + span: $DIR/weird-braces.rs:20:5: 20:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/weird-braces.rs:20:6: 20:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/weird-braces.rs:20:8: 20:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "second_inner", + span: $DIR/weird-braces.rs:20:30: 20:42 (#0), + }, + ], + span: $DIR/weird-braces.rs:20:29: 20:43 (#0), + }, + ], + span: $DIR/weird-braces.rs:20:7: 20:44 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:35: 21:2 (#0), + }, +] +PRINT-ATTR_ARGS INPUT (DISPLAY): first_inner +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "first_inner", + span: $DIR/weird-braces.rs:19:30: 19:41 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > +{ # ! [print_target_and_args(second_inner)] } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "impl", + span: $DIR/weird-braces.rs:18:1: 18:5 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/weird-braces.rs:18:6: 18:9 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:9: 18:10 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/weird-braces.rs:18:11: 18:12 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:13: 18:14 (#0), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: $DIR/weird-braces.rs:18:15: 18:16 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:10: 18:17 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:17: 18:18 (#0), + }, + Ident { + ident: "for", + span: $DIR/weird-braces.rs:18:19: 18:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/weird-braces.rs:18:23: 18:26 (#0), + }, + Punct { + ch: '<', + spacing: 
Alone, + span: $DIR/weird-braces.rs:18:26: 18:27 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "true", + span: $DIR/weird-braces.rs:18:28: 18:32 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:27: 18:33 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:33: 18:34 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Joint, + span: $DIR/weird-braces.rs:20:5: 20:6 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/weird-braces.rs:20:6: 20:7 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "print_target_and_args", + span: $DIR/weird-braces.rs:20:8: 20:29 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "second_inner", + span: $DIR/weird-braces.rs:20:30: 20:42 (#0), + }, + ], + span: $DIR/weird-braces.rs:20:29: 20:43 (#0), + }, + ], + span: $DIR/weird-braces.rs:20:7: 20:44 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:35: 21:2 (#0), + }, +] +PRINT-ATTR_ARGS INPUT (DISPLAY): second_inner +PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ + Ident { + ident: "second_inner", + span: $DIR/weird-braces.rs:20:30: 20:42 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "impl", + span: $DIR/weird-braces.rs:18:1: 18:5 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/weird-braces.rs:18:6: 18:9 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:9: 18:10 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/weird-braces.rs:18:11: 18:12 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:13: 18:14 (#0), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: $DIR/weird-braces.rs:18:15: 18:16 (#0), + }, + ], + span: 
$DIR/weird-braces.rs:18:10: 18:17 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:17: 18:18 (#0), + }, + Ident { + ident: "for", + span: $DIR/weird-braces.rs:18:19: 18:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/weird-braces.rs:18:23: 18:26 (#0), + }, + Punct { + ch: '<', + spacing: Alone, + span: $DIR/weird-braces.rs:18:26: 18:27 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "true", + span: $DIR/weird-braces.rs:18:28: 18:32 (#0), + }, + ], + span: $DIR/weird-braces.rs:18:27: 18:33 (#0), + }, + Punct { + ch: '>', + spacing: Alone, + span: $DIR/weird-braces.rs:18:33: 18:34 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/weird-braces.rs:18:35: 21:2 (#0), + }, +]