From a20ae8901c1160b4044dda803cb061630e2f8331 Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Sat, 26 Sep 2020 12:41:53 -0400 Subject: [PATCH 1/5] Move pprust code to a 'state' submodule --- compiler/rustc_ast_pretty/src/pprust/mod.rs | 99 +++++ .../src/{pprust.rs => pprust/state.rs} | 379 +++++++++--------- 2 files changed, 299 insertions(+), 179 deletions(-) create mode 100644 compiler/rustc_ast_pretty/src/pprust/mod.rs rename compiler/rustc_ast_pretty/src/{pprust.rs => pprust/state.rs} (92%) diff --git a/compiler/rustc_ast_pretty/src/pprust/mod.rs b/compiler/rustc_ast_pretty/src/pprust/mod.rs new file mode 100644 index 0000000000000..b88699f6ee176 --- /dev/null +++ b/compiler/rustc_ast_pretty/src/pprust/mod.rs @@ -0,0 +1,99 @@ +#[cfg(test)] +mod tests; + +pub mod state; +pub use state::{print_crate, AnnNode, Comments, PpAnn, PrintState, State}; + +use rustc_ast as ast; +use rustc_ast::token::{Nonterminal, Token, TokenKind}; +use rustc_ast::tokenstream::{TokenStream, TokenTree}; + +pub fn nonterminal_to_string(nt: &Nonterminal) -> String { + State::new().nonterminal_to_string(nt) +} + +/// Print the token kind precisely, without converting `$crate` into its respective crate name. +pub fn token_kind_to_string(tok: &TokenKind) -> String { + State::new().token_kind_to_string(tok) +} + +/// Print the token precisely, without converting `$crate` into its respective crate name. +pub fn token_to_string(token: &Token) -> String { + State::new().token_to_string(token) +} + +pub fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String { + State::new().token_to_string_ext(token, convert_dollar_crate) +} + +pub fn ty_to_string(ty: &ast::Ty) -> String { + State::new().ty_to_string(ty) +} + +pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String { + State::new().bounds_to_string(bounds) +} + +pub fn pat_to_string(pat: &ast::Pat) -> String { + State::new().pat_to_string(pat) +} + +pub fn expr_to_string(e: &ast::Expr) -> String { + State::new().expr_to_string(e) +} + +pub fn tt_to_string(tt: &TokenTree) -> String { + State::new().tt_to_string(tt) +} + +pub fn tts_to_string(tokens: &TokenStream) -> String { + State::new().tts_to_string(tokens) +} + +pub fn stmt_to_string(stmt: &ast::Stmt) -> String { + State::new().stmt_to_string(stmt) +} + +pub fn item_to_string(i: &ast::Item) -> String { + State::new().item_to_string(i) +} + +pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String { + State::new().generic_params_to_string(generic_params) +} + +pub fn path_to_string(p: &ast::Path) -> String { + State::new().path_to_string(p) +} + +pub fn path_segment_to_string(p: &ast::PathSegment) -> String { + State::new().path_segment_to_string(p) +} + +pub fn vis_to_string(v: &ast::Visibility) -> String { + State::new().vis_to_string(v) +} + +pub fn block_to_string(blk: &ast::Block) -> String { + State::new().block_to_string(blk) +} + +pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String { + State::new().meta_list_item_to_string(li) +} + +pub fn attr_item_to_string(ai: &ast::AttrItem) -> String { + State::new().attr_item_to_string(ai) +} + +pub fn attribute_to_string(attr: &ast::Attribute) -> String { + State::new().attribute_to_string(attr) +} + +pub fn param_to_string(arg: &ast::Param) -> String { + State::new().param_to_string(arg) +} + +pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String { + State::new().to_string(f) +} diff --git a/compiler/rustc_ast_pretty/src/pprust.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs similarity index 92% 
rename from compiler/rustc_ast_pretty/src/pprust.rs rename to compiler/rustc_ast_pretty/src/pprust/state.rs index d16b541c6999e..14feb4989035c 100644 --- a/compiler/rustc_ast_pretty/src/pprust.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -142,13 +142,6 @@ pub fn print_crate<'a>( s.s.eof() } -pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String { - let mut printer = - State { s: pp::mk_printer(), comments: None, ann: &NoAnn, is_expanded: false }; - f(&mut printer); - printer.s.eof() -} - // This makes printed token streams look slightly nicer, // and also addresses some specific regressions described in #63896 and #73345. fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool { @@ -231,173 +224,8 @@ pub fn literal_to_string(lit: token::Lit) -> String { out } -/// Print the token kind precisely, without converting `$crate` into its respective crate name. -pub fn token_kind_to_string(tok: &TokenKind) -> String { - token_kind_to_string_ext(tok, None) -} - -fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option) -> String { - match *tok { - token::Eq => "=".to_string(), - token::Lt => "<".to_string(), - token::Le => "<=".to_string(), - token::EqEq => "==".to_string(), - token::Ne => "!=".to_string(), - token::Ge => ">=".to_string(), - token::Gt => ">".to_string(), - token::Not => "!".to_string(), - token::Tilde => "~".to_string(), - token::OrOr => "||".to_string(), - token::AndAnd => "&&".to_string(), - token::BinOp(op) => binop_to_string(op).to_string(), - token::BinOpEq(op) => format!("{}=", binop_to_string(op)), - - /* Structural symbols */ - token::At => "@".to_string(), - token::Dot => ".".to_string(), - token::DotDot => "..".to_string(), - token::DotDotDot => "...".to_string(), - token::DotDotEq => "..=".to_string(), - token::Comma => ",".to_string(), - token::Semi => ";".to_string(), - token::Colon => ":".to_string(), - token::ModSep => "::".to_string(), - token::RArrow => "->".to_string(), - token::LArrow => "<-".to_string(), - token::FatArrow => "=>".to_string(), - token::OpenDelim(token::Paren) => "(".to_string(), - token::CloseDelim(token::Paren) => ")".to_string(), - token::OpenDelim(token::Bracket) => "[".to_string(), - token::CloseDelim(token::Bracket) => "]".to_string(), - token::OpenDelim(token::Brace) => "{".to_string(), - token::CloseDelim(token::Brace) => "}".to_string(), - token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(), - token::Pound => "#".to_string(), - token::Dollar => "$".to_string(), - token::Question => "?".to_string(), - token::SingleQuote => "'".to_string(), - - /* Literals */ - token::Literal(lit) => literal_to_string(lit), - - /* Name components */ - token::Ident(s, is_raw) => IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string(), - token::Lifetime(s) => s.to_string(), - - /* Other */ - token::DocComment(comment_kind, attr_style, data) => { - doc_comment_to_string(comment_kind, attr_style, data) - } - token::Eof => "".to_string(), - - token::Interpolated(ref nt) => nonterminal_to_string(nt), - } -} - -/// Print the token precisely, without converting `$crate` into its respective crate name. 
-pub fn token_to_string(token: &Token) -> String { - token_to_string_ext(token, false) -} - -fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String { - let convert_dollar_crate = convert_dollar_crate.then_some(token.span); - token_kind_to_string_ext(&token.kind, convert_dollar_crate) -} - -pub fn nonterminal_to_string(nt: &Nonterminal) -> String { - match *nt { - token::NtExpr(ref e) => expr_to_string(e), - token::NtMeta(ref e) => attr_item_to_string(e), - token::NtTy(ref e) => ty_to_string(e), - token::NtPath(ref e) => path_to_string(e), - token::NtItem(ref e) => item_to_string(e), - token::NtBlock(ref e) => block_to_string(e), - token::NtStmt(ref e) => stmt_to_string(e), - token::NtPat(ref e) => pat_to_string(e), - token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(), - token::NtLifetime(e) => e.to_string(), - token::NtLiteral(ref e) => expr_to_string(e), - token::NtTT(ref tree) => tt_to_string(tree), - token::NtVis(ref e) => vis_to_string(e), - } -} - -pub fn ty_to_string(ty: &ast::Ty) -> String { - to_string(|s| s.print_type(ty)) -} - -pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String { - to_string(|s| s.print_type_bounds("", bounds)) -} - -pub fn pat_to_string(pat: &ast::Pat) -> String { - to_string(|s| s.print_pat(pat)) -} - -pub fn expr_to_string(e: &ast::Expr) -> String { - to_string(|s| s.print_expr(e)) -} - -pub fn tt_to_string(tt: &TokenTree) -> String { - to_string(|s| s.print_tt(tt, false)) -} - -pub fn tts_to_string(tokens: &TokenStream) -> String { - to_string(|s| s.print_tts(tokens, false)) -} - -pub fn stmt_to_string(stmt: &ast::Stmt) -> String { - to_string(|s| s.print_stmt(stmt)) -} - -pub fn item_to_string(i: &ast::Item) -> String { - to_string(|s| s.print_item(i)) -} - -pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String { - to_string(|s| s.print_generic_params(generic_params)) -} - -pub fn path_to_string(p: &ast::Path) -> String { - to_string(|s| s.print_path(p, false, 0)) -} - -pub fn path_segment_to_string(p: &ast::PathSegment) -> String { - to_string(|s| s.print_path_segment(p, false)) -} - -pub fn vis_to_string(v: &ast::Visibility) -> String { - to_string(|s| s.print_visibility(v)) -} - -fn block_to_string(blk: &ast::Block) -> String { - to_string(|s| { - // Containing cbox, will be closed by `print_block` at `}`. - s.cbox(INDENT_UNIT); - // Head-ibox, will be closed by `print_block` after `{`. 
- s.ibox(0); - s.print_block(blk) - }) -} - -pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String { - to_string(|s| s.print_meta_list_item(li)) -} - -fn attr_item_to_string(ai: &ast::AttrItem) -> String { - to_string(|s| s.print_attr_item(ai, ai.path.span)) -} - -pub fn attribute_to_string(attr: &ast::Attribute) -> String { - to_string(|s| s.print_attribute(attr)) -} - -pub fn param_to_string(arg: &ast::Param) -> String { - to_string(|s| s.print_param(arg, false)) -} - fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String { - format!("{}{}", to_string(|s| s.print_visibility(vis)), s) + format!("{}{}", State::new().to_string(|s| s.print_visibility(vis)), s) } impl std::ops::Deref for State<'_> { @@ -679,7 +507,8 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) { match tt { TokenTree::Token(token) => { - self.word(token_to_string_ext(&token, convert_dollar_crate)); + let token_str = self.token_to_string_ext(&token, convert_dollar_crate); + self.word(token_str); if let token::DocComment(..) = token.kind { self.hardbreak() } @@ -745,14 +574,20 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.space(); } } - _ => self.word(token_kind_to_string(&token::OpenDelim(delim))), + _ => { + let token_str = self.token_kind_to_string(&token::OpenDelim(delim)); + self.word(token_str) + } } self.ibox(0); self.print_tts(tts, convert_dollar_crate); self.end(); match delim { DelimToken::Brace => self.bclose(span), - _ => self.word(token_kind_to_string(&token::CloseDelim(delim))), + _ => { + let token_str = self.token_kind_to_string(&token::CloseDelim(delim)); + self.word(token_str) + } } } @@ -818,6 +653,183 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere } } } + + fn nonterminal_to_string(&self, nt: &Nonterminal) -> String { + match *nt { + token::NtExpr(ref e) => self.expr_to_string(e), + token::NtMeta(ref e) => self.attr_item_to_string(e), + token::NtTy(ref e) => self.ty_to_string(e), + token::NtPath(ref e) => self.path_to_string(e), + token::NtItem(ref e) => self.item_to_string(e), + token::NtBlock(ref e) => self.block_to_string(e), + token::NtStmt(ref e) => self.stmt_to_string(e), + token::NtPat(ref e) => self.pat_to_string(e), + token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(), + token::NtLifetime(e) => e.to_string(), + token::NtLiteral(ref e) => self.expr_to_string(e), + token::NtTT(ref tree) => self.tt_to_string(tree), + token::NtVis(ref e) => self.vis_to_string(e), + } + } + + /// Print the token kind precisely, without converting `$crate` into its respective crate name. 
+ fn token_kind_to_string(&self, tok: &TokenKind) -> String { + self.token_kind_to_string_ext(tok, None) + } + + fn token_kind_to_string_ext( + &self, + tok: &TokenKind, + convert_dollar_crate: Option, + ) -> String { + match *tok { + token::Eq => "=".to_string(), + token::Lt => "<".to_string(), + token::Le => "<=".to_string(), + token::EqEq => "==".to_string(), + token::Ne => "!=".to_string(), + token::Ge => ">=".to_string(), + token::Gt => ">".to_string(), + token::Not => "!".to_string(), + token::Tilde => "~".to_string(), + token::OrOr => "||".to_string(), + token::AndAnd => "&&".to_string(), + token::BinOp(op) => binop_to_string(op).to_string(), + token::BinOpEq(op) => format!("{}=", binop_to_string(op)), + + /* Structural symbols */ + token::At => "@".to_string(), + token::Dot => ".".to_string(), + token::DotDot => "..".to_string(), + token::DotDotDot => "...".to_string(), + token::DotDotEq => "..=".to_string(), + token::Comma => ",".to_string(), + token::Semi => ";".to_string(), + token::Colon => ":".to_string(), + token::ModSep => "::".to_string(), + token::RArrow => "->".to_string(), + token::LArrow => "<-".to_string(), + token::FatArrow => "=>".to_string(), + token::OpenDelim(token::Paren) => "(".to_string(), + token::CloseDelim(token::Paren) => ")".to_string(), + token::OpenDelim(token::Bracket) => "[".to_string(), + token::CloseDelim(token::Bracket) => "]".to_string(), + token::OpenDelim(token::Brace) => "{".to_string(), + token::CloseDelim(token::Brace) => "}".to_string(), + token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(), + token::Pound => "#".to_string(), + token::Dollar => "$".to_string(), + token::Question => "?".to_string(), + token::SingleQuote => "'".to_string(), + + /* Literals */ + token::Literal(lit) => literal_to_string(lit), + + /* Name components */ + token::Ident(s, is_raw) => { + IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string() + } + token::Lifetime(s) => s.to_string(), + + /* Other */ + token::DocComment(comment_kind, attr_style, data) => { + doc_comment_to_string(comment_kind, attr_style, data) + } + token::Eof => "".to_string(), + + token::Interpolated(ref nt) => self.nonterminal_to_string(nt), + } + } + + /// Print the token precisely, without converting `$crate` into its respective crate name. 
+ fn token_to_string(&self, token: &Token) -> String { + self.token_to_string_ext(token, false) + } + + fn token_to_string_ext(&self, token: &Token, convert_dollar_crate: bool) -> String { + let convert_dollar_crate = convert_dollar_crate.then_some(token.span); + self.token_kind_to_string_ext(&token.kind, convert_dollar_crate) + } + + fn ty_to_string(&self, ty: &ast::Ty) -> String { + self.to_string(|s| s.print_type(ty)) + } + + fn bounds_to_string(&self, bounds: &[ast::GenericBound]) -> String { + self.to_string(|s| s.print_type_bounds("", bounds)) + } + + fn pat_to_string(&self, pat: &ast::Pat) -> String { + self.to_string(|s| s.print_pat(pat)) + } + + fn expr_to_string(&self, e: &ast::Expr) -> String { + self.to_string(|s| s.print_expr(e)) + } + + fn tt_to_string(&self, tt: &TokenTree) -> String { + self.to_string(|s| s.print_tt(tt, false)) + } + + fn tts_to_string(&self, tokens: &TokenStream) -> String { + self.to_string(|s| s.print_tts(tokens, false)) + } + + fn stmt_to_string(&self, stmt: &ast::Stmt) -> String { + self.to_string(|s| s.print_stmt(stmt)) + } + + fn item_to_string(&self, i: &ast::Item) -> String { + self.to_string(|s| s.print_item(i)) + } + + fn generic_params_to_string(&self, generic_params: &[ast::GenericParam]) -> String { + self.to_string(|s| s.print_generic_params(generic_params)) + } + + fn path_to_string(&self, p: &ast::Path) -> String { + self.to_string(|s| s.print_path(p, false, 0)) + } + + fn path_segment_to_string(&self, p: &ast::PathSegment) -> String { + self.to_string(|s| s.print_path_segment(p, false)) + } + + fn vis_to_string(&self, v: &ast::Visibility) -> String { + self.to_string(|s| s.print_visibility(v)) + } + + fn block_to_string(&self, blk: &ast::Block) -> String { + self.to_string(|s| { + // Containing cbox, will be closed by `print_block` at `}`. + s.cbox(INDENT_UNIT); + // Head-ibox, will be closed by `print_block` after `{`. + s.ibox(0); + s.print_block(blk) + }) + } + + fn meta_list_item_to_string(&self, li: &ast::NestedMetaItem) -> String { + self.to_string(|s| s.print_meta_list_item(li)) + } + + fn attr_item_to_string(&self, ai: &ast::AttrItem) -> String { + self.to_string(|s| s.print_attr_item(ai, ai.path.span)) + } + + fn attribute_to_string(&self, attr: &ast::Attribute) -> String { + self.to_string(|s| s.print_attribute(attr)) + } + + fn param_to_string(&self, arg: &ast::Param) -> String { + self.to_string(|s| s.print_param(arg, false)) + } + + fn to_string(&self, f: impl FnOnce(&mut State<'_>)) -> String { + let mut printer = State::new(); + f(&mut printer); + printer.s.eof() + } } impl<'a> PrintState<'a> for State<'a> { @@ -856,6 +868,15 @@ impl<'a> PrintState<'a> for State<'a> { } impl<'a> State<'a> { + pub fn new() -> State<'a> { + State { + s: pp::mk_printer(), + comments: None, + ann: &NoAnn, + is_expanded: false, + } + } + // Synthesizes a comment that was not textually present in the original source // file. 
pub fn synth_comment(&mut self, text: String) { @@ -1139,7 +1160,7 @@ impl<'a> State<'a> { self.print_fn_full(sig, item.ident, gen, &item.vis, def, body, &item.attrs); } ast::ItemKind::Mod(ref _mod) => { - self.head(to_string(|s| { + self.head(self.to_string(|s| { s.print_visibility(&item.vis); s.print_unsafety(_mod.unsafety); s.word("mod"); @@ -1158,7 +1179,7 @@ impl<'a> State<'a> { } } ast::ItemKind::ForeignMod(ref nmod) => { - self.head(to_string(|s| { + self.head(self.to_string(|s| { s.print_unsafety(nmod.unsafety); s.word("extern"); })); @@ -1366,7 +1387,7 @@ impl<'a> State<'a> { ast::CrateSugar::JustCrate => self.word_nbsp("crate"), }, ast::VisibilityKind::Restricted { ref path, .. } => { - let path = to_string(|s| s.print_path(path, false, 0)); + let path = self.to_string(|s| s.print_path(path, false, 0)); if path == "self" || path == "super" { self.word_nbsp(format!("pub({})", path)) } else { From ea468f427016bbf89819199bb8420afc27e64a7f Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Sat, 26 Sep 2020 12:51:00 -0400 Subject: [PATCH 2/5] Allow skipping extra paren insertion during AST pretty-printing Fixes #74616 Makes progress towards #43081 Unblocks PR #76130 When pretty-printing an AST node, we may insert additional parentheses to ensure that precedence is properly preserved in the code we output. However, the proc macro implementation relies on comparing a pretty-printed AST node to the captured `TokenStream`. Inserting extra parentheses changes the structure of the reparsed `TokenStream`, making the comparison fail. This PR refactors the AST pretty-printing code to allow skipping the insertion of additional parentheses. Several freestanding functions are moved to trait methods on `PrintState`, which keeps track of an internal `insert_extra_parens` flag. This flag is normally `true`, but we expose a public method which allows pretty-printing a nonterminal with `insert_extra_parens = false`. To avoid changing the public interface of `rustc_ast_pretty`, the freestanding `_to_string` functions are changed to delegate to a newly-created `State`. The main pretty-printing code is moved to a new `state` module to ensure that it does not accidentally call any of these public helper functions (instead, the internal methods with the same name should be used).
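To make the failure mode concrete: in the `mul_2!` test added later in this patch, `$val` captures `1 + 1` as a `None`-delimited group, so the captured expansion of `$val * 2` is structurally different from both possible reparses. The toy sketch below uses simplified stand-ins for rustc's token types (not the real `TokenTree`/`DelimToken` API) just to show the mismatch:

// Toy model with simplified stand-ins for rustc's token types, illustrating
// why a token-for-token comparison fails for `$val * 2` with `$val` = `1 + 1`.
#[derive(Debug, PartialEq)]
enum Tok {
    Lit(u32),
    Op(char),
    // Invisible group around an interpolated `$val`, like `DelimToken::NoDelim`.
    NoDelim(Vec<Tok>),
    // Explicit parentheses, like the ones the pretty-printer inserts.
    Paren(Vec<Tok>),
}

// Captured expansion of `mul_2!(1 + 1)`: `$val` keeps its invisible group.
fn captured() -> Vec<Tok> {
    vec![Tok::NoDelim(vec![Tok::Lit(1), Tok::Op('+'), Tok::Lit(1)]), Tok::Op('*'), Tok::Lit(2)]
}

// Reparse of the pretty-printed source "(1 + 1) * 2": real parentheses appear.
fn reparsed_with_parens() -> Vec<Tok> {
    vec![Tok::Paren(vec![Tok::Lit(1), Tok::Op('+'), Tok::Lit(1)]), Tok::Op('*'), Tok::Lit(2)]
}

// Reparse of "1 + 1 * 2" (no extra parentheses): the group disappears entirely.
fn reparsed_without_parens() -> Vec<Tok> {
    vec![Tok::Lit(1), Tok::Op('+'), Tok::Lit(1), Tok::Op('*'), Tok::Lit(2)]
}

fn main() {
    // A strict comparison rejects both reparsed forms, which is why the
    // comparison logic has to special-case `NoDelim` groups.
    assert_ne!(captured(), reparsed_with_parens());
    assert_ne!(captured(), reparsed_without_parens());
}

With `insert_extra_parens = false` the pretty-printed source at least avoids introducing new `(` and `)` tokens; the remaining `NoDelim`-versus-plain-tokens difference is handled by the comparison logic in `rustc_parse` below.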
--- compiler/rustc_ast_pretty/src/pprust/mod.rs | 5 + compiler/rustc_ast_pretty/src/pprust/state.rs | 26 +++- compiler/rustc_hir_pretty/src/lib.rs | 3 + compiler/rustc_parse/src/lib.rs | 37 ++++- .../ui/proc-macro/issue-75734-pp-paren.rs | 26 ++++ .../ui/proc-macro/issue-75734-pp-paren.stdout | 134 ++++++++++++++++++ 6 files changed, 219 insertions(+), 12 deletions(-) create mode 100644 src/test/ui/proc-macro/issue-75734-pp-paren.rs create mode 100644 src/test/ui/proc-macro/issue-75734-pp-paren.stdout diff --git a/compiler/rustc_ast_pretty/src/pprust/mod.rs b/compiler/rustc_ast_pretty/src/pprust/mod.rs index b88699f6ee176..b34ea41ab558a 100644 --- a/compiler/rustc_ast_pretty/src/pprust/mod.rs +++ b/compiler/rustc_ast_pretty/src/pprust/mod.rs @@ -8,6 +8,11 @@ use rustc_ast as ast; use rustc_ast::token::{Nonterminal, Token, TokenKind}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; +pub fn nonterminal_to_string_no_extra_parens(nt: &Nonterminal) -> String { + let state = State::without_insert_extra_parens(); + state.nonterminal_to_string(nt) +} + pub fn nonterminal_to_string(nt: &Nonterminal) -> String { State::new().nonterminal_to_string(nt) } diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 14feb4989035c..9aa066370bb5b 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -20,9 +20,6 @@ use rustc_span::{BytePos, FileName, Span}; use std::borrow::Cow; -#[cfg(test)] -mod tests; - pub enum MacHeader<'a> { Path(&'a ast::Path), Keyword(&'static str), @@ -91,6 +88,13 @@ pub struct State<'a> { comments: Option>, ann: &'a (dyn PpAnn + 'a), is_expanded: bool, + // If `true`, additional parenthesis (separate from `ExprKind::Paren`) + // are inserted to ensure that proper precedence is preserved + // in the pretty-printed output. 
+ // + // This is usually `true`, except when performing the pretty-print/reparse + // check in `nt_to_tokenstream` + insert_extra_parens: bool, } crate const INDENT_UNIT: usize = 4; @@ -112,6 +116,7 @@ pub fn print_crate<'a>( comments: Some(Comments::new(sm, filename, input)), ann, is_expanded, + insert_extra_parens: true, }; if is_expanded && has_injected_crate { @@ -225,7 +230,7 @@ pub fn literal_to_string(lit: token::Lit) -> String { } fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String { - format!("{}{}", State::new().to_string(|s| s.print_visibility(vis)), s) + format!("{}{}", State::new().to_string(|s| s.print_visibility(vis)), s) } impl std::ops::Deref for State<'_> { @@ -242,6 +247,7 @@ impl std::ops::DerefMut for State<'_> { } pub trait PrintState<'a>: std::ops::Deref + std::ops::DerefMut { + fn insert_extra_parens(&self) -> bool; fn comments(&mut self) -> &mut Option>; fn print_ident(&mut self, ident: Ident); fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool); @@ -827,12 +833,16 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere fn to_string(&self, f: impl FnOnce(&mut State<'_>)) -> String { let mut printer = State::new(); + printer.insert_extra_parens = self.insert_extra_parens(); f(&mut printer); printer.s.eof() } } impl<'a> PrintState<'a> for State<'a> { + fn insert_extra_parens(&self) -> bool { + self.insert_extra_parens + } fn comments(&mut self) -> &mut Option> { &mut self.comments } @@ -874,9 +884,14 @@ impl<'a> State<'a> { comments: None, ann: &NoAnn, is_expanded: false, + insert_extra_parens: true, } } + pub(super) fn without_insert_extra_parens() -> State<'a> { + State { insert_extra_parens: false, ..State::new() } + } + // Synthesizes a comment that was not textually present in the original source // file. pub fn synth_comment(&mut self, text: String) { @@ -1679,7 +1694,8 @@ impl<'a> State<'a> { } /// Prints `expr` or `(expr)` when `needs_par` holds. - fn print_expr_cond_paren(&mut self, expr: &ast::Expr, needs_par: bool) { + fn print_expr_cond_paren(&mut self, expr: &ast::Expr, mut needs_par: bool) { + needs_par &= self.insert_extra_parens; if needs_par { self.popen(); } diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index f6e4b1fb418bf..72011f04d9a77 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -141,6 +141,9 @@ impl std::ops::DerefMut for State<'_> { } impl<'a> PrintState<'a> for State<'a> { + fn insert_extra_parens(&self) -> bool { + true + } fn comments(&mut self) -> &mut Option> { &mut self.comments } diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index b68d36c9a8e6f..51038b7d3aabc 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -297,7 +297,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke }; // FIXME(#43081): Avoid this pretty-print + reparse hack - let source = pprust::nonterminal_to_string(nt); + // Pretty-print the AST struct without inserting any parenthesis + // beyond those explicitly written by the user (e.g. `ExpnKind::Paren`). + // The resulting stream may have incorrect precedence, but it's only + // ever used for a comparison against the capture tokenstream. 
+ let source = pprust::nonterminal_to_string_no_extra_parens(nt); let filename = FileName::macro_expansion_source_code(&source); let reparsed_tokens = parse_stream_from_source_str(filename, source, sess, Some(span)); @@ -325,9 +329,28 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // modifications, including adding/removing typically non-semantic // tokens such as extra braces and commas, don't happen. if let Some(tokens) = tokens { + // If the streams match, then the AST hasn't been modified. Return the captured + // `TokenStream`. if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess) { return tokens; } + + // The check failed. This time, we pretty-print the AST struct with parenthesis + // inserted to preserve precedence. This may cause `None`-delimiters in the captured + // token stream to match up with inserted parenthesis in the reparsed stream. + let source_with_parens = pprust::nonterminal_to_string(nt); + let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens); + let tokens_with_parens = parse_stream_from_source_str( + filename_with_parens, + source_with_parens, + sess, + Some(span), + ); + + if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_with_parens, sess) { + return tokens; + } + info!( "cached tokens found, but they're not \"probably equal\", \ going with stringified version" @@ -489,12 +512,12 @@ pub fn tokentree_probably_equal_for_proc_macro( (TokenTree::Token(token), TokenTree::Token(reparsed_token)) => { token_probably_equal_for_proc_macro(token, reparsed_token) } - ( - TokenTree::Delimited(_, delim, tokens), - TokenTree::Delimited(_, reparsed_delim, reparsed_tokens), - ) => { - delim == reparsed_delim - && tokenstream_probably_equal_for_proc_macro(tokens, reparsed_tokens, sess) + (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => { + // `NoDelim` delimiters can appear in the captured tokenstream, but not + // in the reparsed tokenstream. Allow them to match with anything, so + // that we check if the two streams are structurally equivalent. + (delim == delim2 || *delim == DelimToken::NoDelim || *delim2 == DelimToken::NoDelim) + && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess) } _ => false, } diff --git a/src/test/ui/proc-macro/issue-75734-pp-paren.rs b/src/test/ui/proc-macro/issue-75734-pp-paren.rs new file mode 100644 index 0000000000000..faa93787d1385 --- /dev/null +++ b/src/test/ui/proc-macro/issue-75734-pp-paren.rs @@ -0,0 +1,26 @@ +// Regression test for issue #75734 +// Ensures that we don't lose tokens when pretty-printing would +// normally insert extra parentheses. + +// check-pass +// aux-build:test-macros.rs +// compile-flags: -Z span-debug + +#![no_std] // Don't load unnecessary hygiene information from std +extern crate std; + +#[macro_use] +extern crate test_macros; + +macro_rules! mul_2 { + ($val:expr) => { + print_bang!($val * 2); + }; +} + + +#[print_attr] +fn main() { + &|_: u8| {}; + mul_2!(1 + 1); +} diff --git a/src/test/ui/proc-macro/issue-75734-pp-paren.stdout b/src/test/ui/proc-macro/issue-75734-pp-paren.stdout new file mode 100644 index 0000000000000..b33b85f1705f5 --- /dev/null +++ b/src/test/ui/proc-macro/issue-75734-pp-paren.stdout @@ -0,0 +1,134 @@ +PRINT-ATTR INPUT (DISPLAY): fn main() { & | _ : u8 | { } ; mul_2 ! 
(1 + 1) ; } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "fn", + span: $DIR/issue-75734-pp-paren.rs:23:1: 23:3 (#0), + }, + Ident { + ident: "main", + span: $DIR/issue-75734-pp-paren.rs:23:4: 23:8 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/issue-75734-pp-paren.rs:23:8: 23:10 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Punct { + ch: '&', + spacing: Joint, + span: $DIR/issue-75734-pp-paren.rs:24:5: 24:6 (#0), + }, + Punct { + ch: '|', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:24:6: 24:7 (#0), + }, + Ident { + ident: "_", + span: $DIR/issue-75734-pp-paren.rs:24:7: 24:8 (#0), + }, + Punct { + ch: ':', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:24:8: 24:9 (#0), + }, + Ident { + ident: "u8", + span: $DIR/issue-75734-pp-paren.rs:24:10: 24:12 (#0), + }, + Punct { + ch: '|', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:24:12: 24:13 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/issue-75734-pp-paren.rs:24:14: 24:16 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:24:16: 24:17 (#0), + }, + Ident { + ident: "mul_2", + span: $DIR/issue-75734-pp-paren.rs:25:5: 25:10 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:25:10: 25:11 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/issue-75734-pp-paren.rs:25:12: 25:13 (#0), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:25:14: 25:15 (#0), + }, + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/issue-75734-pp-paren.rs:25:16: 25:17 (#0), + }, + ], + span: $DIR/issue-75734-pp-paren.rs:25:11: 25:18 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:25:18: 25:19 (#0), + }, + ], + span: $DIR/issue-75734-pp-paren.rs:23:11: 26:2 (#0), + }, +] +PRINT-BANG INPUT (DISPLAY): 1 + 1 * 2 +PRINT-BANG INPUT (DEBUG): TokenStream [ + Group { + delimiter: None, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/issue-75734-pp-paren.rs:25:12: 25:13 (#0), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:25:14: 25:15 (#0), + }, + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/issue-75734-pp-paren.rs:25:16: 25:17 (#0), + }, + ], + span: $DIR/issue-75734-pp-paren.rs:17:21: 17:25 (#7), + }, + Punct { + ch: '*', + spacing: Alone, + span: $DIR/issue-75734-pp-paren.rs:17:26: 17:27 (#7), + }, + Literal { + kind: Integer, + symbol: "2", + suffix: None, + span: $DIR/issue-75734-pp-paren.rs:17:28: 17:29 (#7), + }, +] From 820953819c490adfe40b488aad32c8eef7d2ecd1 Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Sat, 26 Sep 2020 13:12:49 -0400 Subject: [PATCH 3/5] Add `relaxed_delim_match` parameter --- compiler/rustc_parse/src/lib.rs | 69 +++++++++++++++++++++++++-------- 1 file changed, 53 insertions(+), 16 deletions(-) diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 51038b7d3aabc..dbdabdbb8e506 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -7,8 +7,8 @@ #![feature(or_patterns)] use rustc_ast as ast; -use rustc_ast::token::{self, Nonterminal, Token, TokenKind}; -use rustc_ast::tokenstream::{self, TokenStream, TokenTree}; +use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; +use 
rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; use rustc_errors::{Diagnostic, FatalError, Level, PResult}; @@ -329,9 +329,8 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // modifications, including adding/removing typically non-semantic // tokens such as extra braces and commas, don't happen. if let Some(tokens) = tokens { - // If the streams match, then the AST hasn't been modified. Return the captured - // `TokenStream`. - if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess) { + // Compare with a non-relaxed delim match to start. + if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess, false) { return tokens; } @@ -340,14 +339,21 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // token stream to match up with inserted parenthesis in the reparsed stream. let source_with_parens = pprust::nonterminal_to_string(nt); let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens); - let tokens_with_parens = parse_stream_from_source_str( + let reparsed_tokens_with_parens = parse_stream_from_source_str( filename_with_parens, source_with_parens, sess, Some(span), ); - if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_with_parens, sess) { + // Compare with a relaxed delim match - we want inserted parenthesis in the + // reparsed stream to match `None`-delimiters in the original stream. + if tokenstream_probably_equal_for_proc_macro( + &tokens, + &reparsed_tokens_with_parens, + sess, + true, + ) { return tokens; } @@ -355,8 +361,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke "cached tokens found, but they're not \"probably equal\", \ going with stringified version" ); - info!("cached tokens: {:?}", tokens); - info!("reparsed tokens: {:?}", reparsed_tokens); + info!("cached tokens: {}", pprust::tts_to_string(&tokens)); + info!("reparsed tokens: {}", pprust::tts_to_string(&reparsed_tokens_with_parens)); + + info!("cached tokens debug: {:?}", tokens); + info!("reparsed tokens debug: {:?}", reparsed_tokens_with_parens); } reparsed_tokens } @@ -370,6 +379,7 @@ pub fn tokenstream_probably_equal_for_proc_macro( tokens: &TokenStream, reparsed_tokens: &TokenStream, sess: &ParseSess, + relaxed_delim_match: bool, ) -> bool { // When checking for `probably_eq`, we ignore certain tokens that aren't // preserved in the AST. 
Because they are not preserved, the pretty @@ -495,7 +505,9 @@ pub fn tokenstream_probably_equal_for_proc_macro( let tokens = tokens.trees().flat_map(|t| expand_token(t, sess)); let reparsed_tokens = reparsed_tokens.trees().flat_map(|t| expand_token(t, sess)); - tokens.eq_by(reparsed_tokens, |t, rt| tokentree_probably_equal_for_proc_macro(&t, &rt, sess)) + tokens.eq_by(reparsed_tokens, |t, rt| { + tokentree_probably_equal_for_proc_macro(&t, &rt, sess, relaxed_delim_match) + }) } // See comments in `Nonterminal::to_tokenstream` for why we care about @@ -507,17 +519,42 @@ pub fn tokentree_probably_equal_for_proc_macro( token: &TokenTree, reparsed_token: &TokenTree, sess: &ParseSess, + relaxed_delim_match: bool, ) -> bool { match (token, reparsed_token) { (TokenTree::Token(token), TokenTree::Token(reparsed_token)) => { token_probably_equal_for_proc_macro(token, reparsed_token) } - (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => { - // `NoDelim` delimiters can appear in the captured tokenstream, but not - // in the reparsed tokenstream. Allow them to match with anything, so - // that we check if the two streams are structurally equivalent. - (delim == delim2 || *delim == DelimToken::NoDelim || *delim2 == DelimToken::NoDelim) - && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess) + ( + TokenTree::Delimited(_, delim, tokens), + TokenTree::Delimited(_, reparsed_delim, reparsed_tokens), + ) if delim == reparsed_delim => tokenstream_probably_equal_for_proc_macro( + tokens, + reparsed_tokens, + sess, + relaxed_delim_match, + ), + (TokenTree::Delimited(_, DelimToken::NoDelim, tokens), reparsed_token) => { + if relaxed_delim_match { + if let TokenTree::Delimited(_, DelimToken::Paren, reparsed_tokens) = reparsed_token + { + if tokenstream_probably_equal_for_proc_macro( + tokens, + reparsed_tokens, + sess, + relaxed_delim_match, + ) { + return true; + } + } + } + tokens.len() == 1 + && tokentree_probably_equal_for_proc_macro( + &tokens.trees().next().unwrap(), + reparsed_token, + sess, + relaxed_delim_match, + ) } _ => false, } From 477ce31d37af4630f61620ea5422276f0aa79582 Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Tue, 29 Sep 2020 00:31:13 -0400 Subject: [PATCH 4/5] Remove unused import --- compiler/rustc_parse/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index dbdabdbb8e506..cceaa08daa4a1 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -8,7 +8,7 @@ use rustc_ast as ast; use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; -use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{self, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; use rustc_errors::{Diagnostic, FatalError, Level, PResult}; From 9a6ea386472acb7e1e1dd24370ef9d60d07463f5 Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Sun, 11 Oct 2020 13:07:45 -0400 Subject: [PATCH 5/5] Add hack to keep `actix-web` and `actori-web` compiling This extends the existing `ident_name_compatibility_hack` to handle the `tuple_from_req` macro defined in `actix-web` (and its fork `actori-web`). 
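Stepping back to the `relaxed_delim_match` comparison from patch 3: a `None`-delimited group in the captured stream is allowed to match either a parenthesized group produced by the precedence-preserving reparse (only in relaxed mode) or its own single inner token tree. The sketch below models that rule with simplified stand-in types (not rustc's real `TokenTree`/`TokenStream`):

// Rough sketch of the relaxed delimiter-matching rule, on toy types.
#[derive(PartialEq)]
enum Delim { Paren, None }

enum Tree {
    Token(String),
    Delimited(Delim, Vec<Tree>),
}

fn streams_eq(a: &[Tree], b: &[Tree], relaxed: bool) -> bool {
    a.len() == b.len() && a.iter().zip(b).all(|(x, y)| trees_eq(x, y, relaxed))
}

fn trees_eq(a: &Tree, b: &Tree, relaxed: bool) -> bool {
    match (a, b) {
        (Tree::Token(x), Tree::Token(y)) => x == y,
        (Tree::Delimited(d1, t1), Tree::Delimited(d2, t2)) if d1 == d2 => {
            streams_eq(t1, t2, relaxed)
        }
        // A `None`-delimited group (e.g. a captured `$expr`) may match a
        // parenthesized group inserted by the pretty-printer (relaxed mode
        // only), or its single inner tree.
        (Tree::Delimited(Delim::None, t1), other) => {
            (relaxed
                && matches!(other, Tree::Delimited(Delim::Paren, t2) if streams_eq(t1, t2, relaxed)))
                || (t1.len() == 1 && trees_eq(&t1[0], other, relaxed))
        }
        _ => false,
    }
}

fn main() {
    let captured = Tree::Delimited(
        Delim::None,
        vec![
            Tree::Token("1".to_string()),
            Tree::Token("+".to_string()),
            Tree::Token("1".to_string()),
        ],
    );
    let reparsed = Tree::Delimited(
        Delim::Paren,
        vec![
            Tree::Token("1".to_string()),
            Tree::Token("+".to_string()),
            Tree::Token("1".to_string()),
        ],
    );
    // Strict matching rejects the pair; relaxed matching accepts it.
    assert!(!trees_eq(&captured, &reparsed, false));
    assert!(trees_eq(&captured, &reparsed, true));
}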
--- compiler/rustc_ast/src/token.rs | 21 ++++++++++---- compiler/rustc_span/src/symbol.rs | 1 + .../actix-web-2.0.0/src/extract.rs | 7 +++++ .../actix-web/src/extract.rs | 7 +++++ .../actori-web-2.0.0/src/extract.rs | 7 +++++ .../actori-web/src/extract.rs | 7 +++++ .../group-compat-hack/group-compat-hack.rs | 28 +++++++++++++++++++ .../group-compat-hack.stdout | 4 +++ 8 files changed, 77 insertions(+), 5 deletions(-) create mode 100644 src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs create mode 100644 src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs create mode 100644 src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs create mode 100644 src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index d5b3e87adc36a..9635750fb40bc 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -810,10 +810,10 @@ impl Nonterminal { if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind { let filename = source_map.span_to_filename(orig_span); if let FileName::Real(RealFileName::Named(path)) = filename { - let matches_prefix = |prefix| { - // Check for a path that ends with 'prefix*/src/lib.rs' + let matches_prefix = |prefix, filename| { + // Check for a path that ends with 'prefix*/src/' let mut iter = path.components().rev(); - iter.next().and_then(|p| p.as_os_str().to_str()) == Some("lib.rs") + iter.next().and_then(|p| p.as_os_str().to_str()) == Some(filename) && iter.next().and_then(|p| p.as_os_str().to_str()) == Some("src") && iter .next() @@ -821,14 +821,25 @@ impl Nonterminal { .map_or(false, |p| p.starts_with(prefix)) }; - if (macro_name == sym::impl_macros && matches_prefix("time-macros-impl")) - || (macro_name == sym::arrays && matches_prefix("js-sys")) + if (macro_name == sym::impl_macros + && matches_prefix("time-macros-impl", "lib.rs")) + || (macro_name == sym::arrays && matches_prefix("js-sys", "lib.rs")) { let snippet = source_map.span_to_snippet(orig_span); if snippet.as_deref() == Ok("$name") { return Some((*ident, *is_raw)); } } + + if macro_name == sym::tuple_from_req + && (matches_prefix("actix-web", "extract.rs") + || matches_prefix("actori-web", "extract.rs")) + { + let snippet = source_map.span_to_snippet(orig_span); + if snippet.as_deref() == Ok("$T") { + return Some((*ident, *is_raw)); + } + } } } } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 223a0758f008b..28fef65da070a 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1114,6 +1114,7 @@ symbols! { try_trait, tt, tuple, + tuple_from_req, tuple_indexing, two_phase, ty, diff --git a/src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs new file mode 100644 index 0000000000000..2d4f6010012df --- /dev/null +++ b/src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs @@ -0,0 +1,7 @@ +// ignore-test this is not a test + +macro_rules! 
tuple_from_req { + ($T:ident) => { + #[my_macro] struct Three($T); + } +} diff --git a/src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs new file mode 100644 index 0000000000000..2d4f6010012df --- /dev/null +++ b/src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs @@ -0,0 +1,7 @@ +// ignore-test this is not a test + +macro_rules! tuple_from_req { + ($T:ident) => { + #[my_macro] struct Three($T); + } +} diff --git a/src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs new file mode 100644 index 0000000000000..9ec6aba63f35f --- /dev/null +++ b/src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs @@ -0,0 +1,7 @@ +// ignore-test this is not a test + +macro_rules! tuple_from_req { + ($T:ident) => { + #[my_macro] struct Four($T); + } +} diff --git a/src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs new file mode 100644 index 0000000000000..9ec6aba63f35f --- /dev/null +++ b/src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs @@ -0,0 +1,7 @@ +// ignore-test this is not a test + +macro_rules! tuple_from_req { + ($T:ident) => { + #[my_macro] struct Four($T); + } +} diff --git a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs index bc82a2ff196d2..652fabf34ac3d 100644 --- a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs +++ b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs @@ -45,5 +45,33 @@ mod with_version { other!(Foo); } +mod actix_web_test { + include!("actix-web/src/extract.rs"); + + struct Foo; + tuple_from_req!(Foo); +} + +mod actix_web_version_test { + include!("actix-web-2.0.0/src/extract.rs"); + + struct Foo; + tuple_from_req!(Foo); +} + +mod actori_web_test { + include!("actori-web/src/extract.rs"); + + struct Foo; + tuple_from_req!(Foo); +} + +mod actori_web_version_test { + include!("actori-web-2.0.0/src/extract.rs"); + + struct Foo; + tuple_from_req!(Foo); +} + fn main() {} diff --git a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout index e83bc9f8fca7a..c6b18ab674baa 100644 --- a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout +++ b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout @@ -4,3 +4,7 @@ Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/gro Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:21: 5:27 (#20) }, Ident { ident: "One", span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:28: 5:31 (#20) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:43:18: 43:21 (#0) }], span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:31: 5:38 (#20) }, Punct { ch: ';', spacing: Alone, span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:38: 5:39 (#20) }] Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/js-sys-0.3.17/src/lib.rs:5:21: 5:27 (#24) }, Ident { ident: "Two", span: $DIR/js-sys-0.3.17/src/lib.rs:5:28: 5:31 (#24) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:44:13: 44:16 (#0) }], span: $DIR/js-sys-0.3.17/src/lib.rs:5:31: 5:38 (#24) }, 
Punct { ch: ';', spacing: Alone, span: $DIR/js-sys-0.3.17/src/lib.rs:5:38: 5:39 (#24) }] Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/group-compat-hack.rs:38:25: 38:31 (#28) }, Ident { ident: "Three", span: $DIR/group-compat-hack.rs:38:32: 38:37 (#28) }, Group { delimiter: Parenthesis, stream: TokenStream [Group { delimiter: None, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:45:12: 45:15 (#0) }], span: $DIR/group-compat-hack.rs:38:38: 38:43 (#28) }], span: $DIR/group-compat-hack.rs:38:37: 38:44 (#28) }, Punct { ch: ';', spacing: Alone, span: $DIR/group-compat-hack.rs:38:44: 38:45 (#28) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actix-web/src/extract.rs:5:21: 5:27 (#33) }, Ident { ident: "Three", span: $DIR/actix-web/src/extract.rs:5:28: 5:33 (#33) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:52:21: 52:24 (#0) }], span: $DIR/actix-web/src/extract.rs:5:33: 5:37 (#33) }, Punct { ch: ';', spacing: Alone, span: $DIR/actix-web/src/extract.rs:5:37: 5:38 (#33) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actix-web-2.0.0/src/extract.rs:5:21: 5:27 (#38) }, Ident { ident: "Three", span: $DIR/actix-web-2.0.0/src/extract.rs:5:28: 5:33 (#38) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:59:21: 59:24 (#0) }], span: $DIR/actix-web-2.0.0/src/extract.rs:5:33: 5:37 (#38) }, Punct { ch: ';', spacing: Alone, span: $DIR/actix-web-2.0.0/src/extract.rs:5:37: 5:38 (#38) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actori-web/src/extract.rs:5:21: 5:27 (#43) }, Ident { ident: "Four", span: $DIR/actori-web/src/extract.rs:5:28: 5:32 (#43) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:66:21: 66:24 (#0) }], span: $DIR/actori-web/src/extract.rs:5:32: 5:36 (#43) }, Punct { ch: ';', spacing: Alone, span: $DIR/actori-web/src/extract.rs:5:36: 5:37 (#43) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actori-web-2.0.0/src/extract.rs:5:21: 5:27 (#48) }, Ident { ident: "Four", span: $DIR/actori-web-2.0.0/src/extract.rs:5:28: 5:32 (#48) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:73:21: 73:24 (#0) }], span: $DIR/actori-web-2.0.0/src/extract.rs:5:32: 5:36 (#48) }, Punct { ch: ';', spacing: Alone, span: $DIR/actori-web-2.0.0/src/extract.rs:5:36: 5:37 (#48) }]
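For reference, the path check that gates the `tuple_from_req` hack (the `matches_prefix` closure in the `token.rs` hunk above) can be exercised on its own. The sketch below lifts that logic out and uses a made-up registry path for the demonstration; the real check additionally requires the enclosing macro to be named `tuple_from_req` (or `impl_macros`/`arrays` for the older cases) and the original snippet to be `$T` (or `$name`):

// Standalone version of the suffix check: does `path` end with
// `<prefix>*/src/<filename>`?
use std::path::Path;

fn matches_prefix(path: &Path, prefix: &str, filename: &str) -> bool {
    let mut iter = path.components().rev();
    iter.next().and_then(|p| p.as_os_str().to_str()) == Some(filename)
        && iter.next().and_then(|p| p.as_os_str().to_str()) == Some("src")
        && iter
            .next()
            .and_then(|p| p.as_os_str().to_str())
            .map_or(false, |p| p.starts_with(prefix))
}

fn main() {
    // Hypothetical registry-style paths, for illustration only.
    assert!(matches_prefix(
        Path::new("/registry/actix-web-2.0.0/src/extract.rs"),
        "actix-web",
        "extract.rs"
    ));
    assert!(!matches_prefix(
        Path::new("/registry/actix-web-2.0.0/src/lib.rs"),
        "actix-web",
        "extract.rs"
    ));
}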