proc_macro: Fix expand_expr expansion of bool literals #98463

Merged 1 commit on Jul 10, 2022
4 changes: 4 additions & 0 deletions compiler/rustc_expand/src/proc_macro_server.rs
@@ -448,6 +448,10 @@ impl server::TokenStream for Rustc<'_, '_> {
        // We don't use `TokenStream::from_ast` as the tokenstream currently cannot
        // be recovered in the general case.
        match &expr.kind {
            ast::ExprKind::Lit(l) if l.token.kind == token::Bool => {
                Ok(tokenstream::TokenTree::token(token::Ident(l.token.symbol, false), l.span)
                    .into())
            }
            ast::ExprKind::Lit(l) => {
                Ok(tokenstream::TokenTree::token(token::Literal(l.token), l.span).into())
            }
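For context, not part of the diff above: the observable effect of this change on the proc-macro side is that a `bool` literal returned from `expand_expr` now arrives as a `TokenTree::Ident` (`true`/`false`) rather than a malformed `Literal` token. Below is a minimal sketch of a nightly proc macro checking that behavior; the crate setup and the macro name `assert_expanded_bool` are hypothetical, and only the `expand_expr` call itself is the real (unstable `proc_macro_expand`) API.

// Hypothetical helper, not part of this PR: expands its input and asserts
// that a bool literal comes back as an ident token.
#![feature(proc_macro_expand)]

extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};

#[proc_macro]
pub fn assert_expanded_bool(input: TokenStream) -> TokenStream {
    let expanded = input.expand_expr().expect("expand_expr failed");
    match expanded.into_iter().next() {
        // With this fix, `true` and `false` show up as ident tokens.
        Some(TokenTree::Ident(id)) => {
            assert!(id.to_string() == "true" || id.to_string() == "false")
        }
        other => panic!("expected a bool ident, found {:?}", other),
    }
    TokenStream::new()
}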
71 changes: 70 additions & 1 deletion src/test/ui/proc-macro/auxiliary/expand-expr.rs
@@ -10,6 +10,72 @@ extern crate proc_macro;
use proc_macro::*;
use std::str::FromStr;

// Flatten the TokenStream, removing any toplevel `Delimiter::None`s for
// comparison.
fn flatten(ts: TokenStream) -> Vec<TokenTree> {
    ts.into_iter()
        .flat_map(|tt| match &tt {
            TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
                flatten(group.stream())
            }
            _ => vec![tt],
        })
        .collect()
}

// Assert that two TokenStream values are roughly equal to one-another.
fn assert_ts_eq(lhs: &TokenStream, rhs: &TokenStream) {
    let ltts = flatten(lhs.clone());
    let rtts = flatten(rhs.clone());

    if ltts.len() != rtts.len() {
        panic!(
            "expected the same number of tts ({} == {})\nlhs:\n{:#?}\nrhs:\n{:#?}",
            ltts.len(),
            rtts.len(),
            lhs,
            rhs
        )
    }

    for (ltt, rtt) in ltts.iter().zip(&rtts) {
        match (ltt, rtt) {
            (TokenTree::Group(l), TokenTree::Group(r)) => {
                assert_eq!(
                    l.delimiter(),
                    r.delimiter(),
                    "expected delimiters to match for {:?} and {:?}",
                    l,
                    r
                );
                assert_ts_eq(&l.stream(), &r.stream());
            }
            (TokenTree::Punct(l), TokenTree::Punct(r)) => assert_eq!(
                (l.as_char(), l.spacing()),
                (r.as_char(), r.spacing()),
                "expected punct to match for {:?} and {:?}",
                l,
                r
            ),
            (TokenTree::Ident(l), TokenTree::Ident(r)) => assert_eq!(
                l.to_string(),
                r.to_string(),
                "expected ident to match for {:?} and {:?}",
                l,
                r
            ),
            (TokenTree::Literal(l), TokenTree::Literal(r)) => assert_eq!(
                l.to_string(),
                r.to_string(),
                "expected literal to match for {:?} and {:?}",
                l,
                r
            ),
            (l, r) => panic!("expected type to match for {:?} and {:?}", l, r),
        }
    }
}

#[proc_macro]
pub fn expand_expr_is(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
@@ -31,6 +97,9 @@ pub fn expand_expr_is(input: TokenStream) -> TokenStream {
        expanded.to_string()
    );

    // Also compare the raw tts to make sure they line up.
    assert_ts_eq(&expected, &expanded);

    TokenStream::new()
}
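
As an aside, not part of this diff: the helper above is exercised from the companion UI test roughly as sketched below. The `echo_tts!` forwarding macro is written out here for illustration and may not match the actual test file verbatim.

// Illustrative only: a forwarding macro_rules! helper, plus invocations that
// check a bool produced by another macro against the literal on the left.
macro_rules! echo_tts {
    ($($t:tt)*) => { $($t)* };
}

expand_expr_is!(true, echo_tts!(true));
expand_expr_is!(false, echo_tts!(false));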

@@ -48,7 +117,7 @@ pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {
    // invocation expand to the same literal.
    let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
    let parse_t = TokenStream::from_str("file!{}")
        .unwrap()
        .unwrap()
        .expand_expr()
        .expect("expand_expr failed on internal macro")
        .to_string();
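
To close the loop, a hedged sketch of the intended call site, so that a `file!()` written by the caller and the `file!{}` parsed inside the macro expand to the same string literal; the exact invocation in the UI test may differ.

// Illustrative invocation of the macro above from a test file.
check_expand_expr_file!(file!());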