Less NoDelim #96421

Merged · 5 commits · Apr 28, 2022
6 changes: 3 additions & 3 deletions compiler/rustc_ast/src/ast.rs
@@ -1542,10 +1542,10 @@ pub enum MacArgs {
}

impl MacArgs {
- pub fn delim(&self) -> DelimToken {
+ pub fn delim(&self) -> Option<DelimToken> {
match self {
- MacArgs::Delimited(_, delim, _) => delim.to_token(),
- MacArgs::Empty | MacArgs::Eq(..) => token::NoDelim,
+ MacArgs::Delimited(_, delim, _) => Some(delim.to_token()),
+ MacArgs::Empty | MacArgs::Eq(..) => None,
}
}

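For readers skimming the diff, here is a minimal standalone sketch of the pattern this first hunk applies (hypothetical `Args`/`Delim` types, not the real `MacArgs`/`DelimToken`): the `NoDelim` sentinel is replaced by an `Option` return value, so callers must handle the "no delimiter" case explicitly instead of comparing against a magic variant.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim {
    Paren,
    Bracket,
    Brace,
}

enum Args {
    Empty,
    Eq(String),
    Delimited(Delim, String),
}

impl Args {
    // No sentinel: the absence of a delimiter is represented by `None`.
    fn delim(&self) -> Option<Delim> {
        match self {
            Args::Delimited(delim, _) => Some(*delim),
            Args::Empty | Args::Eq(..) => None,
        }
    }
}

fn main() {
    let call = Args::Delimited(Delim::Paren, "a, b".to_string());
    assert_eq!(call.delim(), Some(Delim::Paren));
    assert_eq!(Args::Empty.delim(), None);
}
```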
38 changes: 20 additions & 18 deletions compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -464,7 +464,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
Some(MacHeader::Path(&item.path)),
false,
None,
- delim.to_token(),
+ Some(delim.to_token()),
tokens,
true,
span,
@@ -530,7 +530,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
None,
false,
None,
- *delim,
+ Some(*delim),
tts,
convert_dollar_crate,
dspan.entire(),
@@ -556,12 +556,12 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
header: Option<MacHeader<'_>>,
has_bang: bool,
ident: Option<Ident>,
- delim: DelimToken,
+ delim: Option<DelimToken>,
tts: &TokenStream,
convert_dollar_crate: bool,
span: Span,
) {
- if delim == DelimToken::Brace {
+ if delim == Some(DelimToken::Brace) {
self.cbox(INDENT_UNIT);
}
match header {
@@ -577,31 +577,33 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_ident(ident);
}
match delim {
- DelimToken::Brace => {
+ Some(DelimToken::Brace) => {
if header.is_some() || has_bang || ident.is_some() {
self.nbsp();
}
self.word("{");
if !tts.is_empty() {
self.space();
}
- }
- _ => {
- let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
- self.word(token_str)
- }
- }
- self.ibox(0);
- self.print_tts(tts, convert_dollar_crate);
- self.end();
- match delim {
- DelimToken::Brace => {
+ self.ibox(0);
+ self.print_tts(tts, convert_dollar_crate);
+ self.end();
let empty = tts.is_empty();
self.bclose(span, empty);
}
- _ => {
+ Some(delim) => {
let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
self.word(token_str);
self.ibox(0);
self.print_tts(tts, convert_dollar_crate);
self.end();
let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
- self.word(token_str)
+ self.word(token_str);
}
+ None => {
+ self.ibox(0);
+ self.print_tts(tts, convert_dollar_crate);
+ self.end();
+ }
}
}
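A rough standalone sketch (hypothetical names, not rustc's pretty-printer) of what the restructured `print_mac_common` match above expresses: `Some(Brace)` is special-cased with interior spacing, `Some(_)` prints the literal open/close tokens, and `None` prints the token stream with no surrounding delimiter at all.

```rust
#[derive(Clone, Copy, PartialEq)]
enum Delim {
    Paren,
    Bracket,
    Brace,
}

// Braces get interior spacing, other delimiters wrap the body verbatim,
// and `None` returns the body with no surrounding delimiter.
fn print_delimited(body: &str, delim: Option<Delim>) -> String {
    match delim {
        Some(Delim::Brace) => format!("{{ {} }}", body),
        Some(Delim::Paren) => format!("({})", body),
        Some(Delim::Bracket) => format!("[{}]", body),
        None => body.to_string(),
    }
}

fn main() {
    assert_eq!(print_delimited("a, b", Some(Delim::Paren)), "(a, b)");
    assert_eq!(print_delimited("x;", Some(Delim::Brace)), "{ x; }");
    assert_eq!(print_delimited("1 + 2", None), "1 + 2");
}
```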
9 changes: 4 additions & 5 deletions compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -260,16 +260,15 @@ fn generic_extension<'cx, 'tt>(
// Merge the gated spans from parsing the matcher with the pre-existing ones.
sess.gated_spans.merge(gated_spans_snapshot);

- // Ignore the delimiters on the RHS.
- let rhs = match &rhses[i] {
- mbe::TokenTree::Delimited(_, delimited) => &delimited.tts,
+ let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
+ mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span),
_ => cx.span_bug(sp, "malformed macro rhs"),
};
let arm_span = rhses[i].span();

- let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
+ let rhs_spans = rhs.tts.iter().map(|t| t.span()).collect::<Vec<_>>();
// rhs has holes ( `$id` and `$(...)` that need filled)
- let mut tts = match transcribe(cx, &named_matches, &rhs, transparency) {
+ let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) {
Ok(tts) => tts,
Err(mut err) => {
err.emit();
13 changes: 7 additions & 6 deletions compiler/rustc_expand/src/mbe/transcribe.rs
@@ -29,8 +29,8 @@ impl MutVisitor for Marker {
enum Frame<'a> {
Delimited {
tts: &'a [mbe::TokenTree],
- delim_token: token::DelimToken,
idx: usize,
+ delim_token: token::DelimToken,
span: DelimSpan,
},
Sequence {
@@ -42,8 +42,8 @@ enum Frame<'a> {

impl<'a> Frame<'a> {
/// Construct a new frame around the delimited set of tokens.
- fn new(tts: &'a [mbe::TokenTree]) -> Frame<'a> {
- Frame::Delimited { tts, delim_token: token::NoDelim, idx: 0, span: DelimSpan::dummy() }
+ fn new(src: &'a mbe::Delimited, span: DelimSpan) -> Frame<'a> {
+ Frame::Delimited { tts: &src.tts, idx: 0, delim_token: src.delim, span }
}
}

@@ -85,17 +85,18 @@ impl<'a> Iterator for Frame<'a> {
pub(super) fn transcribe<'a>(
cx: &ExtCtxt<'a>,
interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
- src: &[mbe::TokenTree],
+ src: &mbe::Delimited,
+ src_span: DelimSpan,
transparency: Transparency,
) -> PResult<'a, TokenStream> {
// Nothing for us to transcribe...
- if src.is_empty() {
+ if src.tts.is_empty() {
return Ok(TokenStream::default());
}

// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
// we have yet to expand/are still expanding. We start the stack off with the whole RHS.
- let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(&src)];
+ let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(&src, src_span)];

// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
// `repeats` keeps track of where we are in matching at each level, with the last element being
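Below is a rough standalone sketch (hypothetical `Delimited`/`Frame`/`Span` types, not the real mbe ones) of the `Frame` change above: the root transcription frame is now built from the actual RHS delimited group, so it carries that group's real delimiter and span instead of a `NoDelim` sentinel and a dummy span.

```rust
// Stand-ins for the mbe types; the real code uses mbe::Delimited, DelimSpan, etc.
#[derive(Clone, Copy, Debug)]
enum Delim {
    Paren,
    Bracket,
    Brace,
}

#[derive(Clone, Copy, Debug)]
struct Span {
    open: u32,
    close: u32,
}

struct Delimited {
    delim: Delim,
    tts: Vec<String>, // stand-in for token trees
}

struct Frame<'a> {
    tts: &'a [String],
    idx: usize,
    delim: Delim,
    span: Span,
}

impl<'a> Frame<'a> {
    // The root frame is built from a real delimited group, so it carries that
    // group's delimiter and span rather than NoDelim and DelimSpan::dummy().
    fn new(src: &'a Delimited, span: Span) -> Frame<'a> {
        Frame { tts: &src.tts, idx: 0, delim: src.delim, span }
    }
}

fn main() {
    let rhs = Delimited { delim: Delim::Brace, tts: vec!["$x".into(), "+".into(), "1".into()] };
    let frame = Frame::new(&rhs, Span { open: 10, close: 20 });
    println!("{:?} frame at {:?} with {} tokens (idx {})", frame.delim, frame.span, frame.tts.len(), frame.idx);
}
```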
27 changes: 15 additions & 12 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -5,7 +5,7 @@ use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream,
use rustc_ast::{self as ast};
use rustc_ast::{AstLike, AttrVec, Attribute};
use rustc_errors::PResult;
- use rustc_span::{sym, Span, DUMMY_SP};
+ use rustc_span::{sym, Span};

use std::convert::TryInto;
use std::ops::Range;
@@ -400,24 +400,26 @@ fn make_token_stream(
) -> AttrAnnotatedTokenStream {
#[derive(Debug)]
struct FrameData {
- open: Span,
- open_delim: DelimToken,
+ // This is `None` for the first frame, `Some` for all others.
+ open_delim_sp: Option<(DelimToken, Span)>,
inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
}
- let mut stack =
- vec![FrameData { open: DUMMY_SP, open_delim: DelimToken::NoDelim, inner: vec![] }];
+ let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
let mut token_and_spacing = iter.next();
while let Some((token, spacing)) = token_and_spacing {
match token {
FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
- stack.push(FrameData { open: span, open_delim: delim, inner: vec![] });
+ stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
}
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
// HACK: If we encounter a mismatched `None` delimiter at the top
// level, just ignore it.
[Inline review thread on the HACK above]

Contributor: Is this still necessary? There's no longer a DelimToken::NoDelim at the top level.

Contributor: There's also a similar // HACK comment lower in the same function.

Contributor (author): The "mismatched None delimiter at the top level" here refers to `token`, which comes from the iterator, not the frame. Indeed, the condition looks for a non-NoDelim in the frame. So this change preserves the existing behaviour, as I understand it.

More generally: there are three HACK comments in this function, and a comment at the top of the function referring to #67062. The HACK comments are on three if statements. I tried changing the bodies of those three if statements to panic!() and the test suite passed. I then tried removing all three if statements and the test suite again passed.

These if statements were added in #82608, with an explanation here. I don't know if (a) they never did anything useful, (b) they do something useful but we have no test coverage for them, or (c) something has changed in the meantime that means they are no longer necessary.

I suggest we leave this change as is, and consider removing these hacks in a separate PR. That way, if there are any regressions, they'll be easier to deal with.

Contributor (author): I have filed #96543 to remove the hacks.

[End of review thread]

if matches!(delim, DelimToken::NoDelim)
&& (stack.len() == 1
- || !matches!(stack.last_mut().unwrap().open_delim, DelimToken::NoDelim))
+ || !matches!(
+ stack.last_mut().unwrap().open_delim_sp.unwrap().0,
+ DelimToken::NoDelim
+ ))
{
token_and_spacing = iter.next();
continue;
@@ -430,7 +432,7 @@ fn make_token_stream(
// merge our current frame with the one above it. That is, transform
// `[ { < first second } third ]` into `[ { first second } third ]`
if !matches!(delim, DelimToken::NoDelim)
- && matches!(frame_data.open_delim, DelimToken::NoDelim)
+ && matches!(frame_data.open_delim_sp.unwrap().0, DelimToken::NoDelim)
{
stack.last_mut().unwrap().inner.extend(frame_data.inner);
// Process our closing delimiter again, this time at the previous
@@ -439,12 +441,13 @@ fn make_token_stream(
continue;
}

+ let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
assert_eq!(
- frame_data.open_delim, delim,
+ open_delim, delim,
"Mismatched open/close delims: open={:?} close={:?}",
- frame_data.open, span
+ open_delim, span
);
- let dspan = DelimSpan::from_pair(frame_data.open, span);
+ let dspan = DelimSpan::from_pair(open_sp, span);
let stream = AttrAnnotatedTokenStream::new(frame_data.inner);
let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream);
stack
@@ -472,7 +475,7 @@ fn make_token_stream(
// HACK: If we don't have a closing `None` delimiter for our last
// frame, merge the frame with the top-level frame. That is,
// turn `< first second` into `first second`
- if stack.len() == 2 && stack[1].open_delim == DelimToken::NoDelim {
+ if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == DelimToken::NoDelim {
let temp_buf = stack.pop().unwrap();
stack.last_mut().unwrap().inner.extend(temp_buf.inner);
}
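A simplified, self-contained sketch (hypothetical `Delim`/`Tree` types, no rustc dependencies) of the `FrameData` shape introduced above: the bottom-of-stack frame represents the whole stream and has no opening delimiter, so its field is `None`, while every frame pushed for a real open delimiter carries `Some`.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim {
    Paren,
    Bracket,
    Brace,
}

#[derive(Debug)]
enum Tree {
    Token(char),
    Delimited(Delim, Vec<Tree>),
}

struct FrameData {
    // `None` for the first (top-level) frame, `Some` for all others.
    open_delim: Option<Delim>,
    inner: Vec<Tree>,
}

fn open_of(c: char) -> Option<Delim> {
    match c {
        '(' => Some(Delim::Paren),
        '[' => Some(Delim::Bracket),
        '{' => Some(Delim::Brace),
        _ => None,
    }
}

fn close_of(c: char) -> Option<Delim> {
    match c {
        ')' => Some(Delim::Paren),
        ']' => Some(Delim::Bracket),
        '}' => Some(Delim::Brace),
        _ => None,
    }
}

fn make_trees(input: &str) -> Vec<Tree> {
    // The bottom frame has no opening delimiter, mirroring `open_delim_sp: None`.
    let mut stack = vec![FrameData { open_delim: None, inner: Vec::new() }];
    for c in input.chars() {
        if let Some(delim) = open_of(c) {
            stack.push(FrameData { open_delim: Some(delim), inner: Vec::new() });
        } else if let Some(delim) = close_of(c) {
            let frame = stack.pop().expect("unbalanced input");
            assert_eq!(frame.open_delim, Some(delim), "mismatched delimiters");
            stack.last_mut().unwrap().inner.push(Tree::Delimited(delim, frame.inner));
        } else {
            stack.last_mut().unwrap().inner.push(Tree::Token(c));
        }
    }
    assert_eq!(stack.len(), 1, "unclosed delimiter");
    stack.pop().unwrap().inner
}

fn main() {
    let trees = make_trees("a(b[c]d)e");
    println!("{:#?}", trees);
}
```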
3 changes: 2 additions & 1 deletion compiler/rustc_parse/src/parser/expr.rs
@@ -2043,7 +2043,8 @@ impl<'a> Parser<'a> {
self.sess.gated_spans.gate(sym::async_closure, span);
}

- if self.token.kind == TokenKind::Semi && self.token_cursor.frame.delim == DelimToken::Paren
+ if self.token.kind == TokenKind::Semi
+ && matches!(self.token_cursor.frame.delim_sp, Some((DelimToken::Paren, _)))
{
// It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next
34 changes: 13 additions & 21 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -244,14 +244,13 @@ struct TokenCursor {

#[derive(Clone)]
struct TokenCursorFrame {
- delim: token::DelimToken,
- span: DelimSpan,
+ delim_sp: Option<(DelimToken, DelimSpan)>,
tree_cursor: tokenstream::Cursor,
}

impl TokenCursorFrame {
- fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
- TokenCursorFrame { delim, span, tree_cursor: tts.into_trees() }
+ fn new(delim_sp: Option<(DelimToken, DelimSpan)>, tts: TokenStream) -> Self {
+ TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
}
}

@@ -266,7 +265,7 @@ impl TokenCursor {
loop {
// FIXME: we currently don't return `NoDelim` open/close delims. To fix #67062 we will
// need to, whereupon the `delim != DelimToken::NoDelim` conditions below can be
- // removed, as well as the loop.
+ // removed.
if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
match tree {
&TokenTree::Token(ref token) => match (desugar_doc_comments, token) {
@@ -277,7 +276,7 @@ impl TokenCursor {
},
&TokenTree::Delimited(sp, delim, ref tts) => {
// Set `open_delim` to true here because we deal with it immediately.
- let frame = TokenCursorFrame::new(sp, delim, tts.clone());
+ let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
self.stack.push(mem::replace(&mut self.frame, frame));
if delim != DelimToken::NoDelim {
return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
@@ -286,12 +285,11 @@ impl TokenCursor {
}
};
} else if let Some(frame) = self.stack.pop() {
- let delim = self.frame.delim;
- let span = self.frame.span;
- self.frame = frame;
- if delim != DelimToken::NoDelim {
+ if let Some((delim, span)) = self.frame.delim_sp && delim != DelimToken::NoDelim {
+ self.frame = frame;
return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
}
+ self.frame = frame;
+ // No close delimiter to return; continue on to the next iteration.
} else {
return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
@@ -330,8 +328,7 @@ impl TokenCursor {
self.stack.push(mem::replace(
&mut self.frame,
TokenCursorFrame::new(
- delim_span,
- token::NoDelim,
+ None,
if attr_style == AttrStyle::Inner {
[TokenTree::token(token::Pound, span), TokenTree::token(token::Not, span), body]
.iter()
@@ -431,10 +428,6 @@ impl<'a> Parser<'a> {
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
- // Note: because of the way `TokenCursor::inlined_next` is structured, the `span` and
- // `delim` arguments here are never used.
- let start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens);
-
let mut parser = Parser {
sess,
token: Token::dummy(),
@@ -444,7 +437,7 @@ impl<'a> Parser<'a> {
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
- frame: start_frame,
+ frame: TokenCursorFrame::new(None, tokens),
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
@@ -1025,7 +1018,7 @@ impl<'a> Parser<'a> {
}

let frame = &self.token_cursor.frame;
- if frame.delim != DelimToken::NoDelim {
+ if let Some((delim, span)) = frame.delim_sp && delim != DelimToken::NoDelim {
let all_normal = (0..dist).all(|i| {
let token = frame.tree_cursor.look_ahead(i);
!matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _)))
@@ -1038,7 +1031,7 @@ impl<'a> Parser<'a> {
looker(&Token::new(token::OpenDelim(*delim), dspan.open))
}
},
- None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
+ None => looker(&Token::new(token::CloseDelim(delim), span.close)),
};
}
}
@@ -1198,8 +1191,7 @@ impl<'a> Parser<'a> {
// Grab the tokens from this frame.
let frame = &self.token_cursor.frame;
let stream = frame.tree_cursor.stream.clone();
- let span = frame.span;
- let delim = frame.delim;
+ let (delim, span) = frame.delim_sp.unwrap();

// Advance the token cursor through the entire delimited
// sequence. After getting the `OpenDelim` we are *within* the
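To close, a rough standalone sketch (hypothetical `Tree`/`Cursor`/`SimpleToken` types, not the real `TokenCursor`) of the frame handling these parser hunks converge on: a frame's delimiter is optional, the root frame is `None`, and a close-delimiter token is synthesized only when the frame being popped actually had a delimiter.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim {
    Paren,
    Brace,
}

#[derive(Debug, PartialEq)]
enum SimpleToken {
    Ident(&'static str),
    Open(Delim),
    Close(Delim),
    Eof,
}

#[derive(Clone)]
enum Tree {
    Token(&'static str),
    Delimited(Delim, Vec<Tree>),
}

struct Frame {
    delim: Option<Delim>, // `None` only for the root frame
    trees: Vec<Tree>,
    idx: usize,
}

struct Cursor {
    frame: Frame,
    stack: Vec<Frame>,
}

impl Cursor {
    fn new(trees: Vec<Tree>) -> Cursor {
        Cursor { frame: Frame { delim: None, trees, idx: 0 }, stack: Vec::new() }
    }

    fn next_token(&mut self) -> SimpleToken {
        loop {
            if self.frame.idx < self.frame.trees.len() {
                let tree = self.frame.trees[self.frame.idx].clone();
                self.frame.idx += 1;
                match tree {
                    Tree::Token(s) => return SimpleToken::Ident(s),
                    Tree::Delimited(delim, trees) => {
                        // Descend into the group and emit its open delimiter.
                        let inner = Frame { delim: Some(delim), trees, idx: 0 };
                        self.stack.push(std::mem::replace(&mut self.frame, inner));
                        return SimpleToken::Open(delim);
                    }
                }
            } else if let Some(outer) = self.stack.pop() {
                // Synthesize a close delimiter only if this frame had one;
                // a `None` frame just pops silently.
                let delim = self.frame.delim;
                self.frame = outer;
                if let Some(delim) = delim {
                    return SimpleToken::Close(delim);
                }
            } else {
                return SimpleToken::Eof;
            }
        }
    }
}

fn main() {
    let mut cursor = Cursor::new(vec![
        Tree::Token("a"),
        Tree::Delimited(Delim::Paren, vec![Tree::Token("b")]),
    ]);
    loop {
        let tok = cursor.next_token();
        println!("{:?}", tok);
        if tok == SimpleToken::Eof {
            break;
        }
    }
}
```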