Remove hacks in make_token_stream.
`make_token_stream` has three commented hacks, and a comment at the top
referring to rust-lang#67062. These hacks have no observable effect, at least as judged
by running the test suite. The hacks were added in rust-lang#82608, with an explanation
[here](rust-lang#82608 (comment)). It
appears that one of the following is true: (a) they never did anything useful,
(b) they do something useful but we have no test coverage for them, or (c)
something has changed in the meantime that means they are no longer necessary.

This commit removes the hacks and the comments, in the hope that (b) is not
true.
nnethercote committed Apr 29, 2022
1 parent baaa3b6 commit bb398ca
Showing 1 changed file with 0 additions and 38 deletions.
38 changes: 0 additions & 38 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -388,12 +388,6 @@ impl<'a> Parser<'a> {
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
// FIXME(#67062): Currently, we don't parse `Invisible`-delimited groups correctly,
// which can cause us to end up with mismatched `Invisible` delimiters in our
// captured tokens. This function contains several hacks to work around this -
// essentially, we throw away mismatched `Invisible` delimiters when we encounter them.
// Once we properly parse `Invisible` delimiters, they can be captured just like any
// other tokens, and these hacks can be removed.
fn make_token_stream(
mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
break_last_token: bool,
@@ -412,35 +406,10 @@ fn make_token_stream(
stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
}
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
// HACK: If we encounter a mismatched `Invisible` delimiter at the top
// level, just ignore it.
if matches!(delim, Delimiter::Invisible)
&& (stack.len() == 1
|| !matches!(
stack.last_mut().unwrap().open_delim_sp.unwrap().0,
Delimiter::Invisible
))
{
token_and_spacing = iter.next();
continue;
}
let frame_data = stack
.pop()
.unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));

// HACK: If our current frame has a mismatched opening `Invisible` delimiter,
// merge our current frame with the one above it. That is, transform
// `[ { < first second } third ]` into `[ { first second } third ]`
if !matches!(delim, Delimiter::Invisible)
&& matches!(frame_data.open_delim_sp.unwrap().0, Delimiter::Invisible)
{
stack.last_mut().unwrap().inner.extend(frame_data.inner);
// Process our closing delimiter again, this time at the previous
// frame in the stack
token_and_spacing = Some((token, spacing));
continue;
}

let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
assert_eq!(
open_delim, delim,
@@ -472,13 +441,6 @@
}
token_and_spacing = iter.next();
}
// HACK: If we don't have a closing `Invisible` delimiter for our last
// frame, merge the frame with the top-level frame. That is,
// turn `< first second` into `first second`
if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == Delimiter::Invisible {
let temp_buf = stack.pop().unwrap();
stack.last_mut().unwrap().inner.extend(temp_buf.inner);
}
let mut final_buf = stack.pop().expect("Missing final buf!");
if break_last_token {
let (last_token, spacing) = final_buf.inner.pop().unwrap();
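For readers unfamiliar with the function: with these hacks removed, `make_token_stream` reduces to a plain stack-based delimiter matcher, where each open delimiter pushes a frame, each close delimiter pops its frame into a delimited tree, and a mismatch is simply a bug. The following is a minimal, self-contained sketch of that technique, not the rustc code; the `Delim`, `Tok`, and `Tree` types are invented for illustration and only loosely mirror `Delimiter`, `FlatToken`, and `TokenTree`.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim {
    Paren,
    Bracket,
    Brace,
}

#[derive(Clone, Debug)]
enum Tok {
    Open(Delim),
    Close(Delim),
    Ident(&'static str),
}

#[derive(Debug)]
enum Tree {
    Leaf(&'static str),
    Delimited(Delim, Vec<Tree>),
}

fn build_trees(tokens: impl IntoIterator<Item = Tok>) -> Vec<Tree> {
    // Each frame holds the delimiter that opened it (`None` for the
    // top-level frame) plus the trees collected so far, loosely mirroring
    // the `FrameData` stack in the real function.
    let mut stack: Vec<(Option<Delim>, Vec<Tree>)> = vec![(None, Vec::new())];
    for tok in tokens {
        match tok {
            Tok::Open(d) => stack.push((Some(d), Vec::new())),
            Tok::Close(d) => {
                let (open, inner) = stack.pop().expect("close token with empty stack");
                // With the hacks gone, a mismatched delimiter is just a bug.
                assert_eq!(open, Some(d), "mismatched delimiters");
                stack.last_mut().unwrap().1.push(Tree::Delimited(d, inner));
            }
            Tok::Ident(s) => stack.last_mut().unwrap().1.push(Tree::Leaf(s)),
        }
    }
    assert_eq!(stack.len(), 1, "unclosed delimiter left on the stack");
    stack.pop().unwrap().1
}

fn main() {
    // `[ { first second } third ]` becomes a bracketed tree containing a
    // braced tree and a trailing leaf.
    let toks = vec![
        Tok::Open(Delim::Bracket),
        Tok::Open(Delim::Brace),
        Tok::Ident("first"),
        Tok::Ident("second"),
        Tok::Close(Delim::Brace),
        Tok::Ident("third"),
        Tok::Close(Delim::Bracket),
    ];
    println!("{:#?}", build_trees(toks));
}
```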
