Improve Path spans.
jseyfried committed Mar 30, 2017
1 parent f08d5ad commit 8fde04b
Showing 9 changed files with 95 additions and 67 deletions.
7 changes: 4 additions & 3 deletions src/libsyntax/attr.rs
@@ -1015,9 +1015,10 @@ impl MetaItem {
     {
         let (mut span, name) = match tokens.next() {
             Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
-            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt {
-                token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
-                _ => None,
+            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt {
+                token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
+                token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
+                _ => return None,
             },
             _ => return None,
         };
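
Note: MetaItem parsing now also accepts an interpolated identifier (NtIdent) as the meta-item name, taking both the name and the span from the interpolated ident itself (ident.node.name, ident.span); previously any interpolated token other than NtMeta simply produced None.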
21 changes: 20 additions & 1 deletion src/libsyntax/ext/base.rs
@@ -209,7 +209,26 @@ impl<F> TTMacroExpander for F
 {
     fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
+        struct AvoidInterpolatedIdents;
+
+        impl Folder for AvoidInterpolatedIdents {
+            fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
+                if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
+                    if let token::NtIdent(ident) = **nt {
+                        return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
+                    }
+                }
+                fold::noop_fold_tt(tt, self)
+            }
+
+            fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+                fold::noop_fold_mac(mac, self)
+            }
+        }
+
+        let input: Vec<_> =
+            input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect();
+        (*self)(ecx, span, &input)
     }
 }
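
The new AvoidInterpolatedIdents folder rewrites the token stream before it reaches the older slice-of-token-trees expanders: every interpolated NtIdent token is replaced by a plain Ident token that carries the identifier's own span. Below is a minimal, self-contained sketch of that folding idea; the Tok and TokenTree types are hypothetical stand-ins, not the real libsyntax ones.

    // Hypothetical stand-in types; the real code folds libsyntax token trees.
    #[derive(Clone, Debug, PartialEq)]
    enum Tok {
        Ident(String),
        // An identifier captured by a macro matcher, carrying its original span.
        InterpolatedIdent { name: String, span: (u32, u32) },
        Other(char),
    }

    #[derive(Clone, Debug, PartialEq)]
    struct TokenTree {
        span: (u32, u32),
        tok: Tok,
    }

    fn avoid_interpolated_idents(tt: TokenTree) -> TokenTree {
        match tt.tok {
            // Unwrap the interpolated identifier into a plain Ident token,
            // using the identifier's own span for the rebuilt tree.
            Tok::InterpolatedIdent { name, span } => TokenTree { span, tok: Tok::Ident(name) },
            // Everything else passes through unchanged.
            tok => TokenTree { span: tt.span, tok },
        }
    }

    fn main() {
        let input = vec![
            TokenTree {
                span: (0, 2),
                tok: Tok::InterpolatedIdent { name: "foo".to_string(), span: (10, 13) },
            },
            TokenTree { span: (3, 4), tok: Tok::Other('!') },
        ];
        let output: Vec<TokenTree> = input.into_iter().map(avoid_interpolated_idents).collect();
        assert_eq!(output[0].tok, Tok::Ident("foo".to_string()));
        assert_eq!(output[0].span, (10, 13)); // the ident's own span is kept
        println!("{:?}", output);
    }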
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/macro_parser.rs
@@ -492,7 +492,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
         _ => {}
     }
     // check at the beginning and the parser checks after each bump
-    p.check_unknown_macro_variable();
+    p.process_potential_macro_variable();
     match name {
         "item" => match panictry!(p.parse_item()) {
             Some(i) => token::NtItem(i),
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/macro_rules.rs
@@ -121,7 +121,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             p.root_module_name = cx.current_expansion.module.mod_path.last()
                                     .map(|id| id.name.as_str().to_string());
 
-            p.check_unknown_macro_variable();
+            p.process_potential_macro_variable();
             // Let the context choose how to interpret the result.
             // Weird, but useful for X-macros.
             return Box::new(ParserAnyMacro {
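
Note: the two call sites above (macro_parser.rs and macro_rules.rs) switch from check_unknown_macro_variable, which only reported stray macro variables, to process_potential_macro_variable. Its definition appears in the parser.rs hunks further down: besides reporting unknown macro variables it also unwraps an interpolated identifier token into a plain Ident and records the interpolation span in meta_var_span.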
13 changes: 8 additions & 5 deletions src/libsyntax/ext/tt/quoted.rs
@@ -136,11 +136,14 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
             TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
                 let span = match trees.next() {
                     Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
-                            let span = Span { lo: start_sp.lo, ..end_sp };
-                            result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                            continue
-                        }
+                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+                            Some(kind) => {
+                                let span = Span { lo: start_sp.lo, ..end_sp };
+                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                continue
+                            }
+                            _ => end_sp,
+                        },
                         tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                     },
                     tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
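
Note: the fragment-specifier position after `$name:` is now read through tok.ident() rather than by matching token::Ident directly, so an interpolated identifier is accepted as a fragment kind too; when the token is not an identifier at all, end_sp is used as the resulting span in the fallthrough case.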
9 changes: 1 addition & 8 deletions src/libsyntax/ext/tt/transcribe.rs
@@ -12,7 +12,7 @@ use ast::Ident;
 use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
-use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
+use parse::token::{self, SubstNt, Token, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
@@ -154,13 +154,6 @@ pub fn transcribe(sp_diag: &Handler,
                 None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
                 Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                     match **nt {
-                        // sidestep the interpolation tricks for ident because
-                        // (a) idents can be in lots of places, so it'd be a pain
-                        // (b) we actually can, since it's a token.
-                        NtIdent(ref sn) => {
-                            let token = TokenTree::Token(sn.span, token::Ident(sn.node));
-                            result.push(token.into());
-                        }
                         NtTT(ref tt) => result.push(tt.clone().into()),
                         _ => {
                             let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
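
Note: the NtIdent special case is dropped from transcription. Matched identifiers now flow out as Interpolated tokens like every other nonterminal, and the parser unwraps them again in process_potential_macro_variable (see the parser.rs hunks below), keeping the identifier's own span while also remembering the span of the interpolated token.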
4 changes: 1 addition & 3 deletions src/libsyntax/parse/mod.rs
@@ -218,9 +218,7 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream
 
 /// Given stream and the ParseSess, produce a parser
 pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
-    let mut p = Parser::new(sess, stream, None, false);
-    p.check_unknown_macro_variable();
-    p
+    Parser::new(sess, stream, None, false)
 }
 
 /// Parse a string representing a character literal into its final form.
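
Note: stream_to_parser no longer performs the macro-variable check by hand because Parser::new now ends with parser.process_potential_macro_variable() (added in the parser.rs hunks below), so every freshly constructed parser starts with the same normalization.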
56 changes: 35 additions & 21 deletions src/libsyntax/parse/parser.rs
@@ -160,6 +160,7 @@ pub struct Parser<'a> {
     /// the span of the current token:
     pub span: Span,
     /// the span of the previous token:
+    pub meta_var_span: Option<Span>,
     pub prev_span: Span,
     /// the previous token kind
     prev_token_kind: PrevTokenKind,
@@ -417,6 +418,7 @@
             token: token::Underscore,
             span: syntax_pos::DUMMY_SP,
             prev_span: syntax_pos::DUMMY_SP,
+            meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
             restrictions: Restrictions::empty(),
             obsolete_set: HashSet::new(),
@@ -443,6 +445,7 @@
             parser.directory.path = PathBuf::from(sess.codemap().span_to_filename(parser.span));
             parser.directory.path.pop();
         }
+        parser.process_potential_macro_variable();
         parser
     }

@@ -1012,7 +1015,7 @@
             self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
         }
 
-        self.prev_span = self.span;
+        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
 
         // Record last token kind for possible error recovery.
         self.prev_token_kind = match self.token {
@@ -1028,7 +1031,7 @@
         self.token = next.tok;
         self.expected_tokens.clear();
         // check after each token
-        self.check_unknown_macro_variable();
+        self.process_potential_macro_variable();
     }
 
     /// Advance the parser using provided token as a next one. Use this when
@@ -1722,7 +1725,7 @@
     pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> {
         maybe_whole!(self, NtPath, |x| x);
 
-        let lo = self.span;
+        let lo = self.meta_var_span.unwrap_or(self.span);
         let is_global = self.eat(&token::ModSep);
 
         // Parse any number of segments and bound sets. A segment is an
@@ -1744,13 +1747,9 @@
             segments.insert(0, PathSegment::crate_root());
         }
 
-        // Assemble the span.
-        // FIXME(#39450) This is bogus if part of the path is macro generated.
-        let span = lo.to(self.prev_span);
-
         // Assemble the result.
         Ok(ast::Path {
-            span: span,
+            span: lo.to(self.prev_span),
             segments: segments,
         })
     }
@@ -1763,8 +1762,8 @@
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
+            let ident_span = self.prev_span;
 
             if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) {
                 self.bump();
@@ -1831,8 +1830,8 @@
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
+            let ident_span = self.prev_span;
 
             // If we do not see a `::`, stop.
             if !self.eat(&token::ModSep) {
@@ -1873,10 +1872,11 @@
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
+            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
 
             // Assemble and push the result.
-            segments.push(PathSegment::from_ident(identifier, self.prev_span));
+            segments.push(PathSegment::from_ident(identifier, ident_span));
 
             // If we do not see a `::` or see `::{`/`::*`, stop.
             if !self.check(&token::ModSep) || self.is_import_coupler() {
@@ -1896,8 +1896,9 @@
     fn expect_lifetime(&mut self) -> Lifetime {
         match self.token {
             token::Lifetime(ident) => {
+                let ident_span = self.span;
                 self.bump();
-                Lifetime { name: ident.name, span: self.prev_span, id: ast::DUMMY_NODE_ID }
+                Lifetime { name: ident.name, span: ident_span, id: ast::DUMMY_NODE_ID }
             }
             _ => self.span_bug(self.span, "not a lifetime")
         }
@@ -2568,10 +2569,23 @@
         return Ok(e);
     }
 
-    pub fn check_unknown_macro_variable(&mut self) {
-        if let token::SubstNt(name) = self.token {
-            self.fatal(&format!("unknown macro variable `{}`", name)).emit()
-        }
+    pub fn process_potential_macro_variable(&mut self) {
+        let ident = match self.token {
+            token::SubstNt(name) => {
+                self.fatal(&format!("unknown macro variable `{}`", name)).emit();
+                return
+            }
+            token::Interpolated(ref nt) => {
+                self.meta_var_span = Some(self.span);
+                match **nt {
+                    token::NtIdent(ident) => ident,
+                    _ => return,
+                }
+            }
+            _ => return,
+        };
+        self.token = token::Ident(ident.node);
+        self.span = ident.span;
     }
 
     /// parse a single token tree from the input.
@@ -2589,9 +2603,9 @@
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let token = mem::replace(&mut self.token, token::Underscore);
+                let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span);
                 self.bump();
-                TokenTree::Token(self.prev_span, token)
+                TokenTree::Token(span, token)
             }
         }
     }
@@ -3489,9 +3503,9 @@
     fn parse_pat_ident(&mut self,
                        binding_mode: ast::BindingMode)
                        -> PResult<'a, PatKind> {
+        let ident_span = self.span;
         let ident = self.parse_ident()?;
-        let prev_span = self.prev_span;
-        let name = codemap::Spanned{span: prev_span, node: ident};
+        let name = codemap::Spanned{span: ident_span, node: ident};
         let sub = if self.eat(&token::At) {
             Some(self.parse_pat()?)
         } else {
@@ -4364,7 +4378,7 @@
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         let expect_ident = |this: &mut Self| match this.token {
             // Preserve hygienic context.
-            token::Ident(ident) => { this.bump(); codemap::respan(this.prev_span, ident) }
+            token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
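
The heart of the commit is the meta_var_span field together with process_potential_macro_variable: when the current token is an interpolated nonterminal, the parser records that token's span in meta_var_span, and for NtIdent it additionally swaps the token for a plain Ident whose span is the identifier's own. bump() then prefers meta_var_span when setting prev_span, and parse_path uses it for the path's starting point, so spans assembled with lo.to(self.prev_span) are no longer bogus when part of the path was macro generated (the FIXME(#39450) removed above). A small, self-contained sketch of just that span bookkeeping, using a hypothetical cut-down parser rather than the real one:

    // Hypothetical cut-down types; the real Parser tracks far more state.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    struct MiniParser {
        span: Span,                  // span of the current token
        prev_span: Span,             // span of the previous token
        meta_var_span: Option<Span>, // set while the current token came from interpolation
    }

    impl MiniParser {
        // Mirrors the new bump(): when leaving a token that came from a macro
        // metavariable, the span recorded for the interpolated token becomes prev_span.
        fn bump_to(&mut self, next: Span) {
            self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
            self.span = next;
        }
    }

    fn main() {
        let mut p = MiniParser {
            span: Span { lo: 100, hi: 103 },             // the unwrapped identifier's own span
            prev_span: Span { lo: 0, hi: 0 },
            meta_var_span: Some(Span { lo: 7, hi: 10 }), // span recorded for the interpolated token
        };
        p.bump_to(Span { lo: 104, hi: 105 });
        assert_eq!(p.prev_span, Span { lo: 7, hi: 10 }); // prev_span kept the interpolation span
        assert_eq!(p.meta_var_span, None);               // ...and the marker was consumed
    }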
48 changes: 24 additions & 24 deletions src/libsyntax/parse/token.rs
@@ -211,9 +211,7 @@
             ModSep => true, // global path
             Pound => true, // expression attributes
             Interpolated(ref nt) => match **nt {
-                NtExpr(..) => true,
-                NtBlock(..) => true,
-                NtPath(..) => true,
+                NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
                 _ => false,
             },
             _ => false,
@@ -236,8 +234,7 @@
             Lt | BinOp(Shl) => true, // associated path
             ModSep => true, // global path
             Interpolated(ref nt) => match **nt {
-                NtTy(..) => true,
-                NtPath(..) => true,
+                NtIdent(..) | NtTy(..) | NtPath(..) => true,
                 _ => false,
             },
             _ => false,
@@ -252,14 +249,22 @@
         }
     }
 
-    /// Returns `true` if the token is an identifier.
-    pub fn is_ident(&self) -> bool {
+    pub fn ident(&self) -> Option<ast::Ident> {
         match *self {
-            Ident(..) => true,
-            _ => false,
+            Ident(ident) => Some(ident),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident) => Some(ident.node),
+                _ => None,
+            },
+            _ => None,
         }
     }
 
+    /// Returns `true` if the token is an identifier.
+    pub fn is_ident(&self) -> bool {
+        self.ident().is_some()
+    }
+
     /// Returns `true` if the token is a documentation comment.
     pub fn is_doc_comment(&self) -> bool {
         match *self {
@@ -311,18 +316,15 @@
 
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
-        match *self {
-            Ident(id) => id.name == kw.name(),
-            _ => false,
-        }
+        self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
     }
 
     pub fn is_path_segment_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name == keywords::Super.name() ||
-                         id.name == keywords::SelfValue.name() ||
-                         id.name == keywords::SelfType.name(),
-            _ => false,
+        match self.ident() {
+            Some(id) => id.name == keywords::Super.name() ||
+                        id.name == keywords::SelfValue.name() ||
+                        id.name == keywords::SelfType.name(),
+            None => false,
         }
     }
 
@@ -333,18 +335,16 @@
 
     /// Returns `true` if the token is a strict keyword.
    pub fn is_strict_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name >= keywords::As.name() &&
-                         id.name <= keywords::While.name(),
+        match self.ident() {
+            Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
             _ => false,
         }
     }
 
     /// Returns `true` if the token is a keyword reserved for possible future use.
     pub fn is_reserved_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name >= keywords::Abstract.name() &&
-                         id.name <= keywords::Yield.name(),
+        match self.ident() {
+            Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
             _ => false,
         }
     }
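
Token::ident() becomes the one accessor that sees through interpolation: it returns the identifier for a plain Ident token and for an interpolated NtIdent, and is_ident, is_keyword, is_path_segment_keyword, is_strict_keyword and is_reserved_keyword are all reimplemented on top of it, matching the NtIdent arms added in the first two hunks of this file. A standalone sketch of the pattern with a hypothetical, cut-down Token type:

    // Hypothetical cut-down Token; the real one carries ast::Ident and spans.
    #[derive(Clone, Debug, PartialEq)]
    enum Token {
        Ident(String),
        // An identifier that reached the parser wrapped in macro interpolation.
        InterpolatedIdent(String),
        Comma,
    }

    impl Token {
        // Single point of truth: look through interpolation when asking for an identifier.
        fn ident(&self) -> Option<&str> {
            match self {
                Token::Ident(name) | Token::InterpolatedIdent(name) => Some(name.as_str()),
                Token::Comma => None,
            }
        }

        fn is_ident(&self) -> bool {
            self.ident().is_some()
        }

        fn is_keyword(&self, kw: &str) -> bool {
            self.ident().map(|name| name == kw).unwrap_or(false)
        }

        fn is_path_segment_keyword(&self) -> bool {
            match self.ident() {
                Some(name) => name == "super" || name == "self" || name == "Self",
                None => false,
            }
        }
    }

    fn main() {
        assert!(Token::Ident("fn".to_string()).is_keyword("fn"));
        // The interpolated form now passes the same checks as a plain identifier.
        assert!(Token::InterpolatedIdent("self".to_string()).is_path_segment_keyword());
        assert!(!Token::Comma.is_ident());
    }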
