Commit: Add ast::SequenceRepetition

pczarn committed Nov 7, 2014
1 parent 964191a commit 00676c8
Showing 7 changed files with 160 additions and 93 deletions.
95 changes: 63 additions & 32 deletions src/libsyntax/ast.rs
@@ -627,6 +627,19 @@ impl Delimited {
}
}

/// A sequence of token trees
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct SequenceRepetition {
/// The sequence of token trees
pub tts: Vec<TokenTree>,
/// The optional separator
pub separator: Option<token::Token>,
/// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp,
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
pub num_captures: uint,
}

/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
@@ -657,58 +670,76 @@ pub enum TokenTree {

// This only makes sense in MBE macros.

/// A kleene-style repetition sequence with a span, a TT forest,
/// an optional separator, and a boolean where true indicates
/// zero or more (..), and false indicates one or more (+).
/// The last member denotes the number of `MATCH_NONTERMINAL`s
/// in the forest.
// FIXME(eddyb) #12938 Use Rc<[TokenTree]> after DST.
TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp, uint),
/// A kleene-style repetition sequence with a span
// FIXME(eddyb) #12938 Use DST.
TtSequence(Span, Rc<SequenceRepetition>),
}

impl TokenTree {
/// For unrolling some tokens or token trees into equivalent sequences.
pub fn expand_into_tts(self) -> Rc<Vec<TokenTree>> {
match self {
TtToken(sp, token::DocComment(name)) => {
pub fn len(&self) -> uint {
match *self {
TtToken(_, token::DocComment(_)) => 2,
TtToken(_, token::SubstNt(..)) => 2,
TtToken(_, token::MatchNt(..)) => 3,
TtDelimited(_, ref delimed) => {
delimed.tts.len() + 2
}
TtSequence(_, ref seq) => {
seq.tts.len()
}
TtToken(..) => 0
}
}

pub fn get_tt(&self, index: uint) -> TokenTree {
match (self, index) {
(&TtToken(sp, token::DocComment(_)), 0) => {
TtToken(sp, token::Pound)
}
(&TtToken(sp, token::DocComment(name)), 1) => {
let doc = MetaNameValue(token::intern_and_get_ident("doc"),
respan(sp, LitStr(token::get_name(name), CookedStr)));
let doc = token::NtMeta(P(respan(sp, doc)));
let delimed = Delimited {
TtDelimited(sp, Rc::new(Delimited {
delim: token::Bracket,
open_span: sp,
tts: vec![TtToken(sp, token::Interpolated(doc))],
close_span: sp,
};
Rc::new(vec![TtToken(sp, token::Pound),
TtDelimited(sp, Rc::new(delimed))])
}))
}
TtDelimited(_, ref delimed) => {
let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
tts.push(delimed.open_tt());
tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
tts.push(delimed.close_tt());
Rc::new(tts)
(&TtDelimited(_, ref delimed), _) => {
if index == 0 {
return delimed.open_tt();
}
if index == delimed.tts.len() + 1 {
return delimed.close_tt();
}
delimed.tts[index - 1].clone()
}
(&TtToken(sp, token::SubstNt(name, name_st)), _) => {
let v = [TtToken(sp, token::Dollar),
TtToken(sp, token::Ident(name, name_st))];
v[index]
}
TtToken(sp, token::SubstNt(name, namep)) => {
Rc::new(vec![TtToken(sp, token::Dollar),
TtToken(sp, token::Ident(name, namep))])
(&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
let v = [TtToken(sp, token::SubstNt(name, name_st)),
TtToken(sp, token::Colon),
TtToken(sp, token::Ident(kind, kind_st))];
v[index]
}
TtToken(sp, token::MatchNt(name, kind, namep, kindp)) => {
Rc::new(vec![TtToken(sp, token::SubstNt(name, namep)),
TtToken(sp, token::Colon),
TtToken(sp, token::Ident(kind, kindp))])
(&TtSequence(_, ref seq), _) => {
seq.tts[index].clone()
}
_ => panic!("Cannot expand a token")
_ => panic!("Cannot expand a token tree")
}
}

/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
match *self {
TtToken(span, _) => span,
TtDelimited(span, _) => span,
TtSequence(span, _, _, _, _) => span,
TtToken(span, _) => span,
TtDelimited(span, _) => span,
TtSequence(span, _) => span,
}
}
}
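The main behavioral change in ast.rs above is that a token tree no longer expands itself eagerly into a fresh Rc<Vec<TokenTree>> (the removed expand_into_tts); instead, len() reports how many virtual sub-trees a tree unrolls into and get_tt(index) materializes each one on demand. Below is a minimal, self-contained sketch of that idea using toy stand-in types; the names and the single bracket delimiter are illustrative, not the real libsyntax definitions.

    // Toy model of the new lazy unrolling: a delimited tree reports a virtual
    // length of "open delimiter + contents + close delimiter" and builds each
    // element only when asked for it, instead of allocating a flattened Vec.
    #[derive(Clone, Debug, PartialEq)]
    enum Tt {
        Token(char),        // stand-in for TtToken
        Delimited(Vec<Tt>), // stand-in for TtDelimited with `[` `]` delimiters
    }

    impl Tt {
        fn len(&self) -> usize {
            match self {
                Tt::Token(_) => 0,
                Tt::Delimited(tts) => tts.len() + 2,
            }
        }

        fn get_tt(&self, index: usize) -> Tt {
            match self {
                Tt::Delimited(_) if index == 0 => Tt::Token('['),
                Tt::Delimited(tts) if index == tts.len() + 1 => Tt::Token(']'),
                Tt::Delimited(tts) => tts[index - 1].clone(),
                Tt::Token(_) => panic!("a plain token has no sub-trees"),
            }
        }
    }

    fn main() {
        let tree = Tt::Delimited(vec![Tt::Token('a'), Tt::Token('b')]);
        // Unrolls to `[ a b ]` without the tree ever storing that sequence.
        let unrolled: Vec<Tt> = (0..tree.len()).map(|i| tree.get_tt(i)).collect();
        assert_eq!(unrolled, vec![Tt::Token('['), Tt::Token('a'),
                                  Tt::Token('b'), Tt::Token(']')]);
    }

The point of the design is that unrolling a doc comment, a `$name` substitution or a delimited group never allocates an intermediate vector; callers such as the macro matcher index into the tree directly.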
72 changes: 46 additions & 26 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -100,17 +100,39 @@ use std::collections::hash_map::{Vacant, Occupied};
// To avoid costly uniqueness checks, we require that `MatchSeq` always has
// a nonempty body.

#[deriving(Clone)]
enum TokenTreeOrTokenTreeVec {
Tt(ast::TokenTree),
TtSeq(Rc<Vec<ast::TokenTree>>),
}

impl TokenTreeOrTokenTreeVec {
fn len(&self) -> uint {
match self {
&TtSeq(ref v) => v.len(),
&Tt(ref tt) => tt.len(),
}
}

fn get_tt(&self, index: uint) -> TokenTree {
match self {
&TtSeq(ref v) => v[index].clone(),
&Tt(ref tt) => tt.get_tt(index),
}
}
}

/// an unzipping of `TokenTree`s
#[deriving(Clone)]
struct MatcherTtFrame {
elts: Rc<Vec<ast::TokenTree>>,
elts: TokenTreeOrTokenTreeVec,
idx: uint,
}

#[deriving(Clone)]
pub struct MatcherPos {
stack: Vec<MatcherTtFrame>,
elts: Rc<Vec<ast::TokenTree>>,
top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
idx: uint,
up: Option<Box<MatcherPos>>,
@@ -124,8 +146,8 @@ pub struct MatcherPos {
pub fn count_names(ms: &[TokenTree]) -> uint {
ms.iter().fold(0, |count, elt| {
count + match elt {
&TtSequence(_, _, _, _, advance_by) => {
advance_by
&TtSequence(_, ref seq) => {
seq.num_captures
}
&TtDelimited(_, ref delim) => {
count_names(delim.tts.as_slice())
@@ -144,7 +166,7 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
box MatcherPos {
stack: vec![],
elts: ms,
top_elts: TtSeq(ms),
sep: sep,
idx: 0u,
up: None,
@@ -183,8 +205,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
match m {
&TtSequence(_, ref more_ms, _, _, _) => {
for next_m in more_ms.iter() {
&TtSequence(_, ref seq) => {
for next_m in seq.tts.iter() {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
@@ -278,18 +300,18 @@ pub fn parse(sess: &ParseSess,
};

// When unzipped trees end, remove them
while ei.idx >= ei.elts.len() {
while ei.idx >= ei.top_elts.len() {
match ei.stack.pop() {
Some(MatcherTtFrame { elts, idx }) => {
ei.elts = elts;
ei.top_elts = elts;
ei.idx = idx + 1;
}
None => break
}
}

let idx = ei.idx;
let len = ei.elts.len();
let len = ei.top_elts.len();

/* at end of sequence */
if idx >= len {
@@ -352,17 +374,16 @@ pub fn parse(sess: &ParseSess,
eof_eis.push(ei);
}
} else {
match (*ei.elts)[idx].clone() {
match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
TtSequence(_, ref matchers, ref sep, kleene_op, match_num) => {
if kleene_op == ast::ZeroOrMore {
TtSequence(sp, seq) => {
if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += match_num;
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1u;
//we specifically matched zero repeats.
for idx in range(ei.match_cur, ei.match_cur + match_num) {
new_ei.matches[idx]
.push(Rc::new(MatchedSeq(Vec::new(), sp)));
for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
new_ei.matches[idx].push(Rc::new(MatchedSeq(Vec::new(), sp)));
}

cur_eis.push(new_ei);
@@ -372,15 +393,15 @@ pub fn parse(sess: &ParseSess,
let ei_t = ei;
cur_eis.push(box MatcherPos {
stack: vec![],
elts: matchers.clone(),
sep: (*sep).clone(),
sep: seq.separator.clone(),
idx: 0u,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
match_hi: ei_t.match_cur + match_num,
match_hi: ei_t.match_cur + seq.num_captures,
up: Some(ei_t),
sp_lo: sp.lo
sp_lo: sp.lo,
top_elts: Tt(TtSequence(sp, seq)),
});
}
TtToken(_, MatchNt(..)) => {
@@ -395,11 +416,10 @@ pub fn parse(sess: &ParseSess,
return Error(sp, "Cannot transcribe in macro LHS".into_string())
}
seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
let tts = seq.expand_into_tts();
let elts = mem::replace(&mut ei.elts, tts);
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx;
ei.stack.push(MatcherTtFrame {
elts: elts,
elts: lower_elts,
idx: idx,
});
ei.idx = 0;
@@ -433,7 +453,7 @@ pub fn parse(sess: &ParseSess,
if (bb_eis.len() > 0u && next_eis.len() > 0u)
|| bb_eis.len() > 1u {
let nts = bb_eis.iter().map(|ei| {
match (*ei.elts)[ei.idx] {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
(format!("{} ('{}')",
token::get_ident(name),
@@ -458,7 +478,7 @@ pub fn parse(sess: &ParseSess,
let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());

let mut ei = bb_eis.pop().unwrap();
match (*ei.elts)[ei.idx] {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(_, name, _, _)) => {
let name_string = token::get_ident(name);
let match_cur = ei.match_cur;
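In macro_parser.rs the matcher position now carries its matcher either as a whole token-tree list or as a single tree (TokenTreeOrTokenTreeVec), and descends into delimited trees and sequences by pushing a MatcherTtFrame and indexing with get_tt, rather than splicing an expanded vector into ei.elts. The sketch below models only that frame-based traversal; the type and field names are simplified stand-ins chosen to mirror the diff, and the real matcher additionally tracks separators, repetition state and captured matches.

    // Toy walk over nested token trees with a stack of frames, mirroring the
    // role of MatcherTtFrame + top_elts: descend by saving (parent, index),
    // pop back when the nested list is exhausted.
    #[derive(Clone)]
    enum Tt {
        Token(char),
        Delimited(Vec<Tt>),
    }

    // Either a full matcher (a Vec of trees) or a single tree being walked,
    // playing the part of TokenTreeOrTokenTreeVec.
    enum TtOrTts {
        Tt(Tt),
        Tts(Vec<Tt>),
    }

    impl TtOrTts {
        fn len(&self) -> usize {
            match self {
                TtOrTts::Tts(v) => v.len(),
                // delimiters omitted here for brevity; the real TokenTree::len counts them
                TtOrTts::Tt(Tt::Delimited(v)) => v.len(),
                TtOrTts::Tt(Tt::Token(_)) => 0,
            }
        }

        fn get_tt(&self, index: usize) -> Tt {
            match self {
                TtOrTts::Tts(v) => v[index].clone(),
                TtOrTts::Tt(Tt::Delimited(v)) => v[index].clone(),
                TtOrTts::Tt(Tt::Token(_)) => unreachable!("plain tokens are leaves"),
            }
        }
    }

    struct Frame {
        elts: TtOrTts,
        idx: usize,
    }

    fn walk(top: Vec<Tt>) -> Vec<char> {
        let mut out = Vec::new();
        let mut stack: Vec<Frame> = Vec::new();
        let mut top_elts = TtOrTts::Tts(top);
        let mut idx = 0;
        loop {
            // When the current (possibly nested) list ends, pop back to the parent.
            while idx >= top_elts.len() {
                match stack.pop() {
                    Some(Frame { elts, idx: saved }) => {
                        top_elts = elts;
                        idx = saved + 1;
                    }
                    None => return out, // whole matcher exhausted
                }
            }
            match top_elts.get_tt(idx) {
                Tt::Token(c) => {
                    out.push(c);
                    idx += 1;
                }
                nested @ Tt::Delimited(_) => {
                    // Descend: remember where we were in the parent list.
                    let parent = std::mem::replace(&mut top_elts, TtOrTts::Tt(nested));
                    stack.push(Frame { elts: parent, idx });
                    idx = 0;
                }
            }
        }
    }

    fn main() {
        let tts = vec![
            Tt::Token('a'),
            Tt::Delimited(vec![Tt::Token('b'), Tt::Token('c')]),
            Tt::Token('d'),
        ];
        assert_eq!(walk(tts), vec!['a', 'b', 'c', 'd']);
    }

Walking this way costs one small frame per level of nesting instead of a cloned vector per descent.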
26 changes: 15 additions & 11 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -233,20 +233,24 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
let argument_gram = vec!(
TtSequence(DUMMY_SP,
Rc::new(vec![
TtToken(DUMMY_SP, match_lhs),
TtToken(DUMMY_SP, token::FatArrow),
TtToken(DUMMY_SP, match_rhs)]),
Some(token::Semi),
ast::OneOrMore,
2),
Rc::new(ast::SequenceRepetition {
tts: vec![
TtToken(DUMMY_SP, match_lhs_tok),
TtToken(DUMMY_SP, token::FatArrow),
TtToken(DUMMY_SP, match_rhs_tok)],
separator: Some(token::Semi),
op: ast::OneOrMore,
num_captures: 2
})),
//to phase into semicolon-termination instead of
//semicolon-separation
TtSequence(DUMMY_SP,
Rc::new(vec![TtToken(DUMMY_SP, token::Semi)]),
None,
ast::ZeroOrMore,
0));
Rc::new(ast::SequenceRepetition {
tts: vec![TtToken(DUMMY_SP, token::Semi)],
separator: None,
op: ast::ZeroOrMore,
num_captures: 0
})));


// Parse the macro_rules! invocation (`none` is for no interpolations):
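The two TtSequence values built above form the grammar macro_rules! uses to parse its own invocations: one or more `lhs => rhs` rules separated by `;`, each binding two nonterminals (hence num_captures: 2), followed by zero or more stray semicolons with no captures. Read back into surface macro syntax, the same shape looks like the sketch below; this is an illustrative, present-day macro_rules! rendering with a made-up name, not code from the commit.

    // Illustrative only: a user-level macro whose matcher has the same shape as
    // argument_gram above: $( $lhs:tt => $rhs:tt );+ followed by $( ; )*.
    macro_rules! rules_shaped {
        ($($lhs:tt => $rhs:tt);+ $(;)*) => {{
            // Each repetition binds exactly two fragments ($lhs and $rhs),
            // which is what num_captures: 2 records for the first sequence.
            let mut n = 0;
            $(
                let _ = (stringify!($lhs), stringify!($rhs));
                n += 1;
            )+
            n
        }};
    }

    fn main() {
        // Two `lhs => rhs` rules, `;`-separated, with a trailing `;` allowed
        // by the zero-or-more second sequence.
        assert_eq!(rules_shaped!(a => (1); b => (2);), 2);
    }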

5 comments on commit 00676c8

@bors (Contributor) commented on 00676c8 Nov 7, 2014

saw approval from pnkfelix
at pczarn@00676c8

@bors (Contributor) commented on 00676c8 Nov 7, 2014

merging pczarn/rust/interp_tt = 00676c8 into auto

@bors (Contributor) commented on 00676c8 Nov 7, 2014

pczarn/rust/interp_tt = 00676c8 merged ok, testing candidate = 0b48001

@bors (Contributor) commented on 00676c8 Nov 7, 2014

@bors (Contributor) commented on 00676c8 Nov 7, 2014

fast-forwarding master to auto = 0b48001