Commit d59accf

cgswords authored and committed
Refactored tokentrees into their own files in preparation for tokenstreams. Modified tests to point to the new file now.
1 parent 5522e67 · commit d59accf

34 files changed: +342 -287 lines
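
In practice the change is an import-path move: TokenTree and the related token-tree types now live in a dedicated tokenstream module instead of ast. A minimal before/after sketch based on the hunks below (paths as seen from librustc; within libsyntax itself the crate prefix is dropped, e.g. use tokenstream::TokenTree;):

// Before this commit (the import being removed below):
use syntax::ast::TokenTree;

// After this commit (the import being added below):
use syntax::tokenstream::TokenTree;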

src/librustc/hir/mod.rs (+2 -1)

@@ -38,11 +38,12 @@ use util::nodemap::{NodeMap, FnvHashSet};
 
 use syntax::codemap::{self, mk_sp, respan, Span, Spanned, ExpnId};
 use syntax::abi::Abi;
-use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, TokenTree, AsmDialect};
+use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::attr::{ThinAttributes, ThinAttributesExt};
 use syntax::parse::token::{keywords, InternedString};
 use syntax::ptr::P;
+use syntax::tokenstream::TokenTree;
 
 use std::collections::BTreeMap;
 use std::fmt;

src/libsyntax/ast.rs (+1 -191)

@@ -19,13 +19,10 @@ use attr::{ThinAttributes, HasAttrs};
 use codemap::{mk_sp, respan, Span, Spanned, DUMMY_SP, ExpnId};
 use abi::Abi;
 use errors;
-use ext::base;
-use ext::tt::macro_parser;
 use parse::token::{self, keywords, InternedString};
-use parse::lexer;
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use print::pprust;
 use ptr::P;
+use tokenstream::{TokenTree};
 
 use std::fmt;
 use std::rc::Rc;
@@ -1097,193 +1094,6 @@ pub enum CaptureBy {
     Ref,
 }
 
-/// A delimited sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct Delimited {
-    /// The type of delimiter
-    pub delim: token::DelimToken,
-    /// The span covering the opening delimiter
-    pub open_span: Span,
-    /// The delimited sequence of token trees
-    pub tts: Vec<TokenTree>,
-    /// The span covering the closing delimiter
-    pub close_span: Span,
-}
-
-impl Delimited {
-    /// Returns the opening delimiter as a token.
-    pub fn open_token(&self) -> token::Token {
-        token::OpenDelim(self.delim)
-    }
-
-    /// Returns the closing delimiter as a token.
-    pub fn close_token(&self) -> token::Token {
-        token::CloseDelim(self.delim)
-    }
-
-    /// Returns the opening delimiter as a token tree.
-    pub fn open_tt(&self) -> TokenTree {
-        TokenTree::Token(self.open_span, self.open_token())
-    }
-
-    /// Returns the closing delimiter as a token tree.
-    pub fn close_tt(&self) -> TokenTree {
-        TokenTree::Token(self.close_span, self.close_token())
-    }
-}
-
-/// A sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct SequenceRepetition {
-    /// The sequence of token trees
-    pub tts: Vec<TokenTree>,
-    /// The optional separator
-    pub separator: Option<token::Token>,
-    /// Whether the sequence can be repeated zero (*), or one or more times (+)
-    pub op: KleeneOp,
-    /// The number of `MatchNt`s that appear in the sequence (and subsequences)
-    pub num_captures: usize,
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum KleeneOp {
-    ZeroOrMore,
-    OneOrMore,
-}
-
-/// When the main rust parser encounters a syntax-extension invocation, it
-/// parses the arguments to the invocation as a token-tree. This is a very
-/// loose structure, such that all sorts of different AST-fragments can
-/// be passed to syntax extensions using a uniform type.
-///
-/// If the syntax extension is an MBE macro, it will attempt to match its
-/// LHS token tree against the provided token tree, and if it finds a
-/// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
-///
-/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
-/// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub enum TokenTree {
-    /// A single token
-    Token(Span, token::Token),
-    /// A delimited sequence of token trees
-    Delimited(Span, Rc<Delimited>),
-
-    // This only makes sense in MBE macros.
-
-    /// A kleene-style repetition sequence with a span
-    // FIXME(eddyb) #12938 Use DST.
-    Sequence(Span, Rc<SequenceRepetition>),
-}
-
-impl TokenTree {
-    pub fn len(&self) -> usize {
-        match *self {
-            TokenTree::Token(_, token::DocComment(name)) => {
-                match doc_comment_style(&name.as_str()) {
-                    AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3
-                }
-            }
-            TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
-            TokenTree::Token(_, token::MatchNt(..)) => 3,
-            TokenTree::Delimited(_, ref delimed) => {
-                delimed.tts.len() + 2
-            }
-            TokenTree::Sequence(_, ref seq) => {
-                seq.tts.len()
-            }
-            TokenTree::Token(..) => 0
-        }
-    }
-
-    pub fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
-                TokenTree::Token(sp, token::Pound)
-            }
-            (&TokenTree::Token(sp, token::DocComment(name)), 1)
-            if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
-                TokenTree::Token(sp, token::Not)
-            }
-            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
-                let stripped = strip_doc_comment_decoration(&name.as_str());
-
-                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-                // required to wrap the text.
-                let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
-                    *cnt = if x == '"' {
-                        1
-                    } else if *cnt != 0 && x == '#' {
-                        *cnt + 1
-                    } else {
-                        0
-                    };
-                    Some(*cnt)
-                }).max().unwrap_or(0);
-
-                TokenTree::Delimited(sp, Rc::new(Delimited {
-                    delim: token::Bracket,
-                    open_span: sp,
-                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
-                              TokenTree::Token(sp, token::Eq),
-                              TokenTree::Token(sp, token::Literal(
-                                  token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
-                    close_span: sp,
-                }))
-            }
-            (&TokenTree::Delimited(_, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt();
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt();
-                }
-                delimed.tts[index - 1].clone()
-            }
-            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
-                let v = [TokenTree::Token(sp, token::Dollar),
-                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
-                v[index].clone()
-            }
-            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
-                let v = [TokenTree::Token(sp, token::SubstNt(name)),
-                         TokenTree::Token(sp, token::Colon),
-                         TokenTree::Token(sp, token::Ident(kind))];
-                v[index].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => {
-                seq.tts[index].clone()
-            }
-            _ => panic!("Cannot expand a token tree")
-        }
-    }
-
-    /// Returns the `Span` corresponding to this token tree.
-    pub fn get_span(&self) -> Span {
-        match *self {
-            TokenTree::Token(span, _) => span,
-            TokenTree::Delimited(span, _) => span,
-            TokenTree::Sequence(span, _) => span,
-        }
-    }
-
-    /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
-                 -> macro_parser::NamedParseResult {
-        // `None` is because we're not interpolating
-        let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
-                                                         None,
-                                                         None,
-                                                         tts.iter().cloned().collect(),
-                                                         true);
-        macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
-    }
-}
-
 pub type Mac = Spanned<Mac_>;
 
 /// Represents a macro invocation. The Path indicates which macro
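
The destination of these definitions (presumably src/libsyntax/tokenstream.rs) is among the 34 changed files but is not shown in this excerpt. Based on the deletions above and the new tokenstream imports elsewhere in the commit, its public surface would look roughly as follows; this is a sketch, not the verbatim contents of the new file:

// Presumed layout of the new tokenstream module; type and field names are taken
// from the code removed from ast.rs above, derive attributes omitted for brevity.
use std::rc::Rc;

use codemap::Span;
use parse::token;

pub struct Delimited {
    pub delim: token::DelimToken,
    pub open_span: Span,
    pub tts: Vec<TokenTree>,
    pub close_span: Span,
}

pub struct SequenceRepetition {
    pub tts: Vec<TokenTree>,
    pub separator: Option<token::Token>,
    pub op: KleeneOp,
    pub num_captures: usize,
}

pub enum KleeneOp {
    ZeroOrMore,
    OneOrMore,
}

pub enum TokenTree {
    Token(Span, token::Token),
    Delimited(Span, Rc<Delimited>),
    Sequence(Span, Rc<SequenceRepetition>),
}

// ...plus the impl blocks removed above (open_token/close_token, open_tt/close_tt,
// len, get_tt, get_span, parse), presumably carried over unchanged.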

src/libsyntax/diagnostics/plugin.rs (+2 -1)

@@ -13,12 +13,13 @@ use std::collections::BTreeMap;
 use std::env;
 
 use ast;
-use ast::{Ident, Name, TokenTree};
+use ast::{Ident, Name};
 use codemap::Span;
 use ext::base::{ExtCtxt, MacEager, MacResult};
 use ext::build::AstBuilder;
 use parse::token;
 use ptr::P;
+use tokenstream::{TokenTree};
 use util::small_vector::SmallVector;
 
 use diagnostics::metadata::output_metadata;

src/libsyntax/ext/base.rs (+16 -12)

@@ -31,6 +31,7 @@ use fold::Folder;
 use std::collections::{HashMap, HashSet};
 use std::rc::Rc;
 use std::default::Default;
+use tokenstream;
 
 
 #[derive(Debug,Clone)]
@@ -163,20 +164,22 @@ pub trait TTMacroExpander {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    span: Span,
-                   token_tree: &[ast::TokenTree])
+                   token_tree: &[tokenstream::TokenTree])
                    -> Box<MacResult+'cx>;
 }
 
 pub type MacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>;
+    for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+                -> Box<MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>
+    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+              -> Box<MacResult+'cx>
 {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    span: Span,
-                   token_tree: &[ast::TokenTree])
+                   token_tree: &[tokenstream::TokenTree])
                    -> Box<MacResult+'cx> {
         (*self)(ecx, span, token_tree)
     }
@@ -187,22 +190,23 @@ pub trait IdentMacroExpander {
                    cx: &'cx mut ExtCtxt,
                    sp: Span,
                    ident: ast::Ident,
-                   token_tree: Vec<ast::TokenTree> )
+                   token_tree: Vec<tokenstream::TokenTree> )
                    -> Box<MacResult+'cx>;
 }
 
 pub type IdentMacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<ast::TokenTree>) -> Box<MacResult+'cx>;
+    for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>)
+                -> Box<MacResult+'cx>;
 
 impl<F> IdentMacroExpander for F
     where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident,
-                          Vec<ast::TokenTree>) -> Box<MacResult+'cx>
+                          Vec<tokenstream::TokenTree>) -> Box<MacResult+'cx>
 {
     fn expand<'cx>(&self,
                    cx: &'cx mut ExtCtxt,
                    sp: Span,
                    ident: ast::Ident,
-                   token_tree: Vec<ast::TokenTree> )
+                   token_tree: Vec<tokenstream::TokenTree> )
                    -> Box<MacResult+'cx>
     {
         (*self)(cx, sp, ident, token_tree)
@@ -607,7 +611,7 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[ast::TokenTree])
+    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
                                -> parser::Parser<'a> {
         parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg())
     }
@@ -806,7 +810,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
 /// done as rarely as possible).
 pub fn check_zero_tts(cx: &ExtCtxt,
                       sp: Span,
-                      tts: &[ast::TokenTree],
+                      tts: &[tokenstream::TokenTree],
                       name: &str) {
     if !tts.is_empty() {
         cx.span_err(sp, &format!("{} takes no arguments", name));
@@ -817,7 +821,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
 /// is not a string literal, emit an error and return None.
 pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
                                sp: Span,
-                               tts: &[ast::TokenTree],
+                               tts: &[tokenstream::TokenTree],
                                name: &str)
                                -> Option<String> {
     let mut p = cx.new_parser_from_tts(tts);
@@ -838,7 +842,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
 /// parsing error, emit a non-fatal error and return None.
 pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
                           sp: Span,
-                          tts: &[ast::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
+                          tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
     let mut p = cx.new_parser_from_tts(tts);
     let mut es = Vec::new();
     while p.token != token::Eof {
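
For code that implements or registers expanders, only the element type of the token-tree arguments changes. A minimal plugin-side sketch against the updated signature (the expander name and body are hypothetical, not part of this commit):

// Hypothetical bang-macro expander matching the new MacroExpanderFn shape:
// the token trees now come from syntax::tokenstream instead of syntax::ast.
extern crate syntax;

use syntax::codemap::Span;
use syntax::ext::base::{DummyResult, ExtCtxt, MacResult};
use syntax::tokenstream::TokenTree;

fn expand_noop<'cx>(_cx: &'cx mut ExtCtxt,
                    sp: Span,
                    _tts: &[TokenTree])
                    -> Box<MacResult + 'cx> {
    // A real expander would parse _tts (e.g. via _cx.new_parser_from_tts(_tts))
    // and build AST from it; this one simply expands to nothing.
    DummyResult::any(sp)
}

Registration of such expanders is unaffected by this commit; only the element type of the slice they receive has moved modules.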

src/libsyntax/ext/expand.rs (+1 -1)

@@ -11,7 +11,6 @@
 use ast::{Block, Crate, DeclKind, PatKind};
 use ast::{Local, Ident, Mac_, Name, SpannedIdent};
 use ast::{MacStmtStyle, Mrk, Stmt, StmtKind, ItemKind};
-use ast::TokenTree;
 use ast;
 use ext::mtwt;
 use ext::build::AstBuilder;
@@ -27,6 +26,7 @@ use fold::*;
 use util::move_map::MoveMap;
 use parse::token::{fresh_mark, fresh_name, intern, keywords};
 use ptr::P;
+use tokenstream::TokenTree;
 use util::small_vector::SmallVector;
 use visit;
 use visit::Visitor;
