Skip to content

Commit 82a15a6

Browse files
committed
Rollup merge of rust-lang#34385 - cgswords:tstream, r=nrc
syntax-[breaking-change] cc rust-lang#31645 (Only breaking because ast::TokenTree is now tokenstream::TokenTree.) This pull request refactors TokenTrees into their own file as src/libsyntax/tokenstream.rs, moving them out of src/libsyntax/ast.rs, in order to prepare for an accompanying TokenStream implementation (per RFC 1566).
2 parents d3ae56d + d59accf commit 82a15a6

34 files changed

+342
-287
lines changed

src/librustc/hir/mod.rs

+2-1
Original file line number | Diff line number | Diff line change
@@ -39,11 +39,12 @@ use util::nodemap::{NodeMap, FnvHashSet};
3939
use syntax_pos::{mk_sp, Span, ExpnId};
4040
use syntax::codemap::{self, respan, Spanned};
4141
use syntax::abi::Abi;
42-
use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, TokenTree, AsmDialect};
42+
use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
4343
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
4444
use syntax::attr::{ThinAttributes, ThinAttributesExt};
4545
use syntax::parse::token::{keywords, InternedString};
4646
use syntax::ptr::P;
47+
use syntax::tokenstream::TokenTree;
4748

4849
use std::collections::BTreeMap;
4950
use std::fmt;

src/libsyntax/ast.rs

+1-191
Original file line number | Diff line number | Diff line change
@@ -20,13 +20,10 @@ use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId};
2020
use codemap::{respan, Spanned};
2121
use abi::Abi;
2222
use errors;
23-
use ext::base;
24-
use ext::tt::macro_parser;
2523
use parse::token::{self, keywords, InternedString};
26-
use parse::lexer;
27-
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
2824
use print::pprust;
2925
use ptr::P;
26+
use tokenstream::{TokenTree};
3027

3128
use std::fmt;
3229
use std::rc::Rc;
@@ -1134,193 +1131,6 @@ pub enum CaptureBy {
11341131
Ref,
11351132
}
11361133

1137-
/// A delimited sequence of token trees
1138-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
1139-
pub struct Delimited {
1140-
/// The type of delimiter
1141-
pub delim: token::DelimToken,
1142-
/// The span covering the opening delimiter
1143-
pub open_span: Span,
1144-
/// The delimited sequence of token trees
1145-
pub tts: Vec<TokenTree>,
1146-
/// The span covering the closing delimiter
1147-
pub close_span: Span,
1148-
}
1149-
1150-
impl Delimited {
1151-
/// Returns the opening delimiter as a token.
1152-
pub fn open_token(&self) -> token::Token {
1153-
token::OpenDelim(self.delim)
1154-
}
1155-
1156-
/// Returns the closing delimiter as a token.
1157-
pub fn close_token(&self) -> token::Token {
1158-
token::CloseDelim(self.delim)
1159-
}
1160-
1161-
/// Returns the opening delimiter as a token tree.
1162-
pub fn open_tt(&self) -> TokenTree {
1163-
TokenTree::Token(self.open_span, self.open_token())
1164-
}
1165-
1166-
/// Returns the closing delimiter as a token tree.
1167-
pub fn close_tt(&self) -> TokenTree {
1168-
TokenTree::Token(self.close_span, self.close_token())
1169-
}
1170-
}
1171-
1172-
/// A sequence of token trees
1173-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
1174-
pub struct SequenceRepetition {
1175-
/// The sequence of token trees
1176-
pub tts: Vec<TokenTree>,
1177-
/// The optional separator
1178-
pub separator: Option<token::Token>,
1179-
/// Whether the sequence can be repeated zero (*), or one or more times (+)
1180-
pub op: KleeneOp,
1181-
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
1182-
pub num_captures: usize,
1183-
}
1184-
1185-
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
1186-
/// for token sequences.
1187-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
1188-
pub enum KleeneOp {
1189-
ZeroOrMore,
1190-
OneOrMore,
1191-
}
1192-
1193-
/// When the main rust parser encounters a syntax-extension invocation, it
1194-
/// parses the arguments to the invocation as a token-tree. This is a very
1195-
/// loose structure, such that all sorts of different AST-fragments can
1196-
/// be passed to syntax extensions using a uniform type.
1197-
///
1198-
/// If the syntax extension is an MBE macro, it will attempt to match its
1199-
/// LHS token tree against the provided token tree, and if it finds a
1200-
/// match, will transcribe the RHS token tree, splicing in any captured
1201-
/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
1202-
///
1203-
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
1204-
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
1205-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
1206-
pub enum TokenTree {
1207-
/// A single token
1208-
Token(Span, token::Token),
1209-
/// A delimited sequence of token trees
1210-
Delimited(Span, Rc<Delimited>),
1211-
1212-
// This only makes sense in MBE macros.
1213-
1214-
/// A kleene-style repetition sequence with a span
1215-
// FIXME(eddyb) #12938 Use DST.
1216-
Sequence(Span, Rc<SequenceRepetition>),
1217-
}
1218-
1219-
impl TokenTree {
1220-
pub fn len(&self) -> usize {
1221-
match *self {
1222-
TokenTree::Token(_, token::DocComment(name)) => {
1223-
match doc_comment_style(&name.as_str()) {
1224-
AttrStyle::Outer => 2,
1225-
AttrStyle::Inner => 3
1226-
}
1227-
}
1228-
TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
1229-
TokenTree::Token(_, token::MatchNt(..)) => 3,
1230-
TokenTree::Delimited(_, ref delimed) => {
1231-
delimed.tts.len() + 2
1232-
}
1233-
TokenTree::Sequence(_, ref seq) => {
1234-
seq.tts.len()
1235-
}
1236-
TokenTree::Token(..) => 0
1237-
}
1238-
}
1239-
1240-
pub fn get_tt(&self, index: usize) -> TokenTree {
1241-
match (self, index) {
1242-
(&TokenTree::Token(sp, token::DocComment(_)), 0) => {
1243-
TokenTree::Token(sp, token::Pound)
1244-
}
1245-
(&TokenTree::Token(sp, token::DocComment(name)), 1)
1246-
if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
1247-
TokenTree::Token(sp, token::Not)
1248-
}
1249-
(&TokenTree::Token(sp, token::DocComment(name)), _) => {
1250-
let stripped = strip_doc_comment_decoration(&name.as_str());
1251-
1252-
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
1253-
// required to wrap the text.
1254-
let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
1255-
*cnt = if x == '"' {
1256-
1
1257-
} else if *cnt != 0 && x == '#' {
1258-
*cnt + 1
1259-
} else {
1260-
0
1261-
};
1262-
Some(*cnt)
1263-
}).max().unwrap_or(0);
1264-
1265-
TokenTree::Delimited(sp, Rc::new(Delimited {
1266-
delim: token::Bracket,
1267-
open_span: sp,
1268-
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
1269-
TokenTree::Token(sp, token::Eq),
1270-
TokenTree::Token(sp, token::Literal(
1271-
token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
1272-
close_span: sp,
1273-
}))
1274-
}
1275-
(&TokenTree::Delimited(_, ref delimed), _) => {
1276-
if index == 0 {
1277-
return delimed.open_tt();
1278-
}
1279-
if index == delimed.tts.len() + 1 {
1280-
return delimed.close_tt();
1281-
}
1282-
delimed.tts[index - 1].clone()
1283-
}
1284-
(&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
1285-
let v = [TokenTree::Token(sp, token::Dollar),
1286-
TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
1287-
v[index].clone()
1288-
}
1289-
(&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
1290-
let v = [TokenTree::Token(sp, token::SubstNt(name)),
1291-
TokenTree::Token(sp, token::Colon),
1292-
TokenTree::Token(sp, token::Ident(kind))];
1293-
v[index].clone()
1294-
}
1295-
(&TokenTree::Sequence(_, ref seq), _) => {
1296-
seq.tts[index].clone()
1297-
}
1298-
_ => panic!("Cannot expand a token tree")
1299-
}
1300-
}
1301-
1302-
/// Returns the `Span` corresponding to this token tree.
1303-
pub fn get_span(&self) -> Span {
1304-
match *self {
1305-
TokenTree::Token(span, _) => span,
1306-
TokenTree::Delimited(span, _) => span,
1307-
TokenTree::Sequence(span, _) => span,
1308-
}
1309-
}
1310-
1311-
/// Use this token tree as a matcher to parse given tts.
1312-
pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
1313-
-> macro_parser::NamedParseResult {
1314-
// `None` is because we're not interpolating
1315-
let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
1316-
None,
1317-
None,
1318-
tts.iter().cloned().collect(),
1319-
true);
1320-
macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
1321-
}
1322-
}
1323-
13241134
pub type Mac = Spanned<Mac_>;
13251135

13261136
/// Represents a macro invocation. The Path indicates which macro

src/libsyntax/diagnostics/plugin.rs

+2-1
Original file line number | Diff line number | Diff line change
@@ -13,12 +13,13 @@ use std::collections::BTreeMap;
1313
use std::env;
1414

1515
use ast;
16-
use ast::{Ident, Name, TokenTree};
16+
use ast::{Ident, Name};
1717
use syntax_pos::Span;
1818
use ext::base::{ExtCtxt, MacEager, MacResult};
1919
use ext::build::AstBuilder;
2020
use parse::token;
2121
use ptr::P;
22+
use tokenstream::{TokenTree};
2223
use util::small_vector::SmallVector;
2324

2425
use diagnostics::metadata::output_metadata;

src/libsyntax/ext/base.rs

+16-12
Original file line number | Diff line number | Diff line change
@@ -32,6 +32,7 @@ use fold::Folder;
3232
use std::collections::{HashMap, HashSet};
3333
use std::rc::Rc;
3434
use std::default::Default;
35+
use tokenstream;
3536

3637

3738
#[derive(Debug,Clone)]
@@ -168,20 +169,22 @@ pub trait TTMacroExpander {
168169
fn expand<'cx>(&self,
169170
ecx: &'cx mut ExtCtxt,
170171
span: Span,
171-
token_tree: &[ast::TokenTree])
172+
token_tree: &[tokenstream::TokenTree])
172173
-> Box<MacResult+'cx>;
173174
}
174175

175176
pub type MacroExpanderFn =
176-
for<'cx> fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>;
177+
for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
178+
-> Box<MacResult+'cx>;
177179

178180
impl<F> TTMacroExpander for F
179-
where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>
181+
where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
182+
-> Box<MacResult+'cx>
180183
{
181184
fn expand<'cx>(&self,
182185
ecx: &'cx mut ExtCtxt,
183186
span: Span,
184-
token_tree: &[ast::TokenTree])
187+
token_tree: &[tokenstream::TokenTree])
185188
-> Box<MacResult+'cx> {
186189
(*self)(ecx, span, token_tree)
187190
}
@@ -192,22 +195,23 @@ pub trait IdentMacroExpander {
192195
cx: &'cx mut ExtCtxt,
193196
sp: Span,
194197
ident: ast::Ident,
195-
token_tree: Vec<ast::TokenTree> )
198+
token_tree: Vec<tokenstream::TokenTree> )
196199
-> Box<MacResult+'cx>;
197200
}
198201

199202
pub type IdentMacroExpanderFn =
200-
for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<ast::TokenTree>) -> Box<MacResult+'cx>;
203+
for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>)
204+
-> Box<MacResult+'cx>;
201205

202206
impl<F> IdentMacroExpander for F
203207
where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident,
204-
Vec<ast::TokenTree>) -> Box<MacResult+'cx>
208+
Vec<tokenstream::TokenTree>) -> Box<MacResult+'cx>
205209
{
206210
fn expand<'cx>(&self,
207211
cx: &'cx mut ExtCtxt,
208212
sp: Span,
209213
ident: ast::Ident,
210-
token_tree: Vec<ast::TokenTree> )
214+
token_tree: Vec<tokenstream::TokenTree> )
211215
-> Box<MacResult+'cx>
212216
{
213217
(*self)(cx, sp, ident, token_tree)
@@ -630,7 +634,7 @@ impl<'a> ExtCtxt<'a> {
630634
expand::MacroExpander::new(self)
631635
}
632636

633-
pub fn new_parser_from_tts(&self, tts: &[ast::TokenTree])
637+
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
634638
-> parser::Parser<'a> {
635639
parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg())
636640
}
@@ -829,7 +833,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
829833
/// done as rarely as possible).
830834
pub fn check_zero_tts(cx: &ExtCtxt,
831835
sp: Span,
832-
tts: &[ast::TokenTree],
836+
tts: &[tokenstream::TokenTree],
833837
name: &str) {
834838
if !tts.is_empty() {
835839
cx.span_err(sp, &format!("{} takes no arguments", name));
@@ -840,7 +844,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
840844
/// is not a string literal, emit an error and return None.
841845
pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
842846
sp: Span,
843-
tts: &[ast::TokenTree],
847+
tts: &[tokenstream::TokenTree],
844848
name: &str)
845849
-> Option<String> {
846850
let mut p = cx.new_parser_from_tts(tts);
@@ -861,7 +865,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
861865
/// parsing error, emit a non-fatal error and return None.
862866
pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
863867
sp: Span,
864-
tts: &[ast::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
868+
tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
865869
let mut p = cx.new_parser_from_tts(tts);
866870
let mut es = Vec::new();
867871
while p.token != token::Eof {

src/libsyntax/ext/expand.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,6 @@
1111
use ast::{Block, Crate, DeclKind, PatKind};
1212
use ast::{Local, Ident, Mac_, Name, SpannedIdent};
1313
use ast::{MacStmtStyle, Mrk, Stmt, StmtKind, ItemKind};
14-
use ast::TokenTree;
1514
use ast;
1615
use attr::HasAttrs;
1716
use ext::mtwt;
@@ -28,6 +27,7 @@ use fold::*;
2827
use util::move_map::MoveMap;
2928
use parse::token::{fresh_mark, fresh_name, intern, keywords};
3029
use ptr::P;
30+
use tokenstream::TokenTree;
3131
use util::small_vector::SmallVector;
3232
use visit;
3333
use visit::Visitor;

0 commit comments

Comments (0)