@@ -20,13 +20,10 @@ use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId};
use codemap::{respan, Spanned};
use abi::Abi;
use errors;
-use ext::base;
-use ext::tt::macro_parser;
use parse::token::{self, keywords, InternedString};
-use parse::lexer;
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use print::pprust;
use ptr::P;
+use tokenstream::{TokenTree};

use std::fmt;
use std::rc::Rc;
@@ -1134,193 +1131,6 @@ pub enum CaptureBy {
    Ref,
}

-/// A delimited sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct Delimited {
-    /// The type of delimiter
-    pub delim: token::DelimToken,
-    /// The span covering the opening delimiter
-    pub open_span: Span,
-    /// The delimited sequence of token trees
-    pub tts: Vec<TokenTree>,
-    /// The span covering the closing delimiter
-    pub close_span: Span,
-}
-
-impl Delimited {
-    /// Returns the opening delimiter as a token.
-    pub fn open_token(&self) -> token::Token {
-        token::OpenDelim(self.delim)
-    }
-
-    /// Returns the closing delimiter as a token.
-    pub fn close_token(&self) -> token::Token {
-        token::CloseDelim(self.delim)
-    }
-
-    /// Returns the opening delimiter as a token tree.
-    pub fn open_tt(&self) -> TokenTree {
-        TokenTree::Token(self.open_span, self.open_token())
-    }
-
-    /// Returns the closing delimiter as a token tree.
-    pub fn close_tt(&self) -> TokenTree {
-        TokenTree::Token(self.close_span, self.close_token())
-    }
-}
-
-/// A sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct SequenceRepetition {
-    /// The sequence of token trees
-    pub tts: Vec<TokenTree>,
-    /// The optional separator
-    pub separator: Option<token::Token>,
-    /// Whether the sequence can be repeated zero (*), or one or more times (+)
-    pub op: KleeneOp,
-    /// The number of `MatchNt`s that appear in the sequence (and subsequences)
-    pub num_captures: usize,
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum KleeneOp {
-    ZeroOrMore,
-    OneOrMore,
-}
-
-/// When the main rust parser encounters a syntax-extension invocation, it
-/// parses the arguments to the invocation as a token-tree. This is a very
-/// loose structure, such that all sorts of different AST-fragments can
-/// be passed to syntax extensions using a uniform type.
-///
-/// If the syntax extension is an MBE macro, it will attempt to match its
-/// LHS token tree against the provided token tree, and if it finds a
-/// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
-///
-/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
-/// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub enum TokenTree {
-    /// A single token
-    Token(Span, token::Token),
-    /// A delimited sequence of token trees
-    Delimited(Span, Rc<Delimited>),
-
-    // This only makes sense in MBE macros.
-
-    /// A kleene-style repetition sequence with a span
-    // FIXME(eddyb) #12938 Use DST.
-    Sequence(Span, Rc<SequenceRepetition>),
-}
-
-impl TokenTree {
-    pub fn len(&self) -> usize {
-        match *self {
-            TokenTree::Token(_, token::DocComment(name)) => {
-                match doc_comment_style(&name.as_str()) {
-                    AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3
-                }
-            }
-            TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
-            TokenTree::Token(_, token::MatchNt(..)) => 3,
-            TokenTree::Delimited(_, ref delimed) => {
-                delimed.tts.len() + 2
-            }
-            TokenTree::Sequence(_, ref seq) => {
-                seq.tts.len()
-            }
-            TokenTree::Token(..) => 0
-        }
-    }
-
-    pub fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
-                TokenTree::Token(sp, token::Pound)
-            }
-            (&TokenTree::Token(sp, token::DocComment(name)), 1)
-            if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
-                TokenTree::Token(sp, token::Not)
-            }
-            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
-                let stripped = strip_doc_comment_decoration(&name.as_str());
-
-                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-                // required to wrap the text.
-                let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
-                    *cnt = if x == '"' {
-                        1
-                    } else if *cnt != 0 && x == '#' {
-                        *cnt + 1
-                    } else {
-                        0
-                    };
-                    Some(*cnt)
-                }).max().unwrap_or(0);
-
-                TokenTree::Delimited(sp, Rc::new(Delimited {
-                    delim: token::Bracket,
-                    open_span: sp,
-                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
-                              TokenTree::Token(sp, token::Eq),
-                              TokenTree::Token(sp, token::Literal(
-                                  token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
-                    close_span: sp,
-                }))
-            }
-            (&TokenTree::Delimited(_, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt();
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt();
-                }
-                delimed.tts[index - 1].clone()
-            }
-            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
-                let v = [TokenTree::Token(sp, token::Dollar),
-                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
-                v[index].clone()
-            }
-            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
-                let v = [TokenTree::Token(sp, token::SubstNt(name)),
-                         TokenTree::Token(sp, token::Colon),
-                         TokenTree::Token(sp, token::Ident(kind))];
-                v[index].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => {
-                seq.tts[index].clone()
-            }
-            _ => panic!("Cannot expand a token tree")
-        }
-    }
-
-    /// Returns the `Span` corresponding to this token tree.
-    pub fn get_span(&self) -> Span {
-        match *self {
-            TokenTree::Token(span, _) => span,
-            TokenTree::Delimited(span, _) => span,
-            TokenTree::Sequence(span, _) => span,
-        }
-    }
-
-    /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
-                 -> macro_parser::NamedParseResult {
-        // `None` is because we're not interpolating
-        let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
-                                                         None,
-                                                         None,
-                                                         tts.iter().cloned().collect(),
-                                                         true);
-        macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
-    }
-}
-
pub type Mac = Spanned<Mac_>;

/// Represents a macro invocation. The Path indicates which macro
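Note: the `TokenTree` doc comment in the removed block describes how an MBE (macro-by-example) macro matches its LHS against the invocation's token tree and then transcribes the RHS, with `KleeneOp` covering the `*`/`+` repetition operators. As a rough user-side illustration of those concepts (ordinary stable `macro_rules!`, not the internal API this commit moves into `tokenstream`):

// Illustrative sketch only. In terms of the types above, `$x:expr` corresponds
// to a `MatchNt` matcher, `$x` in the body to a `SubstNt` substitution, and
// `$( ... ),*` to a `Sequence` with `KleeneOp::ZeroOrMore` and `,` as separator.
macro_rules! sum {
    ( $( $x:expr ),* ) => {
        // The RHS is transcribed with each captured `$x` spliced back in.
        0 $( + $x )*
    };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6); // expands to `0 + 1 + 2 + 3`
}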