@@ -17,7 +17,7 @@ use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 use errors::FatalError;
-use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
+use syntax_pos::{Span, symbol::Ident};
 use log::debug;
 
 use rustc_data_structures::fx::{FxHashMap};
@@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
 
     let (token, label) = best_failure.expect("ran no matchers");
     let span = token.span.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
     err.span_label(span, label);
     if let Some(sp) = def_span {
         if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
@@ -266,17 +266,19 @@ pub fn compile(
     let argument_gram = vec![
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::token(token::FatArrow, def.span),
+                quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
             ],
-            separator: Some(if body.legacy { token::Semi } else { token::Comma }),
+            separator: Some(Token::new(
+                if body.legacy { token::Semi } else { token::Comma }, def.span
+            )),
             op: quoted::KleeneOp::OneOrMore,
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
+            tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -286,7 +288,7 @@ pub fn compile(
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
         Failure(token, msg) => {
-            let s = parse_failure_msg(token.kind);
+            let s = parse_failure_msg(&token);
             let sp = token.span.substitute_dummy(def.span);
             let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
             err.span_label(sp, msg);
@@ -608,9 +610,8 @@ impl FirstSets {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
 
-                        if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                        subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
+                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                            first.add_one_maybe(TokenTree::Token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -658,9 +659,8 @@ impl FirstSets {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
 
-                        if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                        subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
+                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                            first.add_one_maybe(TokenTree::Token(sep.clone()));
                         }
 
                         assert!(first.maybe_empty);
@@ -851,7 +851,7 @@ fn check_matcher_core(sess: &ParseSess,
                 // against SUFFIX
                 continue 'each_token;
             }
-            TokenTree::Sequence(sp, ref seq_rep) => {
+            TokenTree::Sequence(_, ref seq_rep) => {
                 suffix_first = build_suffix_first();
                 // The trick here: when we check the interior, we want
                 // to include the separator (if any) as a potential
@@ -864,9 +864,9 @@ fn check_matcher_core(sess: &ParseSess,
                 // work of cloning it? But then again, this way I may
                 // get a "tighter" span?
                 let mut new;
-                let my_suffix = if let Some(ref u) = seq_rep.separator {
+                let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
+                    new.add_one_maybe(TokenTree::Token(sep.clone()));
                     &new
                 } else {
                     &suffix_first