Skip to content

Commit 6b71fba

Browse files
authored
Rollup merge of rust-lang#61669 - petrochenkov:tokderef2, r=oli-obk
syntax: Remove `Deref` impl from `Token`. Follow-up to rust-lang#61541. r? @oli-obk
2 parents 18ca48d + 9aaa7c7 commit 6b71fba

File tree

16 files changed

+163
-231
lines changed

16 files changed

+163
-231
lines changed

src/librustdoc/html/highlight.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -257,7 +257,7 @@ impl<'a> Classifier<'a> {
257257
token::Question => Class::QuestionMark,
258258

259259
token::Dollar => {
260-
if self.lexer.peek().kind.is_ident() {
260+
if self.lexer.peek().is_ident() {
261261
self.in_macro_nonterminal = true;
262262
Class::MacroNonTerminal
263263
} else {

src/libsyntax/attr/mod.rs

+2-3
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned};
2020
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
2121
use crate::parse::parser::Parser;
2222
use crate::parse::{self, ParseSess, PResult};
23-
use crate::parse::token::{self, Token, TokenKind};
23+
use crate::parse::token::{self, Token};
2424
use crate::ptr::P;
2525
use crate::symbol::{sym, Symbol};
2626
use crate::ThinVec;
@@ -467,8 +467,7 @@ impl MetaItem {
467467
segment.ident.span.ctxt());
468468
idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
469469
}
470-
idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
471-
segment.ident.span).into());
470+
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
472471
last_pos = segment.ident.span.hi();
473472
}
474473
self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);

src/libsyntax/ext/tt/macro_parser.rs

+14-14
Original file line number | Diff line number | Diff line change
@@ -78,7 +78,7 @@ use crate::ast::{Ident, Name};
7878
use crate::ext::tt::quoted::{self, TokenTree};
7979
use crate::parse::{Directory, ParseSess};
8080
use crate::parse::parser::{Parser, PathStyle};
81-
use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
81+
use crate::parse::token::{self, DocComment, Nonterminal, Token};
8282
use crate::print::pprust;
8383
use crate::symbol::{kw, sym, Symbol};
8484
use crate::tokenstream::{DelimSpan, TokenStream};
@@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> {
199199
seq_op: Option<quoted::KleeneOp>,
200200

201201
/// The separator if we are in a repetition.
202-
sep: Option<TokenKind>,
202+
sep: Option<Token>,
203203

204204
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
205205
/// before we enter the sequence.
@@ -417,24 +417,24 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
417417

418418
/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
419419
/// other tokens, this is "unexpected token...".
420-
pub fn parse_failure_msg(tok: TokenKind) -> String {
421-
match tok {
420+
pub fn parse_failure_msg(tok: &Token) -> String {
421+
match tok.kind {
422422
token::Eof => "unexpected end of macro invocation".to_string(),
423423
_ => format!(
424424
"no rules expected the token `{}`",
425-
pprust::token_to_string(&tok)
425+
pprust::token_to_string(tok)
426426
),
427427
}
428428
}
429429

430430
/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
431-
fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
432-
if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) {
433-
name1 == name2 && is_raw1 == is_raw2
434-
} else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) {
435-
name1 == name2
431+
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
432+
if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
433+
ident1.name == ident2.name && is_raw1 == is_raw2
434+
} else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
435+
ident1.name == ident2.name
436436
} else {
437-
*t1 == *t2
437+
t1.kind == t2.kind
438438
}
439439
}
440440

@@ -712,7 +712,7 @@ pub fn parse(
712712

713713
// If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
714714
// either the parse is ambiguous (which should never happen) or there is a syntax error.
715-
if token_name_eq(&parser.token, &token::Eof) {
715+
if parser.token == token::Eof {
716716
if eof_items.len() == 1 {
717717
let matches = eof_items[0]
718718
.matches
@@ -804,8 +804,8 @@ pub fn parse(
804804

805805
/// The token is an identifier, but not `_`.
806806
/// We prohibit passing `_` to macros expecting `ident` for now.
807-
fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
808-
match *token {
807+
fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
808+
match token.kind {
809809
token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
810810
_ => None,
811811
}

src/libsyntax/ext/tt/macro_rules.rs

+17-17
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,7 @@ use crate::symbol::{Symbol, kw, sym};
1717
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
1818

1919
use errors::FatalError;
20-
use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
20+
use syntax_pos::{Span, symbol::Ident};
2121
use log::debug;
2222

2323
use rustc_data_structures::fx::{FxHashMap};
@@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
200200

201201
let (token, label) = best_failure.expect("ran no matchers");
202202
let span = token.span.substitute_dummy(sp);
203-
let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
203+
let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
204204
err.span_label(span, label);
205205
if let Some(sp) = def_span {
206206
if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
@@ -266,17 +266,19 @@ pub fn compile(
266266
let argument_gram = vec![
267267
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
268268
tts: vec![
269-
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
270-
quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
271-
quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
269+
quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
270+
quoted::TokenTree::token(token::FatArrow, def.span),
271+
quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
272272
],
273-
separator: Some(if body.legacy { token::Semi } else { token::Comma }),
273+
separator: Some(Token::new(
274+
if body.legacy { token::Semi } else { token::Comma }, def.span
275+
)),
274276
op: quoted::KleeneOp::OneOrMore,
275277
num_captures: 2,
276278
})),
277279
// to phase into semicolon-termination instead of semicolon-separation
278280
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
279-
tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
281+
tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
280282
separator: None,
281283
op: quoted::KleeneOp::ZeroOrMore,
282284
num_captures: 0
@@ -286,7 +288,7 @@ pub fn compile(
286288
let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
287289
Success(m) => m,
288290
Failure(token, msg) => {
289-
let s = parse_failure_msg(token.kind);
291+
let s = parse_failure_msg(&token);
290292
let sp = token.span.substitute_dummy(def.span);
291293
let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
292294
err.span_label(sp, msg);
@@ -608,9 +610,8 @@ impl FirstSets {
608610
// If the sequence contents can be empty, then the first
609611
// token could be the separator token itself.
610612

611-
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
612-
subfirst.maybe_empty) {
613-
first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
613+
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
614+
first.add_one_maybe(TokenTree::Token(sep.clone()));
614615
}
615616

616617
// Reverse scan: Sequence comes before `first`.
@@ -658,9 +659,8 @@ impl FirstSets {
658659
// If the sequence contents can be empty, then the first
659660
// token could be the separator token itself.
660661

661-
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
662-
subfirst.maybe_empty) {
663-
first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
662+
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
663+
first.add_one_maybe(TokenTree::Token(sep.clone()));
664664
}
665665

666666
assert!(first.maybe_empty);
@@ -851,7 +851,7 @@ fn check_matcher_core(sess: &ParseSess,
851851
// against SUFFIX
852852
continue 'each_token;
853853
}
854-
TokenTree::Sequence(sp, ref seq_rep) => {
854+
TokenTree::Sequence(_, ref seq_rep) => {
855855
suffix_first = build_suffix_first();
856856
// The trick here: when we check the interior, we want
857857
// to include the separator (if any) as a potential
@@ -864,9 +864,9 @@ fn check_matcher_core(sess: &ParseSess,
864864
// work of cloning it? But then again, this way I may
865865
// get a "tighter" span?
866866
let mut new;
867-
let my_suffix = if let Some(ref u) = seq_rep.separator {
867+
let my_suffix = if let Some(sep) = &seq_rep.separator {
868868
new = suffix_first.clone();
869-
new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
869+
new.add_one_maybe(TokenTree::Token(sep.clone()));
870870
&new
871871
} else {
872872
&suffix_first

src/libsyntax/ext/tt/quoted.rs

+12-22
Original file line number | Diff line number | Diff line change
@@ -23,24 +23,14 @@ pub struct Delimited {
2323
}
2424

2525
impl Delimited {
26-
/// Returns the opening delimiter (possibly `NoDelim`).
27-
pub fn open_token(&self) -> TokenKind {
28-
token::OpenDelim(self.delim)
29-
}
30-
31-
/// Returns the closing delimiter (possibly `NoDelim`).
32-
pub fn close_token(&self) -> TokenKind {
33-
token::CloseDelim(self.delim)
34-
}
35-
3626
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
3727
pub fn open_tt(&self, span: Span) -> TokenTree {
3828
let open_span = if span.is_dummy() {
3929
span
4030
} else {
4131
span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
4232
};
43-
TokenTree::token(self.open_token(), open_span)
33+
TokenTree::token(token::OpenDelim(self.delim), open_span)
4434
}
4535

4636
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +40,7 @@ impl Delimited {
5040
} else {
5141
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
5242
};
53-
TokenTree::token(self.close_token(), close_span)
43+
TokenTree::token(token::CloseDelim(self.delim), close_span)
5444
}
5545
}
5646

@@ -59,7 +49,7 @@ pub struct SequenceRepetition {
5949
/// The sequence of token trees
6050
pub tts: Vec<TokenTree>,
6151
/// The optional separator
62-
pub separator: Option<TokenKind>,
52+
pub separator: Option<Token>,
6353
/// Whether the sequence can be repeated zero (*), or one or more times (+)
6454
pub op: KleeneOp,
6555
/// The number of `Match`s that appear in the sequence (and subsequences)
@@ -282,7 +272,7 @@ where
282272
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
283273
// Must have `(` not `{` or `[`
284274
if delim != token::Paren {
285-
let tok = pprust::token_to_string(&token::OpenDelim(delim));
275+
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
286276
let msg = format!("expected `(`, found `{}`", tok);
287277
sess.span_diagnostic.span_err(span.entire(), &msg);
288278
}
@@ -371,8 +361,8 @@ where
371361

372362
/// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
373363
/// `None`.
374-
fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
375-
match *token {
364+
fn kleene_op(token: &Token) -> Option<KleeneOp> {
365+
match token.kind {
376366
token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
377367
token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
378368
token::Question => Some(KleeneOp::ZeroOrOne),
@@ -424,7 +414,7 @@ fn parse_sep_and_kleene_op<I>(
424414
attrs: &[ast::Attribute],
425415
edition: Edition,
426416
macro_node_id: NodeId,
427-
) -> (Option<TokenKind>, KleeneOp)
417+
) -> (Option<Token>, KleeneOp)
428418
where
429419
I: Iterator<Item = tokenstream::TokenTree>,
430420
{
@@ -449,7 +439,7 @@ fn parse_sep_and_kleene_op_2015<I>(
449439
_features: &Features,
450440
_attrs: &[ast::Attribute],
451441
macro_node_id: NodeId,
452-
) -> (Option<TokenKind>, KleeneOp)
442+
) -> (Option<Token>, KleeneOp)
453443
where
454444
I: Iterator<Item = tokenstream::TokenTree>,
455445
{
@@ -502,7 +492,7 @@ where
502492
a hard error in an upcoming edition",
503493
);
504494

505-
return (Some(token::Question), op);
495+
return (Some(Token::new(token::Question, op1_span)), op);
506496
}
507497

508498
// #2 is a random token (this is an error) :(
@@ -541,7 +531,7 @@ where
541531
}
542532

543533
// #2 is a KleeneOp :D
544-
Ok(Ok((op, _))) => return (Some(token.kind), op),
534+
Ok(Ok((op, _))) => return (Some(token), op),
545535

546536
// #2 is a random token :(
547537
Ok(Err(token)) => token.span,
@@ -567,7 +557,7 @@ fn parse_sep_and_kleene_op_2018<I>(
567557
sess: &ParseSess,
568558
_features: &Features,
569559
_attrs: &[ast::Attribute],
570-
) -> (Option<TokenKind>, KleeneOp)
560+
) -> (Option<Token>, KleeneOp)
571561
where
572562
I: Iterator<Item = tokenstream::TokenTree>,
573563
{
@@ -596,7 +586,7 @@ where
596586
}
597587

598588
// #2 is a KleeneOp :D
599-
Ok(Ok((op, _))) => return (Some(token.kind), op),
589+
Ok(Ok((op, _))) => return (Some(token), op),
600590

601591
// #2 is a random token :(
602592
Ok(Err(token)) => token.span,

src/libsyntax/ext/tt/transcribe.rs

+8-13
Original file line number | Diff line number | Diff line change
@@ -4,11 +4,10 @@ use crate::ext::expand::Marker;
44
use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
55
use crate::ext::tt::quoted;
66
use crate::mut_visit::noop_visit_tt;
7-
use crate::parse::token::{self, NtTT, TokenKind};
7+
use crate::parse::token::{self, NtTT, Token};
88
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
99

1010
use smallvec::{smallvec, SmallVec};
11-
use syntax_pos::DUMMY_SP;
1211

1312
use rustc_data_structures::fx::FxHashMap;
1413
use rustc_data_structures::sync::Lrc;
@@ -18,7 +17,7 @@ use std::rc::Rc;
1817
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
1918
enum Frame {
2019
Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
21-
Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> },
20+
Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
2221
}
2322

2423
impl Frame {
@@ -109,17 +108,13 @@ pub fn transcribe(
109108
else {
110109
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
111110
// go back to the beginning of the sequence.
112-
if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
113-
let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
111+
if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
112+
let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
114113
*repeat_idx += 1;
115-
if *repeat_idx < repeat_len {
114+
if repeat_idx < repeat_len {
116115
*idx = 0;
117-
if let Some(sep) = sep.clone() {
118-
let prev_span = match result.last() {
119-
Some((tt, _)) => tt.span(),
120-
None => DUMMY_SP,
121-
};
122-
result.push(TokenTree::token(sep, prev_span).into());
116+
if let Some(sep) = sep {
117+
result.push(TokenTree::Token(sep.clone()).into());
123118
}
124119
continue;
125120
}
@@ -242,7 +237,7 @@ pub fn transcribe(
242237
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
243238
sp = sp.apply_mark(cx.current_expansion.mark);
244239
result.push(TokenTree::token(token::Dollar, sp).into());
245-
result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
240+
result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
246241
}
247242
}
248243

src/libsyntax/parse/diagnostics.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
729729
&mut self,
730730
t: &TokenKind,
731731
) -> PResult<'a, bool /* recovered */> {
732-
let token_str = pprust::token_to_string(t);
732+
let token_str = pprust::token_kind_to_string(t);
733733
let this_token_str = self.this_token_descr();
734734
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
735735
// Point at the end of the macro call when reaching end of macro arguments.

0 commit comments

Comments (0)