Skip to content

Commit 546c052

Browse files
committed
syntax: Get rid of token::IdentStyle
1 parent 8dbf8f5 commit 546c052

File tree

16 files changed

+111
-183
lines changed

16 files changed

+111
-183
lines changed

src/librustdoc/html/highlight.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -147,7 +147,7 @@ fn write_source(sess: &parse::ParseSess,
147147
}
148148

149149
// keywords are also included in the identifier set
150-
token::Ident(ident, _is_mod_sep) => {
150+
token::Ident(ident) => {
151151
match &*ident.name.as_str() {
152152
"ref" | "mut" => "kw-2",
153153

src/libsyntax/ast.rs

+5-7
Original file line number | Diff line number | Diff line change
@@ -1206,8 +1206,7 @@ impl TokenTree {
12061206
TokenTree::Delimited(sp, Rc::new(Delimited {
12071207
delim: token::Bracket,
12081208
open_span: sp,
1209-
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"),
1210-
token::Plain)),
1209+
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
12111210
TokenTree::Token(sp, token::Eq),
12121211
TokenTree::Token(sp, token::Literal(
12131212
token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
@@ -1225,14 +1224,13 @@ impl TokenTree {
12251224
}
12261225
(&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
12271226
let v = [TokenTree::Token(sp, token::Dollar),
1228-
TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()),
1229-
token::Plain))];
1227+
TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
12301228
v[index].clone()
12311229
}
1232-
(&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
1233-
let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)),
1230+
(&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
1231+
let v = [TokenTree::Token(sp, token::SubstNt(name)),
12341232
TokenTree::Token(sp, token::Colon),
1235-
TokenTree::Token(sp, token::Ident(kind, kind_st))];
1233+
TokenTree::Token(sp, token::Ident(kind))];
12361234
v[index].clone()
12371235
}
12381236
(&TokenTree::Sequence(_, ref seq), _) => {

src/libsyntax/diagnostics/plugin.rs

+5-5
Original file line number | Diff line number | Diff line change
@@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
5454
token_tree: &[TokenTree])
5555
-> Box<MacResult+'cx> {
5656
let code = match (token_tree.len(), token_tree.get(0)) {
57-
(1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
57+
(1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
5858
_ => unreachable!()
5959
};
6060

@@ -92,10 +92,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
9292
token_tree.get(1),
9393
token_tree.get(2)
9494
) {
95-
(1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
95+
(1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
9696
(code, None)
9797
},
98-
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
98+
(3, Some(&TokenTree::Token(_, token::Ident(ref code))),
9999
Some(&TokenTree::Token(_, token::Comma)),
100100
Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
101101
(code, Some(description))
@@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
160160
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
161161
(
162162
// Crate name.
163-
&TokenTree::Token(_, token::Ident(ref crate_name, _)),
163+
&TokenTree::Token(_, token::Ident(ref crate_name)),
164164
// DIAGNOSTICS ident.
165-
&TokenTree::Token(_, token::Ident(ref name, _))
165+
&TokenTree::Token(_, token::Ident(ref name))
166166
) => (*&crate_name, name),
167167
_ => unreachable!()
168168
};

src/libsyntax/ext/quote.rs

+6-19
Original file line number | Diff line number | Diff line change
@@ -72,7 +72,7 @@ pub mod rt {
7272

7373
impl ToTokens for ast::Ident {
7474
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
75-
vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))]
75+
vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
7676
}
7777
}
7878

@@ -646,14 +646,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
646646
cx.expr_usize(sp, n))
647647
}
648648

649-
token::Ident(ident, style) => {
649+
token::Ident(ident) => {
650650
return cx.expr_call(sp,
651651
mk_token_path(cx, sp, "Ident"),
652-
vec![mk_ident(cx, sp, ident),
653-
match style {
654-
ModName => mk_token_path(cx, sp, "ModName"),
655-
Plain => mk_token_path(cx, sp, "Plain"),
656-
}]);
652+
vec![mk_ident(cx, sp, ident)]);
657653
}
658654

659655
token::Lifetime(ident) => {
@@ -668,19 +664,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
668664
vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
669665
}
670666

671-
token::MatchNt(name, kind, namep, kindp) => {
667+
token::MatchNt(name, kind) => {
672668
return cx.expr_call(sp,
673669
mk_token_path(cx, sp, "MatchNt"),
674-
vec!(mk_ident(cx, sp, name),
675-
mk_ident(cx, sp, kind),
676-
match namep {
677-
ModName => mk_token_path(cx, sp, "ModName"),
678-
Plain => mk_token_path(cx, sp, "Plain"),
679-
},
680-
match kindp {
681-
ModName => mk_token_path(cx, sp, "ModName"),
682-
Plain => mk_token_path(cx, sp, "Plain"),
683-
}));
670+
vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
684671
}
685672

686673
token::Interpolated(_) => panic!("quote! with interpolated token"),
@@ -722,7 +709,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
722709

723710
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
724711
match *tt {
725-
TokenTree::Token(sp, SubstNt(ident, _)) => {
712+
TokenTree::Token(sp, SubstNt(ident)) => {
726713
// tt.extend($ident.to_tokens(ext_cx))
727714

728715
let e_to_toks =

src/libsyntax/ext/tt/macro_parser.rs

+6-6
Original file line number | Diff line number | Diff line change
@@ -216,7 +216,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
216216
n_rec(p_s, next_m, res, ret_val, idx)?;
217217
}
218218
}
219-
TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => {
219+
TokenTree::Token(sp, MatchNt(bind_name, _)) => {
220220
match ret_val.entry(bind_name.name) {
221221
Vacant(spot) => {
222222
spot.insert(res[*idx].clone());
@@ -263,7 +263,7 @@ pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
263263
/// unhygienic comparison)
264264
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
265265
match (t1,t2) {
266-
(&token::Ident(id1,_),&token::Ident(id2,_))
266+
(&token::Ident(id1),&token::Ident(id2))
267267
| (&token::Lifetime(id1),&token::Lifetime(id2)) =>
268268
id1.name == id2.name,
269269
_ => *t1 == *t2
@@ -451,7 +451,7 @@ pub fn parse(sess: &ParseSess,
451451
if (!bb_eis.is_empty() && !next_eis.is_empty())
452452
|| bb_eis.len() > 1 {
453453
let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
454-
TokenTree::Token(_, MatchNt(bind, name, _, _)) => {
454+
TokenTree::Token(_, MatchNt(bind, name)) => {
455455
format!("{} ('{}')", name, bind)
456456
}
457457
_ => panic!()
@@ -479,7 +479,7 @@ pub fn parse(sess: &ParseSess,
479479

480480
let mut ei = bb_eis.pop().unwrap();
481481
match ei.top_elts.get_tt(ei.idx) {
482-
TokenTree::Token(span, MatchNt(_, ident, _, _)) => {
482+
TokenTree::Token(span, MatchNt(_, ident)) => {
483483
let match_cur = ei.match_cur;
484484
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
485485
parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
@@ -534,9 +534,9 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
534534
"ty" => token::NtTy(panictry!(p.parse_ty())),
535535
// this could be handled like a token, since it is one
536536
"ident" => match p.token {
537-
token::Ident(sn,b) => {
537+
token::Ident(sn) => {
538538
p.bump();
539-
token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}),b)
539+
token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
540540
}
541541
_ => {
542542
let token_str = pprust::token_to_string(&p.token);

src/libsyntax/ext/tt/macro_rules.rs

+10-12
Original file line number | Diff line number | Diff line change
@@ -244,8 +244,8 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
244244
// $( $lhs:tt => $rhs:tt );+
245245
// ...quasiquoting this would be nice.
246246
// These spans won't matter, anyways
247-
let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
248-
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
247+
let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt);
248+
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt);
249249
let argument_gram = vec!(
250250
TokenTree::Sequence(DUMMY_SP,
251251
Rc::new(ast::SequenceRepetition {
@@ -415,7 +415,7 @@ fn check_matcher_old<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token, on_fai
415415
let mut tokens = matcher.peekable();
416416
while let Some(token) = tokens.next() {
417417
last = match *token {
418-
TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
418+
TokenTree::Token(sp, MatchNt(ref name, ref frag_spec)) => {
419419
// ii. If T is a simple NT, look ahead to the next token T' in
420420
// M. If T' is in the set FOLLOW(NT), continue. Else; reject.
421421
if can_be_followed_by_any(&frag_spec.name.as_str()) {
@@ -881,7 +881,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
881881
// Now `last` holds the complete set of NT tokens that could
882882
// end the sequence before SUFFIX. Check that every one works with `suffix`.
883883
'each_last: for &(_sp, ref t) in &last.tokens {
884-
if let MatchNt(ref name, ref frag_spec, _, _) = *t {
884+
if let MatchNt(ref name, ref frag_spec) = *t {
885885
for &(sp, ref next_token) in &suffix_first.tokens {
886886
match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
887887
Err(msg) => {
@@ -917,9 +917,8 @@ fn check_matcher_core(cx: &mut ExtCtxt,
917917
last
918918
}
919919

920-
921920
fn token_can_be_followed_by_any(tok: &Token) -> bool {
922-
if let &MatchNt(_, ref frag_spec, _, _) = tok {
921+
if let &MatchNt(_, ref frag_spec) = tok {
923922
frag_can_be_followed_by_any(&frag_spec.name.as_str())
924923
} else {
925924
// (Non NT's can always be followed by anthing in matchers.)
@@ -1005,18 +1004,17 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
10051004
"pat" => {
10061005
match *tok {
10071006
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
1008-
Ident(i, _) if (i.name.as_str() == "if" ||
1009-
i.name.as_str() == "in") => Ok(true),
1007+
Ident(i) if (i.name.as_str() == "if" ||
1008+
i.name.as_str() == "in") => Ok(true),
10101009
_ => Ok(false)
10111010
}
10121011
},
10131012
"path" | "ty" => {
10141013
match *tok {
10151014
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
10161015
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
1017-
MatchNt(_, ref frag, _, _) if frag.name.as_str() == "block" => Ok(true),
1018-
Ident(i, _) if (i.name.as_str() == "as" ||
1019-
i.name.as_str() == "where") => Ok(true),
1016+
MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true),
1017+
Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true),
10201018
_ => Ok(false)
10211019
}
10221020
},
@@ -1036,7 +1034,7 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
10361034

10371035
fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
10381036
debug!("has_legal_fragment_specifier({:?})", tok);
1039-
if let &MatchNt(_, ref frag_spec, _, _) = tok {
1037+
if let &MatchNt(_, ref frag_spec) = tok {
10401038
let s = &frag_spec.name.as_str();
10411039
if !is_legal_fragment_specifier(s) {
10421040
return Err(s.to_string());

src/libsyntax/ext/tt/transcribe.rs

+6-6
Original file line number | Diff line number | Diff line change
@@ -161,7 +161,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
161161
size + lockstep_iter_size(tt, r)
162162
})
163163
},
164-
TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
164+
TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
165165
match lookup_cur_matched(r, name) {
166166
Some(matched) => match *matched {
167167
MatchedNonterminal(_) => LisUnconstrained,
@@ -186,7 +186,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
186186
None => (),
187187
Some(sp) => {
188188
r.cur_span = sp;
189-
r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
189+
r.cur_tok = token::Ident(r.imported_from.unwrap());
190190
return ret_val;
191191
},
192192
}
@@ -278,12 +278,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
278278
}
279279
}
280280
// FIXME #2887: think about span stuff here
281-
TokenTree::Token(sp, SubstNt(ident, namep)) => {
281+
TokenTree::Token(sp, SubstNt(ident)) => {
282282
r.stack.last_mut().unwrap().idx += 1;
283283
match lookup_cur_matched(r, ident) {
284284
None => {
285285
r.cur_span = sp;
286-
r.cur_tok = SubstNt(ident, namep);
286+
r.cur_tok = SubstNt(ident);
287287
return ret_val;
288288
// this can't be 0 length, just like TokenTree::Delimited
289289
}
@@ -292,9 +292,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
292292
// sidestep the interpolation tricks for ident because
293293
// (a) idents can be in lots of places, so it'd be a pain
294294
// (b) we actually can, since it's a token.
295-
MatchedNonterminal(NtIdent(ref sn, b)) => {
295+
MatchedNonterminal(NtIdent(ref sn)) => {
296296
r.cur_span = sn.span;
297-
r.cur_tok = token::Ident(sn.node, b);
297+
r.cur_tok = token::Ident(sn.node);
298298
return ret_val;
299299
}
300300
MatchedNonterminal(ref other_whole_nt) => {

src/libsyntax/fold.rs

+5-12
Original file line number | Diff line number | Diff line change
@@ -610,17 +610,11 @@ pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree
610610
// apply ident folder if it's an ident, apply other folds to interpolated nodes
611611
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
612612
match t {
613-
token::Ident(id, followed_by_colons) => {
614-
token::Ident(fld.fold_ident(id), followed_by_colons)
615-
}
613+
token::Ident(id) => token::Ident(fld.fold_ident(id)),
616614
token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
617615
token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
618-
token::SubstNt(ident, namep) => {
619-
token::SubstNt(fld.fold_ident(ident), namep)
620-
}
621-
token::MatchNt(name, kind, namep, kindp) => {
622-
token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp)
623-
}
616+
token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
617+
token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
624618
_ => t
625619
}
626620
}
@@ -664,9 +658,8 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
664658
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
665659
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
666660
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
667-
token::NtIdent(id, is_mod_name) =>
668-
token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), .. *id}),
669-
is_mod_name),
661+
token::NtIdent(id) =>
662+
token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
670663
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
671664
token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
672665
token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),

0 commit comments

Comments (0)