Skip to content

Commit 0a92053

Browse files
committed
Adjust a lot of FollowedBy values.
The previous commit was conservative in the `Spacing` to `FollowedBy` conversion. This commit makes many `FollowedBy` values more precise. In particular, it removes `TokenStream::token_fby_{space,punct}`, because for a token stream with a single token in it, `FollowedBy::Other` is the only value that makes sense.
1 parent a6942db commit 0a92053

File tree

14 files changed

+67
-77
lines changed

14 files changed

+67
-77
lines changed

compiler/rustc_ast/src/tokenstream.rs

+9-20
Original file line number | Diff line number | Diff line change
@@ -450,19 +450,8 @@ impl TokenStream {
450450
}
451451

452452
/// Create a token stream containing a single token with
453-
/// `FollowedBy::Space`.
454-
pub fn token_fby_space(kind: TokenKind, span: Span) -> TokenStream {
455-
TokenStream::new(vec![TokenTree::token_fby_space(kind, span)])
456-
}
457-
458-
/// Create a token stream containing a single token with
459-
/// `FollowedBy::Punct`.
460-
pub fn token_fby_punct(kind: TokenKind, span: Span) -> TokenStream {
461-
TokenStream::new(vec![TokenTree::token_fby_punct(kind, span)])
462-
}
463-
464-
/// Create a token stream containing a single token with
465-
/// `FollowedBy::Other`.
453+
/// `FollowedBy::Other`. This is the only `FollowedBy` value that makes
454+
/// sense for a token stream containing a single token.
466455
pub fn token_fby_other(kind: TokenKind, span: Span) -> TokenStream {
467456
TokenStream::new(vec![TokenTree::token_fby_other(kind, span)])
468457
}
@@ -490,16 +479,16 @@ impl TokenStream {
490479
pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
491480
match nt {
492481
Nonterminal::NtIdent(ident, is_raw) => {
493-
TokenStream::token_fby_space(token::Ident(ident.name, *is_raw), ident.span)
482+
TokenStream::token_fby_other(token::Ident(ident.name, *is_raw), ident.span)
494483
}
495484
Nonterminal::NtLifetime(ident) => {
496-
TokenStream::token_fby_space(token::Lifetime(ident.name), ident.span)
485+
TokenStream::token_fby_other(token::Lifetime(ident.name), ident.span)
497486
}
498487
Nonterminal::NtItem(item) => TokenStream::from_ast(item),
499488
Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
500489
Nonterminal::NtStmt(stmt) if let StmtKind::Empty = stmt.kind => {
501490
// FIXME: Properly collect tokens for empty statements.
502-
TokenStream::token_fby_space(token::Semi, stmt.span)
491+
TokenStream::token_fby_other(token::Semi, stmt.span)
503492
}
504493
Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
505494
Nonterminal::NtPat(pat) => TokenStream::from_ast(pat),
@@ -664,7 +653,7 @@ impl TokenStream {
664653
[
665654
TokenTree::token_fby_space(token::Ident(sym::doc, false), span),
666655
TokenTree::token_fby_space(token::Eq, span),
667-
TokenTree::token_fby_space(
656+
TokenTree::token_fby_other(
668657
TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
669658
span,
670659
),
@@ -675,12 +664,12 @@ impl TokenStream {
675664

676665
if attr_style == AttrStyle::Inner {
677666
vec![
678-
TokenTree::token_fby_space(token::Pound, span),
679-
TokenTree::token_fby_space(token::Not, span),
667+
TokenTree::token_fby_punct(token::Pound, span),
668+
TokenTree::token_fby_other(token::Not, span),
680669
body,
681670
]
682671
} else {
683-
vec![TokenTree::token_fby_space(token::Pound, span), body]
672+
vec![TokenTree::token_fby_other(token::Pound, span), body]
684673
}
685674
}
686675
}

compiler/rustc_builtin_macros/src/assert/context.rs

+2-2
Original file line number | Diff line number | Diff line change
@@ -151,7 +151,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
151151
fn build_panic(&self, expr_str: &str, panic_path: Path) -> P<Expr> {
152152
let escaped_expr_str = escape_to_fmt(expr_str);
153153
let initial = [
154-
TokenTree::token_fby_space(
154+
TokenTree::token_fby_other(
155155
token::Literal(token::Lit {
156156
kind: token::LitKind::Str,
157157
symbol: Symbol::intern(&if self.fmt_string.is_empty() {
@@ -170,7 +170,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
170170
];
171171
let captures = self.capture_decls.iter().flat_map(|cap| {
172172
[
173-
TokenTree::token_fby_space(token::Ident(cap.ident.name, false), cap.ident.span),
173+
TokenTree::token_fby_other(token::Ident(cap.ident.name, false), cap.ident.span),
174174
TokenTree::token_fby_space(token::Comma, self.span),
175175
]
176176
});

compiler/rustc_expand/src/config.rs

+8-4
Original file line number | Diff line number | Diff line change
@@ -373,16 +373,20 @@ impl<'a> StripUnconfigured<'a> {
373373
};
374374
let pound_span = pound_token.span;
375375

376-
let mut trees = vec![AttrTokenTree::Token(pound_token, FollowedBy::Space)];
377-
if attr.style == AttrStyle::Inner {
376+
let mut trees = if attr.style == AttrStyle::Inner {
378377
// For inner attributes, we do the same thing for the `!` in `#![some_attr]`
379378
let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
380379
orig_trees.next().unwrap().clone()
381380
else {
382381
panic!("Bad tokens for attribute {attr:?}");
383382
};
384-
trees.push(AttrTokenTree::Token(bang_token, FollowedBy::Space));
385-
}
383+
vec![
384+
AttrTokenTree::Token(pound_token, FollowedBy::Punct),
385+
AttrTokenTree::Token(bang_token, FollowedBy::Other),
386+
]
387+
} else {
388+
vec![AttrTokenTree::Token(pound_token, FollowedBy::Other)]
389+
};
386390
// We don't really have a good span to use for the synthesized `[]`
387391
// in `#[attr]`, so just use the span of the `#` token.
388392
let bracket_group = AttrTokenTree::Delimited(

compiler/rustc_expand/src/mbe/transcribe.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -242,7 +242,7 @@ pub(super) fn transcribe<'a>(
242242
// with modified syntax context. (I believe this supports nested macros).
243243
marker.visit_span(&mut sp);
244244
marker.visit_ident(&mut original_ident);
245-
result.push(TokenTree::token_fby_space(token::Dollar, sp));
245+
result.push(TokenTree::token_fby_other(token::Dollar, sp));
246246
result.push(TokenTree::Token(
247247
Token::from_ast_ident(original_ident),
248248
FollowedBy::Space,

compiler/rustc_expand/src/proc_macro.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -126,7 +126,7 @@ impl MultiItemModifier for DeriveProcMacro {
126126
Annotatable::Stmt(stmt) => token::NtStmt(stmt),
127127
_ => unreachable!(),
128128
};
129-
TokenStream::token_fby_space(token::Interpolated(Lrc::new(nt)), DUMMY_SP)
129+
TokenStream::token_fby_other(token::Interpolated(Lrc::new(nt)), DUMMY_SP)
130130
} else {
131131
item.to_tokens()
132132
};

compiler/rustc_expand/src/proc_macro_server.rs

+14-15
Original file line number | Diff line number | Diff line change
@@ -219,14 +219,13 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
219219
for ch in data.as_str().chars() {
220220
escaped.extend(ch.escape_debug());
221221
}
222-
let stream = [
223-
Ident(sym::doc, false),
224-
Eq,
225-
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
226-
]
227-
.into_iter()
228-
.map(|kind| tokenstream::TokenTree::token_fby_space(kind, span))
229-
.collect();
222+
let stream = tokenstream::TokenStream::from_iter([
223+
tokenstream::TokenTree::token_fby_space(Ident(sym::doc, false), span),
224+
tokenstream::TokenTree::token_fby_space(Eq, span),
225+
tokenstream::TokenTree::token_fby_other(
226+
TokenKind::lit(token::Str, Symbol::intern(&escaped), None), span
227+
),
228+
]);
230229
trees.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
231230
if attr_style == ast::AttrStyle::Inner {
232231
trees.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
@@ -339,7 +338,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
339338
let minus = BinOp(BinOpToken::Minus);
340339
let symbol = Symbol::intern(&symbol.as_str()[1..]);
341340
let integer = TokenKind::lit(token::Integer, symbol, suffix);
342-
let a = tokenstream::TokenTree::token_fby_space(minus, span);
341+
let a = tokenstream::TokenTree::token_fby_other(minus, span);
343342
let b = tokenstream::TokenTree::token_fby_space(integer, span);
344343
smallvec![a, b]
345344
}
@@ -352,7 +351,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
352351
let minus = BinOp(BinOpToken::Minus);
353352
let symbol = Symbol::intern(&symbol.as_str()[1..]);
354353
let float = TokenKind::lit(token::Float, symbol, suffix);
355-
let a = tokenstream::TokenTree::token_fby_space(minus, span);
354+
let a = tokenstream::TokenTree::token_fby_other(minus, span);
356355
let b = tokenstream::TokenTree::token_fby_space(float, span);
357356
smallvec![a, b]
358357
}
@@ -545,17 +544,17 @@ impl server::TokenStream for Rustc<'_, '_> {
545544
// be recovered in the general case.
546545
match &expr.kind {
547546
ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
548-
Ok(tokenstream::TokenStream::token_fby_space(
547+
Ok(tokenstream::TokenStream::token_fby_other(
549548
token::Ident(token_lit.symbol, false),
550549
expr.span,
551550
))
552551
}
553552
ast::ExprKind::Lit(token_lit) => {
554-
Ok(tokenstream::TokenStream::token_fby_space(token::Literal(*token_lit), expr.span))
553+
Ok(tokenstream::TokenStream::token_fby_other(token::Literal(*token_lit), expr.span))
555554
}
556555
ast::ExprKind::IncludedBytes(bytes) => {
557556
let lit = token::Lit::new(token::ByteStr, escape_byte_str_symbol(bytes), None);
558-
Ok(tokenstream::TokenStream::token_fby_space(
557+
Ok(tokenstream::TokenStream::token_fby_other(
559558
token::TokenKind::Literal(lit),
560559
expr.span,
561560
))
@@ -566,11 +565,11 @@ impl server::TokenStream for Rustc<'_, '_> {
566565
Ok(Self::TokenStream::from_iter([
567566
// FIXME: The span of the `-` token is lost when
568567
// parsing, so we cannot faithfully recover it here.
569-
tokenstream::TokenTree::token_fby_space(
568+
tokenstream::TokenTree::token_fby_other(
570569
token::BinOp(token::Minus),
571570
e.span,
572571
),
573-
tokenstream::TokenTree::token_fby_space(
572+
tokenstream::TokenTree::token_fby_other(
574573
token::Literal(*token_lit),
575574
e.span,
576575
),

compiler/rustc_expand/src/tokenstream/tests.rs

+2-2
Original file line number | Diff line number | Diff line change
@@ -87,7 +87,7 @@ fn test_is_empty() {
8787
create_default_session_globals_then(|| {
8888
let test0 = TokenStream::default();
8989
let test1 =
90-
TokenStream::token_fby_space(token::Ident(Symbol::intern("a"), false), sp(0, 1));
90+
TokenStream::token_fby_other(token::Ident(Symbol::intern("a"), false), sp(0, 1));
9191
let test2 = string_to_ts("foo(bar::baz)");
9292

9393
assert_eq!(test0.is_empty(), true);
@@ -102,7 +102,7 @@ fn test_dotdotdot() {
102102
let mut stream = TokenStream::default();
103103
stream.push_tree(TokenTree::token_fby_punct(token::Dot, sp(0, 1)));
104104
stream.push_tree(TokenTree::token_fby_punct(token::Dot, sp(1, 2)));
105-
stream.push_tree(TokenTree::token_fby_space(token::Dot, sp(2, 3)));
105+
stream.push_tree(TokenTree::token_fby_other(token::Dot, sp(2, 3)));
106106
assert!(stream.eq_unspanned(&string_to_ts("...")));
107107
assert_eq!(stream.trees().count(), 1);
108108
})

compiler/rustc_parse/src/parser/expr.rs

+1-3
Original file line number | Diff line number | Diff line change
@@ -1139,9 +1139,7 @@ impl<'a> Parser<'a> {
11391139
// 1.2 | 1.2e3
11401140
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
11411141
self.token = Token::new(token::Ident(symbol1, false), ident1_span);
1142-
// This needs to be `FollowedBy::Space` to prevent regressions.
1143-
// See issue #76399 and PR #76285 for more details
1144-
let next_token1 = (Token::new(token::Dot, dot_span), FollowedBy::Space);
1142+
let next_token1 = (Token::new(token::Dot, dot_span), FollowedBy::Other);
11451143
let base1 =
11461144
self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
11471145
let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);

compiler/rustc_parse/src/parser/mod.rs

+1-1
Original file line number | Diff line number | Diff line change
@@ -286,7 +286,7 @@ impl TokenCursor {
286286
// No close delimiter to return; continue on to the next iteration.
287287
} else {
288288
// We have exhausted the outermost token stream.
289-
return (Token::new(token::Eof, DUMMY_SP), FollowedBy::Space);
289+
return (Token::new(token::Eof, DUMMY_SP), FollowedBy::Other);
290290
}
291291
}
292292
}

tests/ui/proc-macro/auxiliary/expand-expr.rs

+6-15
Original file line number | Diff line number | Diff line change
@@ -99,21 +99,12 @@ pub fn expand_expr_is(input: TokenStream) -> TokenStream {
9999
let expected = expected_tts.into_iter().collect::<TokenStream>();
100100
let expanded = iter.collect::<TokenStream>().expand_expr().expect("expand_expr failed");
101101

102-
// FIXME: Temporarily broken. The next commit will fix it.
103-
//
104-
// + LL | simple_lit!(-3.14159);
105-
// + | --------------------- in this macro invocation
106-
// + |
107-
// + = help: message: assert failed
108-
// + expected: `-3.14159`
109-
// + expanded: `- 3.14159`
110-
//
111-
// assert!(
112-
// expected.to_string() == expanded.to_string(),
113-
// "assert failed\nexpected: `{}`\nexpanded: `{}`",
114-
// expected.to_string(),
115-
// expanded.to_string()
116-
// );
102+
assert!(
103+
expected.to_string() == expanded.to_string(),
104+
"assert failed\nexpected: `{}`\nexpanded: `{}`",
105+
expected.to_string(),
106+
expanded.to_string()
107+
);
117108

118109
// Also compare the raw tts to make sure they line up.
119110
assert_ts_eq(&expected, &expanded);

tests/ui/proc-macro/cfg-eval-inner.stdout

+9-9
Original file line number | Diff line number | Diff line change
@@ -1,15 +1,15 @@
11
PRINT-ATTR INPUT (DISPLAY): impl
22
Foo<[u8;
33
{
4-
#! [rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
5-
{ field: [u8; { #! [rustc_dummy(another_cursed_inner)] 1 }] } 0
6-
}] > { #! [rustc_dummy(evaluated_attr)] fn bar() {} }
4+
#![rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
5+
{ field: [u8; { #![rustc_dummy(another_cursed_inner)] 1 }] } 0
6+
}] > { #![rustc_dummy(evaluated_attr)] fn bar() {} }
77
PRINT-ATTR RE-COLLECTED (DISPLAY): impl Foo <
88
[u8;
99
{
10-
#! [rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
11-
{ field: [u8; { #! [rustc_dummy(another_cursed_inner)] 1 }] } 0
12-
}] > { #! [rustc_dummy(evaluated_attr)] fn bar() {} }
10+
#![rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
11+
{ field: [u8; { #![rustc_dummy(another_cursed_inner)] 1 }] } 0
12+
}] > { #![rustc_dummy(evaluated_attr)] fn bar() {} }
1313
PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): impl Foo <
1414
[u8 ;
1515
{
@@ -47,7 +47,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
4747
stream: TokenStream [
4848
Punct {
4949
ch: '#',
50-
spacing: Alone,
50+
spacing: Joint,
5151
span: $DIR/cfg-eval-inner.rs:19:5: 19:6 (#0),
5252
},
5353
Punct {
@@ -142,7 +142,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
142142
stream: TokenStream [
143143
Punct {
144144
ch: '#',
145-
spacing: Alone,
145+
spacing: Joint,
146146
span: $DIR/cfg-eval-inner.rs:23:13: 23:14 (#0),
147147
},
148148
Punct {
@@ -207,7 +207,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
207207
stream: TokenStream [
208208
Punct {
209209
ch: '#',
210-
spacing: Alone,
210+
spacing: Joint,
211211
span: $DIR/cfg-eval-inner.rs:32:5: 32:6 (#0),
212212
},
213213
Punct {

tests/ui/proc-macro/inner-attrs.stdout

+2-2
Original file line number | Diff line number | Diff line change
@@ -579,7 +579,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
579579
},
580580
]
581581
PRINT-DERIVE INPUT (DISPLAY): struct MyDerivePrint
582-
{ field: [u8; { match true { _ => { #! [rustc_dummy(third)] true } } ; 0 }] }
582+
{ field: [u8; { match true { _ => { #![rustc_dummy(third)] true } } ; 0 }] }
583583
PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct MyDerivePrint
584584
{
585585
field :
@@ -651,7 +651,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
651651
stream: TokenStream [
652652
Punct {
653653
ch: '#',
654-
spacing: Alone,
654+
spacing: Joint,
655655
span: $DIR/inner-attrs.rs:40:17: 40:18 (#0),
656656
},
657657
Punct {

tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout

+1-1
Original file line number | Diff line number | Diff line change
@@ -46,7 +46,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
4646
stream: TokenStream [
4747
Punct {
4848
ch: '#',
49-
spacing: Alone,
49+
spacing: Joint,
5050
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
5151
},
5252
Punct {

tests/ui/proc-macro/macro-rules-derive-cfg.stdout

+10-1
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,13 @@
11
PRINT-DERIVE INPUT (DISPLAY): struct Foo
2+
{
3+
val :
4+
[bool ;
5+
{
6+
let a = #[rustc_dummy(first)] #[rustc_dummy(second)]
7+
{ #![allow(unused)] 30 } ; 0
8+
}]
9+
}
10+
PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct Foo
211
{
312
val :
413
[bool ;
@@ -111,7 +120,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
111120
stream: TokenStream [
112121
Punct {
113122
ch: '#',
114-
spacing: Alone,
123+
spacing: Joint,
115124
span: $DIR/macro-rules-derive-cfg.rs:27:5: 27:6 (#0),
116125
},
117126
Punct {

0 commit comments

Comments (0)