Commit a2c46b9

Auto merge of rust-lang#17601 - Veykril:proc-macro-fix, r=Veykril
Fix incorrect encoding of literals in the proc-macro-api on version 4. A quick follow-up to rust-lang/rust-analyzer#17559, which broke things.

2 parents: c5fc669 + 311aaa5
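As the `msg.rs` fixture change further down shows, a literal leaf is now stored decomposed: the delimiters are stripped from its text (`"Foo"` is stored as `Foo`), while the literal kind and any suffix are carried separately, and older protocol versions still expect the raw source text, which appears to be what went wrong on version 4. A minimal, self-contained sketch of the decomposed shape, using simplified illustrative types rather than the actual `tt::Literal` definition:

```rust
// Simplified illustration of the decomposed literal representation.
// The real type is `tt::Literal` in the rust-analyzer `tt` crate; the
// names and field types here are assumptions made for the example.
#[derive(Debug, PartialEq)]
enum LitKind {
    Integer,
    Str,
}

#[derive(Debug, PartialEq)]
struct Literal {
    kind: LitKind,
    text: String,           // delimiters stripped: `"Foo"` is stored as `Foo`
    suffix: Option<String>, // numeric suffixes such as `u32` travel separately
}

fn main() {
    // `"Foo"` in the source: kind = Str, text = Foo, no suffix.
    let s = Literal { kind: LitKind::Str, text: "Foo".into(), suffix: None };
    // `1u32` in the source: kind = Integer, text = 1, suffix = u32.
    let n = Literal { kind: LitKind::Integer, text: "1".into(), suffix: Some("u32".into()) };
    assert_eq!(s.kind, LitKind::Str);
    assert_eq!(s.text, "Foo");
    assert_eq!(n.text, "1");
    assert_eq!(n.suffix.as_deref(), Some("u32"));
}
```

`mbe::token_to_literal` (moved into the `tt` crate by this commit) is the helper that produces this decomposition from raw literal text.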

12 files changed, +183 −134 lines

src/tools/rust-analyzer/Cargo.lock
+2 −1

@@ -1046,7 +1046,6 @@ dependencies = [
  "arrayvec",
  "cov-mark",
  "parser",
- "ra-ap-rustc_lexer",
  "rustc-hash",
  "smallvec",
  "span",
@@ -1326,6 +1325,7 @@ dependencies = [
  "base-db",
  "indexmap",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mbe",
  "paths",
  "rustc-hash",
  "serde",
@@ -2218,6 +2218,7 @@ name = "tt"
 version = "0.0.0"
 dependencies = [
  "arrayvec",
+ "ra-ap-rustc_lexer",
  "smol_str",
  "stdx",
  "text-size",

src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+4 −3

@@ -5,9 +5,10 @@ use base_db::CrateId;
 use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Interned};
+
 use mbe::{
-    desugar_doc_comment_text, syntax_node_to_token_tree, token_to_literal, DelimiterKind,
-    DocCommentDesugarMode, Punct,
+    desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
+    Punct,
 };
 use smallvec::{smallvec, SmallVec};
 use span::{Span, SyntaxContextId};
@@ -20,7 +21,7 @@ use crate::{
     db::ExpandDatabase,
     mod_path::ModPath,
     span_map::SpanMapRef,
-    tt::{self, Subtree},
+    tt::{self, token_to_literal, Subtree},
     InFile,
 };
src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+1 −1

@@ -59,7 +59,7 @@ pub use span::{HirFileId, MacroCallId, MacroFileId};
 
 pub mod tt {
     pub use span::Span;
-    pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing};
+    pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing};
 
     pub type Delimiter = ::tt::Delimiter<Span>;
     pub type DelimSpan = ::tt::DelimSpan<Span>;

src/tools/rust-analyzer/crates/mbe/Cargo.toml
+1 −2

@@ -17,7 +17,6 @@ rustc-hash.workspace = true
 smallvec.workspace = true
 tracing.workspace = true
 arrayvec.workspace = true
-ra-ap-rustc_lexer.workspace = true
 
 # local deps
 syntax.workspace = true
@@ -30,7 +29,7 @@ span.workspace = true
 test-utils.workspace = true
 
 [features]
-in-rust-tree = ["parser/in-rust-tree", "syntax/in-rust-tree"]
+in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
 
 [lints]
 workspace = true

src/tools/rust-analyzer/crates/mbe/src/lib.rs
+1 −8

@@ -6,13 +6,6 @@
 //! The tests for this functionality live in another crate:
 //! `hir_def::macro_expansion_tests::mbe`.
 
-#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
-
-#[cfg(not(feature = "in-rust-tree"))]
-extern crate ra_ap_rustc_lexer as rustc_lexer;
-#[cfg(feature = "in-rust-tree")]
-extern crate rustc_lexer;
-
 mod expander;
 mod parser;
 mod syntax_bridge;
@@ -36,7 +29,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
 pub use crate::syntax_bridge::{
     desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
     parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
-    token_to_literal, token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
+    token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
 };
 
 pub use crate::syntax_bridge::dummy_test_span_utils::*;

src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+2 −51

@@ -4,7 +4,7 @@ use std::fmt;
 
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{Edition, SpanAnchor, SpanData, SpanMap};
-use stdx::{format_to, itertools::Itertools, never, non_empty_vec::NonEmptyVec};
+use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@@ -14,6 +14,7 @@ use syntax::{
 use tt::{
     buffer::{Cursor, TokenBuffer},
     iter::TtIter,
+    token_to_literal,
 };
 
 use crate::to_parser_input::to_parser_input;
@@ -400,56 +401,6 @@ where
     }
 }
 
-pub fn token_to_literal<S>(text: SmolStr, span: S) -> tt::Literal<S>
-where
-    S: Copy,
-{
-    use rustc_lexer::LiteralKind;
-
-    let token = rustc_lexer::tokenize(&text).next_tuple();
-    let Some((rustc_lexer::Token {
-        kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
-        ..
-    },)) = token
-    else {
-        return tt::Literal { span, text, kind: tt::LitKind::Err(()), suffix: None };
-    };
-
-    let (kind, start_offset, end_offset) = match kind {
-        LiteralKind::Int { .. } => (tt::LitKind::Integer, 0, 0),
-        LiteralKind::Float { .. } => (tt::LitKind::Float, 0, 0),
-        LiteralKind::Char { terminated } => (tt::LitKind::Char, 1, terminated as usize),
-        LiteralKind::Byte { terminated } => (tt::LitKind::Byte, 2, terminated as usize),
-        LiteralKind::Str { terminated } => (tt::LitKind::Str, 1, terminated as usize),
-        LiteralKind::ByteStr { terminated } => (tt::LitKind::ByteStr, 2, terminated as usize),
-        LiteralKind::CStr { terminated } => (tt::LitKind::CStr, 2, terminated as usize),
-        LiteralKind::RawStr { n_hashes } => (
-            tt::LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            2 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-        LiteralKind::RawByteStr { n_hashes } => (
-            tt::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
-            3 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-        LiteralKind::RawCStr { n_hashes } => (
-            tt::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
-            3 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-    };
-
-    let (lit, suffix) = text.split_at(suffix_start as usize);
-    let lit = &lit[start_offset..lit.len() - end_offset];
-    let suffix = match suffix {
-        "" | "_" => None,
-        suffix => Some(Box::new(suffix.into())),
-    };
-
-    tt::Literal { span, text: lit.into(), kind, suffix }
-}
-
 fn is_single_token_op(kind: SyntaxKind) -> bool {
     matches!(
         kind,
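The relocated helper's behaviour is fully determined by the body removed above: the text is re-lexed, a kind-dependent number of delimiter characters is stripped from each end, and any suffix is split off. As a hedged, self-contained illustration of just the raw-string offsets (a toy function, not the crate's API):

```rust
// Toy illustration of the raw-string offsets used in the removed
// `token_to_literal`: 2 + n_hashes characters (`r`, hashes, `"`) are dropped
// from the front and 1 + n_hashes (`"`, hashes) from the back.
fn strip_raw_str(text: &str, n_hashes: usize) -> &str {
    let start = 2 + n_hashes;
    let end = text.len() - (1 + n_hashes);
    &text[start..end]
}

fn main() {
    assert_eq!(strip_raw_str("r\"abc\"", 0), "abc");
    assert_eq!(strip_raw_str("r#\"abc\"#", 1), "abc");
}
```

The other literal kinds follow the same pattern, with the offsets listed in the match arms above.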

src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+2 −0

@@ -28,6 +28,8 @@ span.workspace = true
 # InternIds for the syntax context
 base-db.workspace = true
 la-arena.workspace = true
+# only here to parse via token_to_literal
+mbe.workspace = true
 
 [lints]
 workspace = true
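The comment on the new dependency suggests the shape of the fix: when a payload uses an older protocol version, a literal still arrives as raw source text and has to be re-parsed (via `token_to_literal`) into the decomposed form, whereas newer versions ship the decomposed data directly. A hedged sketch of that decode-side version gate, with toy names and a toy re-lexer standing in for the crate's real version constants and for `token_to_literal`:

```rust
// Hypothetical sketch of the decode-side version gate; the constant, the
// types, and the helper below are illustrative stand-ins, not the real API.
#[derive(Debug, PartialEq)]
struct Literal {
    kind: &'static str,
    symbol: String,
    suffix: Option<String>,
}

// Assumption: first protocol version that ships decomposed literals.
const V_DECOMPOSED_LITERALS: u32 = 5;

// Stand-in for `token_to_literal`, handling only plain string literals.
fn relex_string_literal(raw: &str) -> Literal {
    Literal { kind: "Str", symbol: raw.trim_matches('"').to_owned(), suffix: None }
}

fn decode_string_literal(version: u32, payload: &str) -> Literal {
    if version < V_DECOMPOSED_LITERALS {
        // Old wire format: the payload is the raw source text, e.g. "\"Foo\"".
        relex_string_literal(payload)
    } else {
        // New wire format: the payload is already the bare symbol.
        Literal { kind: "Str", symbol: payload.to_owned(), suffix: None }
    }
}

fn main() {
    assert_eq!(decode_string_literal(4, "\"Foo\"").symbol, "Foo");
    assert_eq!(decode_string_literal(5, "Foo").symbol, "Foo");
}
```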

src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
+28 −25

@@ -197,7 +197,7 @@
                 .into(),
             ),
             TokenTree::Leaf(Leaf::Literal(Literal {
-                text: "\"Foo\"".into(),
+                text: "Foo".into(),
                 span: Span {
                     range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
                     anchor,
@@ -263,32 +263,35 @@
     #[test]
     fn test_proc_macro_rpc_works() {
         let tt = fixture_token_tree();
-        let mut span_data_table = Default::default();
-        let task = ExpandMacro {
-            data: ExpandMacroData {
-                macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
-                macro_name: Default::default(),
-                attributes: None,
-                has_global_spans: ExpnGlobals {
-                    serialize: true,
-                    def_site: 0,
-                    call_site: 0,
-                    mixed_site: 0,
+        for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION {
+            let mut span_data_table = Default::default();
+            let task = ExpandMacro {
+                data: ExpandMacroData {
+                    macro_body: FlatTree::new(&tt, v, &mut span_data_table),
+                    macro_name: Default::default(),
+                    attributes: None,
+                    has_global_spans: ExpnGlobals {
+                        serialize: true,
+                        def_site: 0,
+                        call_site: 0,
+                        mixed_site: 0,
+                    },
+                    span_data_table: Vec::new(),
                 },
-                span_data_table: Vec::new(),
-            },
-            lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
-            env: Default::default(),
-            current_dir: Default::default(),
-        };
+                lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
+                env: Default::default(),
+                current_dir: Default::default(),
+            };
 
-        let json = serde_json::to_string(&task).unwrap();
-        // println!("{}", json);
-        let back: ExpandMacro = serde_json::from_str(&json).unwrap();
+            let json = serde_json::to_string(&task).unwrap();
+            // println!("{}", json);
+            let back: ExpandMacro = serde_json::from_str(&json).unwrap();
 
-        assert_eq!(
-            tt,
-            back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
-        );
+            assert_eq!(
+                tt,
+                back.data.macro_body.to_subtree_resolved(v, &span_data_table),
+                "version: {v}"
+            );
+        }
     }
 }
