
Fix incorrect encoding of literals in the proc-macro-api on version 4 #17601


Merged
merged 1 commit on Jul 15, 2024
3 changes: 2 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

7 changes: 4 additions & 3 deletions crates/hir-expand/src/attrs.rs
@@ -5,9 +5,10 @@ use base_db::CrateId;
 use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Interned};
+
 use mbe::{
-    desugar_doc_comment_text, syntax_node_to_token_tree, token_to_literal, DelimiterKind,
-    DocCommentDesugarMode, Punct,
+    desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
+    Punct,
 };
 use smallvec::{smallvec, SmallVec};
 use span::{Span, SyntaxContextId};
@@ -20,7 +21,7 @@ use crate::{
     db::ExpandDatabase,
     mod_path::ModPath,
     span_map::SpanMapRef,
-    tt::{self, Subtree},
+    tt::{self, token_to_literal, Subtree},
     InFile,
 };

2 changes: 1 addition & 1 deletion crates/hir-expand/src/lib.rs
@@ -59,7 +59,7 @@ pub use span::{HirFileId, MacroCallId, MacroFileId};
 
 pub mod tt {
     pub use span::Span;
-    pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing};
+    pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing};
 
     pub type Delimiter = ::tt::Delimiter<Span>;
     pub type DelimSpan = ::tt::DelimSpan<Span>;

3 changes: 1 addition & 2 deletions crates/mbe/Cargo.toml
@@ -17,7 +17,6 @@ rustc-hash.workspace = true
 smallvec.workspace = true
 tracing.workspace = true
 arrayvec.workspace = true
-ra-ap-rustc_lexer.workspace = true
 
 # local deps
 syntax.workspace = true
@@ -30,7 +29,7 @@ span.workspace = true
 test-utils.workspace = true
 
 [features]
-in-rust-tree = ["parser/in-rust-tree", "syntax/in-rust-tree"]
+in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
 
 [lints]
 workspace = true
9 changes: 1 addition & 8 deletions crates/mbe/src/lib.rs
@@ -6,13 +6,6 @@
 //! The tests for this functionality live in another crate:
 //! `hir_def::macro_expansion_tests::mbe`.
 
-#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
-
-#[cfg(not(feature = "in-rust-tree"))]
-extern crate ra_ap_rustc_lexer as rustc_lexer;
-#[cfg(feature = "in-rust-tree")]
-extern crate rustc_lexer;
-
 mod expander;
 mod parser;
 mod syntax_bridge;
@@ -36,7 +29,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
 pub use crate::syntax_bridge::{
     desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
     parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
-    token_to_literal, token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
+    token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
 };
 
 pub use crate::syntax_bridge::dummy_test_span_utils::*;

53 changes: 2 additions & 51 deletions crates/mbe/src/syntax_bridge.rs
@@ -4,7 +4,7 @@ use std::fmt;
 
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{Edition, SpanAnchor, SpanData, SpanMap};
-use stdx::{format_to, itertools::Itertools, never, non_empty_vec::NonEmptyVec};
+use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@@ -14,6 +14,7 @@ use syntax::{
 use tt::{
     buffer::{Cursor, TokenBuffer},
     iter::TtIter,
+    token_to_literal,
 };
 
 use crate::to_parser_input::to_parser_input;
@@ -400,56 +401,6 @@
     }
 }
 
-pub fn token_to_literal<S>(text: SmolStr, span: S) -> tt::Literal<S>
-where
-    S: Copy,
-{
-    use rustc_lexer::LiteralKind;
-
-    let token = rustc_lexer::tokenize(&text).next_tuple();
-    let Some((rustc_lexer::Token {
-        kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
-        ..
-    },)) = token
-    else {
-        return tt::Literal { span, text, kind: tt::LitKind::Err(()), suffix: None };
-    };
-
-    let (kind, start_offset, end_offset) = match kind {
-        LiteralKind::Int { .. } => (tt::LitKind::Integer, 0, 0),
-        LiteralKind::Float { .. } => (tt::LitKind::Float, 0, 0),
-        LiteralKind::Char { terminated } => (tt::LitKind::Char, 1, terminated as usize),
-        LiteralKind::Byte { terminated } => (tt::LitKind::Byte, 2, terminated as usize),
-        LiteralKind::Str { terminated } => (tt::LitKind::Str, 1, terminated as usize),
-        LiteralKind::ByteStr { terminated } => (tt::LitKind::ByteStr, 2, terminated as usize),
-        LiteralKind::CStr { terminated } => (tt::LitKind::CStr, 2, terminated as usize),
-        LiteralKind::RawStr { n_hashes } => (
-            tt::LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            2 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-        LiteralKind::RawByteStr { n_hashes } => (
-            tt::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
-            3 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-        LiteralKind::RawCStr { n_hashes } => (
-            tt::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
-            3 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-    };
-
-    let (lit, suffix) = text.split_at(suffix_start as usize);
-    let lit = &lit[start_offset..lit.len() - end_offset];
-    let suffix = match suffix {
-        "" | "_" => None,
-        suffix => Some(Box::new(suffix.into())),
-    };
-
-    tt::Literal { span, text: lit.into(), kind, suffix }
-}
-
 fn is_single_token_op(kind: SyntaxKind) -> bool {
     matches!(
         kind,

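For reference, the `token_to_literal` helper removed here moves into the `tt` crate (hence the new `tt::token_to_literal` import above and the re-export in `hir-expand`). It classifies the token with `rustc_lexer` and then slices off the literal's delimiters and suffix using per-kind offsets. The snippet below is a self-contained illustration of that offset arithmetic for raw strings only; `strip_raw_str` is a hypothetical name, not an API of any of these crates, and it assumes well-formed ASCII input.

// Illustration only: the raw-string row of the offset table above.
// For a raw string with n hashes, the prefix is 2 + n bytes (`r`, the
// hashes, the opening quote) and the tail is 1 + n bytes (the closing
// quote plus the hashes), so only the unquoted payload remains.
fn strip_raw_str(text: &str, n_hashes: usize) -> &str {
    let start = 2 + n_hashes;
    let end = 1 + n_hashes;
    &text[start..text.len() - end]
}

fn main() {
    assert_eq!(strip_raw_str(r###"r#"x"#"###, 1), "x");
    assert_eq!(strip_raw_str(r###"r"y""###, 0), "y");
}

Plain string literals use offsets 1 and 1 in the same table, which is why `"Foo"` is stored as just `Foo` in the proc-macro-api fixture below.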
2 changes: 2 additions & 0 deletions crates/proc-macro-api/Cargo.toml
@@ -28,6 +28,8 @@ span.workspace = true
 # InternIds for the syntax context
 base-db.workspace = true
 la-arena.workspace = true
+# only here to parse via token_to_literal
+mbe.workspace = true
 
 [lints]
 workspace = true
53 changes: 28 additions & 25 deletions crates/proc-macro-api/src/msg.rs
@@ -197,7 +197,7 @@
                 .into(),
             ),
             TokenTree::Leaf(Leaf::Literal(Literal {
-                text: "\"Foo\"".into(),
+                text: "Foo".into(),
                 span: Span {
                     range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
                     anchor,
@@ -263,32 +263,35 @@
     #[test]
     fn test_proc_macro_rpc_works() {
         let tt = fixture_token_tree();
-        let mut span_data_table = Default::default();
-        let task = ExpandMacro {
-            data: ExpandMacroData {
-                macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
-                macro_name: Default::default(),
-                attributes: None,
-                has_global_spans: ExpnGlobals {
-                    serialize: true,
-                    def_site: 0,
-                    call_site: 0,
-                    mixed_site: 0,
+        for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION {
+            let mut span_data_table = Default::default();
+            let task = ExpandMacro {
+                data: ExpandMacroData {
+                    macro_body: FlatTree::new(&tt, v, &mut span_data_table),
+                    macro_name: Default::default(),
+                    attributes: None,
+                    has_global_spans: ExpnGlobals {
+                        serialize: true,
+                        def_site: 0,
+                        call_site: 0,
+                        mixed_site: 0,
+                    },
+                    span_data_table: Vec::new(),
                 },
-                span_data_table: Vec::new(),
-            },
-            lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
-            env: Default::default(),
-            current_dir: Default::default(),
-        };
+                lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
+                env: Default::default(),
+                current_dir: Default::default(),
+            };
 
-        let json = serde_json::to_string(&task).unwrap();
-        // println!("{}", json);
-        let back: ExpandMacro = serde_json::from_str(&json).unwrap();
+            let json = serde_json::to_string(&task).unwrap();
+            // println!("{}", json);
+            let back: ExpandMacro = serde_json::from_str(&json).unwrap();
 
-        assert_eq!(
-            tt,
-            back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
-        );
+            assert_eq!(
+                tt,
+                back.data.macro_body.to_subtree_resolved(v, &span_data_table),
+                "version: {v}"
+            );
+        }
     }
 }
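The fixture change at the top of this file is the observable shape of the fix: a string literal is carried as its unquoted text plus a `LitKind` (and an optional suffix) instead of keeping the quotes inside `text`, and the round-trip test now runs for every version from `RUST_ANALYZER_SPAN_SUPPORT` through `CURRENT_API_VERSION` rather than only the current one. The sketch below is illustration only, using a simplified stand-in for `tt::Literal` (the real type also carries a span); the `1u32` case follows the suffix-splitting logic of `token_to_literal` shown earlier.

// Simplified stand-in for `tt::Literal`, for illustration only.
#[derive(Debug, PartialEq)]
enum LitKind {
    Str,
    Integer,
}

#[derive(Debug)]
struct Lit {
    text: String,
    kind: LitKind,
    suffix: Option<String>,
}

fn main() {
    // `"Foo"` becomes unquoted text plus a kind, matching the fixture above.
    let s = Lit { text: "Foo".into(), kind: LitKind::Str, suffix: None };
    // `1u32` keeps its digits in `text` and carries the suffix separately.
    let n = Lit { text: "1".into(), kind: LitKind::Integer, suffix: Some("u32".into()) };
    assert_eq!(s.text, "Foo");
    assert_eq!(s.kind, LitKind::Str);
    assert_eq!(n.suffix.as_deref(), Some("u32"));
}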