Skip to content

Commit e69b96b

Browse files
committed
Auto merge of #15466 - Veykril:prep-inline-fmt-args, r=Veykril
internal: Add offset param to token descending API. The offset is unused for now, as we can't map by spans yet, but it will be required for #11260 to work once the token map has been changed to record spans.
2 parents b147709 + 53b2924 commit e69b96b

21 files changed

+185
-143
lines changed

crates/hir-expand/src/lib.rs

+3-11
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ use either::Either;
3737
use syntax::{
3838
algo::{self, skip_trivia_token},
3939
ast::{self, AstNode, HasDocComments},
40-
AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
40+
AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
4141
};
4242

4343
use crate::{
@@ -642,6 +642,8 @@ impl ExpansionInfo {
642642
db: &dyn db::ExpandDatabase,
643643
item: Option<ast::Item>,
644644
token: InFile<&SyntaxToken>,
645+
// FIXME: use this for range mapping, so that we can resolve inline format args
646+
_relative_token_offset: Option<TextSize>,
645647
) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
646648
assert_eq!(token.file_id, self.arg.file_id);
647649
let token_id_in_attr_input = if let Some(item) = item {
@@ -1051,16 +1053,6 @@ impl InFile<SyntaxToken> {
10511053
}
10521054
}
10531055
}
1054-
1055-
pub fn ancestors_with_macros(
1056-
self,
1057-
db: &dyn db::ExpandDatabase,
1058-
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
1059-
self.value.parent().into_iter().flat_map({
1060-
let file_id = self.file_id;
1061-
move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
1062-
})
1063-
}
10641056
}
10651057

10661058
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]

crates/hir/src/semantics.rs

+60-21
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
170170
self.imp.is_derive_annotated(item)
171171
}
172172

173+
/// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
174+
/// expansion. `token_to_map` should be a token from the `speculative args` node.
173175
pub fn speculative_expand(
174176
&self,
175177
actual_macro_call: &ast::MacroCall,
@@ -179,6 +181,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
179181
self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
180182
}
181183

184+
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
185+
/// expansion. `token_to_map` should be a token from the `speculative args` node.
182186
pub fn speculative_expand_attr_macro(
183187
&self,
184188
actual_macro_call: &ast::Item,
@@ -201,14 +205,22 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
201205
)
202206
}
203207

204-
/// Descend the token into macrocalls to its first mapped counterpart.
205-
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
206-
self.imp.descend_into_macros_single(token)
208+
/// Descend the token into its macro call if it is part of one, returning the token in the
209+
/// expansion that it is associated with. If `offset` points into the token's range, it will
210+
/// be considered for the mapping in case of inline format args.
211+
pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
212+
self.imp.descend_into_macros_single(token, offset)
207213
}
208214

209-
/// Descend the token into macrocalls to all its mapped counterparts.
210-
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
211-
self.imp.descend_into_macros(token)
215+
/// Descend the token into its macro call if it is part of one, returning the tokens in the
216+
/// expansion that it is associated with. If `offset` points into the token's range, it will
217+
/// be considered for the mapping in case of inline format args.
218+
pub fn descend_into_macros(
219+
&self,
220+
token: SyntaxToken,
221+
offset: TextSize,
222+
) -> SmallVec<[SyntaxToken; 1]> {
223+
self.imp.descend_into_macros(token, offset)
212224
}
213225

214226
/// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
@@ -217,12 +229,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
217229
pub fn descend_into_macros_with_same_text(
218230
&self,
219231
token: SyntaxToken,
232+
offset: TextSize,
220233
) -> SmallVec<[SyntaxToken; 1]> {
221-
self.imp.descend_into_macros_with_same_text(token)
234+
self.imp.descend_into_macros_with_same_text(token, offset)
222235
}
223236

224-
pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
225-
self.imp.descend_into_macros_with_kind_preference(token)
237+
pub fn descend_into_macros_with_kind_preference(
238+
&self,
239+
token: SyntaxToken,
240+
offset: TextSize,
241+
) -> SyntaxToken {
242+
self.imp.descend_into_macros_with_kind_preference(token, offset)
226243
}
227244

228245
/// Maps a node down by mapping its first and last token down.
@@ -665,7 +682,7 @@ impl<'db> SemanticsImpl<'db> {
665682
};
666683

667684
if first == last {
668-
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
685+
self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
669686
if let Some(node) = value.parent_ancestors().find_map(N::cast) {
670687
res.push(node)
671688
}
@@ -674,14 +691,15 @@ impl<'db> SemanticsImpl<'db> {
674691
} else {
675692
// Descend first and last token, then zip them to look for the node they belong to
676693
let mut scratch: SmallVec<[_; 1]> = smallvec![];
677-
self.descend_into_macros_impl(first, &mut |token| {
694+
self.descend_into_macros_impl(first, 0.into(), &mut |token| {
678695
scratch.push(token);
679696
false
680697
});
681698

682699
let mut scratch = scratch.into_iter();
683700
self.descend_into_macros_impl(
684701
last,
702+
0.into(),
685703
&mut |InFile { value: last, file_id: last_fid }| {
686704
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
687705
if first_fid == last_fid {
@@ -705,19 +723,27 @@ impl<'db> SemanticsImpl<'db> {
705723
res
706724
}
707725

708-
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
726+
fn descend_into_macros(
727+
&self,
728+
token: SyntaxToken,
729+
offset: TextSize,
730+
) -> SmallVec<[SyntaxToken; 1]> {
709731
let mut res = smallvec![];
710-
self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
732+
self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
711733
res.push(value);
712734
false
713735
});
714736
res
715737
}
716738

717-
fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
739+
fn descend_into_macros_with_same_text(
740+
&self,
741+
token: SyntaxToken,
742+
offset: TextSize,
743+
) -> SmallVec<[SyntaxToken; 1]> {
718744
let text = token.text();
719745
let mut res = smallvec![];
720-
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
746+
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
721747
if value.text() == text {
722748
res.push(value);
723749
}
@@ -729,7 +755,11 @@ impl<'db> SemanticsImpl<'db> {
729755
res
730756
}
731757

732-
fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
758+
fn descend_into_macros_with_kind_preference(
759+
&self,
760+
token: SyntaxToken,
761+
offset: TextSize,
762+
) -> SyntaxToken {
733763
let fetch_kind = |token: &SyntaxToken| match token.parent() {
734764
Some(node) => match node.kind() {
735765
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
@@ -741,7 +771,7 @@ impl<'db> SemanticsImpl<'db> {
741771
};
742772
let preferred_kind = fetch_kind(&token);
743773
let mut res = None;
744-
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
774+
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
745775
if fetch_kind(&value) == preferred_kind {
746776
res = Some(value);
747777
true
@@ -755,9 +785,9 @@ impl<'db> SemanticsImpl<'db> {
755785
res.unwrap_or(token)
756786
}
757787

758-
fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
788+
fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
759789
let mut res = token.clone();
760-
self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
790+
self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
761791
res = value;
762792
true
763793
});
@@ -767,9 +797,13 @@ impl<'db> SemanticsImpl<'db> {
767797
fn descend_into_macros_impl(
768798
&self,
769799
token: SyntaxToken,
800+
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
801+
// mapping, specifically for node downmapping
802+
offset: TextSize,
770803
f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
771804
) {
772805
let _p = profile::span("descend_into_macros");
806+
let relative_token_offset = token.text_range().start().checked_sub(offset);
773807
let parent = match token.parent() {
774808
Some(it) => it,
775809
None => return,
@@ -796,7 +830,12 @@ impl<'db> SemanticsImpl<'db> {
796830
self.cache(value, file_id);
797831
}
798832

799-
let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
833+
let mapped_tokens = expansion_info.map_token_down(
834+
self.db.upcast(),
835+
item,
836+
token,
837+
relative_token_offset,
838+
)?;
800839
let len = stack.len();
801840

802841
// requeue the tokens we got from mapping our current token down
@@ -943,7 +982,7 @@ impl<'db> SemanticsImpl<'db> {
943982
offset: TextSize,
944983
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
945984
node.token_at_offset(offset)
946-
.map(move |token| self.descend_into_macros(token))
985+
.map(move |token| self.descend_into_macros(token, offset))
947986
.map(|descendants| {
948987
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
949988
})

crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ pub(crate) fn extract_expressions_from_format_string(
4848
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
4949

5050
let expanded_t = ast::String::cast(
51-
ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone()),
51+
ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
5252
)?;
5353
if !is_format_string(&expanded_t) {
5454
return None;

crates/ide-assists/src/handlers/extract_function.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -750,7 +750,7 @@ impl FunctionBody {
750750
.descendants_with_tokens()
751751
.filter_map(SyntaxElement::into_token)
752752
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
753-
.flat_map(|t| sema.descend_into_macros(t))
753+
.flat_map(|t| sema.descend_into_macros(t, 0.into()))
754754
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
755755
}
756756
}

crates/ide-db/src/helpers.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ pub fn get_definition(
117117
sema: &Semantics<'_, RootDatabase>,
118118
token: SyntaxToken,
119119
) -> Option<Definition> {
120-
for token in sema.descend_into_macros(token) {
120+
for token in sema.descend_into_macros(token, 0.into()) {
121121
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
122122
if let Some(&[x]) = def.as_deref() {
123123
return Some(x);

crates/ide-db/src/search.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -456,14 +456,14 @@ impl<'a> FindUsages<'a> {
456456
it.text().trim_start_matches("r#") == name
457457
})
458458
.into_iter()
459-
.flat_map(|token| {
459+
.flat_map(move |token| {
460460
// FIXME: There should be optimization potential here
461461
// Currently we try to descend everything we find which
462462
// means we call `Semantics::descend_into_macros` on
463463
// every textual hit. That function is notoriously
464464
// expensive even for things that do not get down mapped
465465
// into macros.
466-
sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent())
466+
sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
467467
})
468468
};
469469

crates/ide/src/call_hierarchy.rs

+6-4
Original file line numberDiff line numberDiff line change
@@ -74,18 +74,20 @@ pub(crate) fn incoming_calls(
7474
Some(calls.into_items())
7575
}
7676

77-
pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
77+
pub(crate) fn outgoing_calls(
78+
db: &RootDatabase,
79+
FilePosition { file_id, offset }: FilePosition,
80+
) -> Option<Vec<CallItem>> {
7881
let sema = Semantics::new(db);
79-
let file_id = position.file_id;
8082
let file = sema.parse(file_id);
8183
let file = file.syntax();
82-
let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
84+
let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
8385
IDENT => 1,
8486
_ => 0,
8587
})?;
8688
let mut calls = CallLocations::default();
8789

88-
sema.descend_into_macros(token)
90+
sema.descend_into_macros(token, offset)
8991
.into_iter()
9092
.filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
9193
.filter_map(|item| match item {

crates/ide/src/doc_links.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -131,19 +131,19 @@ pub(crate) fn remove_links(markdown: &str) -> String {
131131
// |===
132132
pub(crate) fn external_docs(
133133
db: &RootDatabase,
134-
position: &FilePosition,
134+
FilePosition { file_id, offset }: FilePosition,
135135
target_dir: Option<&OsStr>,
136136
sysroot: Option<&OsStr>,
137137
) -> Option<DocumentationLinks> {
138138
let sema = &Semantics::new(db);
139-
let file = sema.parse(position.file_id).syntax().clone();
140-
let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
139+
let file = sema.parse(file_id).syntax().clone();
140+
let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
141141
IDENT | INT_NUMBER | T![self] => 3,
142142
T!['('] | T![')'] => 2,
143143
kind if kind.is_trivia() => 0,
144144
_ => 1,
145145
})?;
146-
let token = sema.descend_into_macros_single(token);
146+
let token = sema.descend_into_macros_single(token, offset);
147147

148148
let node = token.parent()?;
149149
let definition = match_ast! {
@@ -285,7 +285,7 @@ impl DocCommentToken {
285285
let original_start = doc_token.text_range().start();
286286
let relative_comment_offset = offset - original_start - prefix_len;
287287

288-
sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
288+
sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
289289
let (node, descended_prefix_len) = match_ast! {
290290
match t {
291291
ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),

crates/ide/src/expand_macro.rs

+26-21
Original file line numberDiff line numberDiff line change
@@ -40,28 +40,33 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
4040
// struct Bar;
4141
// ```
4242

43-
let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
44-
let hir_file = sema.hir_file_for(&descended.parent()?);
45-
if !hir_file.is_derive_attr_pseudo_expansion(db) {
46-
return None;
47-
}
43+
let derive =
44+
sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
45+
let hir_file = sema.hir_file_for(&descended.parent()?);
46+
if !hir_file.is_derive_attr_pseudo_expansion(db) {
47+
return None;
48+
}
4849

49-
let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
50-
// up map out of the #[derive] expansion
51-
let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
52-
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
53-
let expansions = sema.expand_derive_macro(&attr)?;
54-
let idx = attr
55-
.token_tree()?
56-
.token_trees_and_tokens()
57-
.filter_map(NodeOrToken::into_token)
58-
.take_while(|it| it != &token)
59-
.filter(|it| it.kind() == T![,])
60-
.count();
61-
let expansion =
62-
format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
63-
Some(ExpandedMacro { name, expansion })
64-
});
50+
let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
51+
// up map out of the #[derive] expansion
52+
let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
53+
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
54+
let expansions = sema.expand_derive_macro(&attr)?;
55+
let idx = attr
56+
.token_tree()?
57+
.token_trees_and_tokens()
58+
.filter_map(NodeOrToken::into_token)
59+
.take_while(|it| it != &token)
60+
.filter(|it| it.kind() == T![,])
61+
.count();
62+
let expansion = format(
63+
db,
64+
SyntaxKind::MACRO_ITEMS,
65+
position.file_id,
66+
expansions.get(idx).cloned()?,
67+
);
68+
Some(ExpandedMacro { name, expansion })
69+
});
6570

6671
if derive.is_some() {
6772
return derive;

0 commit comments

Comments (0)