
Commit 66870ca

bors[bot] and Veykril authored

Merge #11384

11384: feat: Complete local fn and closure params from surrounding locals scope r=Veykril a=Veykril

Co-authored-by: Lukas Wirth <[email protected]>

2 parents fd3942e + 6194092, commit 66870ca
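
For context, a minimal sketch of the scenario this change enables, written in the style of rust-analyzer's completion test fixtures ($0 marks the cursor). The names `outer`, `foo`, `bar`, `inner` and the closure are illustrative assumptions, not taken from the commit's tests:

fn outer() {
    let foo: i32 = 1;
    let bar: &str = "x";

    // Completing inside either parameter list below now offers `foo: i32`
    // and `bar: &str`, derived from the locals visible in the surrounding
    // block.
    fn inner($0) {}
    let closure = |$0| {};
}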

File tree: 5 files changed, +199 −88 lines

crates/hir/src/semantics.rs

Lines changed: 2 additions & 2 deletions
@@ -389,8 +389,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.scope(node)
     }

-    pub fn scope_at_offset(&self, token: &SyntaxToken, offset: TextSize) -> SemanticsScope<'db> {
-        self.imp.scope_at_offset(&token.parent().unwrap(), offset)
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
+        self.imp.scope_at_offset(&node, offset)
     }

     pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
Lines changed: 90 additions & 51 deletions
@@ -1,9 +1,11 @@
 //! See [`complete_fn_param`].

+use hir::HirDisplay;
 use rustc_hash::FxHashMap;
 use syntax::{
+    algo,
     ast::{self, HasModuleItem},
-    match_ast, AstNode, SyntaxKind,
+    match_ast, AstNode, Direction, SyntaxKind,
 };

 use crate::{
@@ -15,14 +17,48 @@ use crate::{
 /// functions in a file have a `spam: &mut Spam` parameter, a completion with
 /// `spam: &mut Spam` insert text/label and `spam` lookup string will be
 /// suggested.
+///
+/// Also complete parameters for closure or local functions from the surrounding defined locals.
 pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
-    let param_of_fn =
-        matches!(ctx.pattern_ctx, Some(PatternContext { is_param: Some(ParamKind::Function), .. }));
+    let (param_list, _, param_kind) = match &ctx.pattern_ctx {
+        Some(PatternContext { param_ctx: Some(kind), .. }) => kind,
+        _ => return None,
+    };
+
+    let comma_wrapper = comma_wrapper(ctx);
+    let mut add_new_item_to_acc = |label: &str, lookup: String| {
+        let mk_item = |label: &str| {
+            CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label)
+        };
+        let mut item = match &comma_wrapper {
+            Some(fmt) => mk_item(&fmt(&label)),
+            None => mk_item(label),
+        };
+        item.lookup_by(lookup);
+        item.add_to(acc)
+    };

-    if !param_of_fn {
-        return None;
+    match param_kind {
+        ParamKind::Function(function) => {
+            fill_fn_params(ctx, function, &param_list, add_new_item_to_acc);
+        }
+        ParamKind::Closure(closure) => {
+            let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?;
+            params_from_stmt_list_scope(ctx, stmt_list, |name, ty| {
+                add_new_item_to_acc(&format!("{name}: {ty}"), name.to_string());
+            });
+        }
     }

+    Some(())
+}
+
+fn fill_fn_params(
+    ctx: &CompletionContext,
+    function: &ast::Fn,
+    param_list: &ast::ParamList,
+    mut add_new_item_to_acc: impl FnMut(&str, String),
+) {
     let mut file_params = FxHashMap::default();

     let mut extract_params = |f: ast::Fn| {
@@ -56,23 +92,46 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
         };
     }

-    let function = ctx.token.ancestors().find_map(ast::Fn::cast)?;
-    let param_list = function.param_list()?;
+    if let Some(stmt_list) = function.syntax().parent().and_then(ast::StmtList::cast) {
+        params_from_stmt_list_scope(ctx, stmt_list, |name, ty| {
+            file_params.entry(format!("{name}: {ty}")).or_insert(name.to_string());
+        });
+    }

     remove_duplicated(&mut file_params, param_list.params());
-
     let self_completion_items = ["self", "&self", "mut self", "&mut self"];
     if should_add_self_completions(ctx, param_list) {
-        self_completion_items.into_iter().for_each(|self_item| {
-            add_new_item_to_acc(ctx, acc, self_item.to_string(), self_item.to_string())
-        });
+        self_completion_items
+            .into_iter()
+            .for_each(|self_item| add_new_item_to_acc(self_item, self_item.to_string()));
     }

-    file_params.into_iter().try_for_each(|(whole_param, binding)| {
-        Some(add_new_item_to_acc(ctx, acc, surround_with_commas(ctx, whole_param), binding))
-    })?;
+    file_params
+        .into_iter()
+        .for_each(|(whole_param, binding)| add_new_item_to_acc(&whole_param, binding));
+}

-    Some(())
+fn params_from_stmt_list_scope(
+    ctx: &CompletionContext,
+    stmt_list: ast::StmtList,
+    mut cb: impl FnMut(hir::Name, String),
+) {
+    let syntax_node = match stmt_list.syntax().last_child() {
+        Some(it) => it,
+        None => return,
+    };
+    let scope = ctx.sema.scope_at_offset(stmt_list.syntax(), syntax_node.text_range().end());
+    let module = match scope.module() {
+        Some(it) => it,
+        None => return,
+    };
+    scope.process_all_names(&mut |name, def| {
+        if let hir::ScopeDef::Local(local) = def {
+            if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module.into()) {
+                cb(name, ty);
+            }
+        }
+    });
 }

 fn remove_duplicated(
@@ -96,52 +155,32 @@ fn remove_duplicated(
     })
 }

-fn should_add_self_completions(ctx: &CompletionContext, param_list: ast::ParamList) -> bool {
+fn should_add_self_completions(ctx: &CompletionContext, param_list: &ast::ParamList) -> bool {
     let inside_impl = ctx.impl_def.is_some();
     let no_params = param_list.params().next().is_none() && param_list.self_param().is_none();

     inside_impl && no_params
 }

-fn surround_with_commas(ctx: &CompletionContext, param: String) -> String {
-    match fallible_surround_with_commas(ctx, &param) {
-        Some(surrounded) => surrounded,
-        // fallback to the original parameter
-        None => param,
-    }
-}
-
-fn fallible_surround_with_commas(ctx: &CompletionContext, param: &str) -> Option<String> {
-    let next_token = {
+fn comma_wrapper(ctx: &CompletionContext) -> Option<impl Fn(&str) -> String> {
+    let next_token_kind = {
         let t = ctx.token.next_token()?;
-        match t.kind() {
-            SyntaxKind::WHITESPACE => t.next_token()?,
-            _ => t,
-        }
+        let t = algo::skip_whitespace_token(t, Direction::Next)?;
+        t.kind()
     };
-
-    let trailing_comma_missing = matches!(next_token.kind(), SyntaxKind::IDENT);
-    let trailing = if trailing_comma_missing { "," } else { "" };
-
-    let previous_token = if matches!(ctx.token.kind(), SyntaxKind::IDENT | SyntaxKind::WHITESPACE) {
-        ctx.previous_token.as_ref()?
-    } else {
-        &ctx.token
+    let prev_token_kind = {
+        let t = ctx.previous_token.clone()?;
+        let t = algo::skip_whitespace_token(t, Direction::Prev)?;
+        t.kind()
     };

-    let needs_leading = !matches!(previous_token.kind(), SyntaxKind::L_PAREN | SyntaxKind::COMMA);
-    let leading = if needs_leading { ", " } else { "" };
+    let has_trailing_comma =
+        matches!(next_token_kind, SyntaxKind::COMMA | SyntaxKind::R_PAREN | SyntaxKind::PIPE);
+    let trailing = if has_trailing_comma { "" } else { "," };

-    Some(format!("{}{}{}", leading, param, trailing))
-}
+    let has_leading_comma =
+        matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE);
+    let leading = if has_leading_comma { "" } else { ", " };

-fn add_new_item_to_acc(
-    ctx: &CompletionContext,
-    acc: &mut Completions,
-    label: String,
-    lookup: String,
-) {
-    let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label);
-    item.lookup_by(lookup);
-    item.add_to(acc)
+    Some(move |param: &_| format!("{}{}{}", leading, param, trailing))
 }
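
The new `comma_wrapper` helper only decides whether a separating comma is needed on either side of the inserted parameter text. A standalone sketch of that rule, using plain characters instead of rust-analyzer's `SyntaxKind` tokens; the function `wrap_with_commas` and its signature are illustrative assumptions, not part of the commit:

// Simplified model of the comma placement rule: look at the nearest
// non-whitespace character on each side of the cursor and add only the
// separators that are actually missing.
fn wrap_with_commas(param: &str, prev: Option<char>, next: Option<char>) -> String {
    // An opening paren, a pipe, or an existing comma on the left means no
    // leading ", " is required.
    let has_leading = matches!(prev, Some('(' | ',' | '|'));
    // A comma, closing paren, or pipe on the right means no trailing ","
    // is required.
    let has_trailing = matches!(next, Some(',' | ')' | '|'));
    let leading = if has_leading { "" } else { ", " };
    let trailing = if has_trailing { "" } else { "," };
    format!("{leading}{param}{trailing}")
}

fn main() {
    // Cursor sits after an existing parameter and before another identifier:
    assert_eq!(wrap_with_commas("bar: &str", Some(','), Some('o')), "bar: &str,");
    // Cursor sits in an empty parameter list:
    assert_eq!(wrap_with_commas("bar: &str", Some('('), Some(')')), "bar: &str");
}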

crates/ide_completion/src/context.rs

Lines changed: 56 additions & 34 deletions
@@ -27,6 +27,8 @@ use crate::{
     CompletionConfig,
 };

+const COMPLETION_MARKER: &str = "intellijRulezz";
+
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub(crate) enum PatternRefutability {
     Refutable,
@@ -68,7 +70,7 @@ pub(crate) struct PathCompletionContext {
 #[derive(Debug)]
 pub(super) struct PatternContext {
     pub(super) refutability: PatternRefutability,
-    pub(super) is_param: Option<ParamKind>,
+    pub(super) param_ctx: Option<(ast::ParamList, ast::Param, ParamKind)>,
     pub(super) has_type_ascription: bool,
 }

@@ -80,10 +82,10 @@ pub(super) enum LifetimeContext {
     LabelDef,
 }

-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub(crate) enum ParamKind {
-    Function,
-    Closure,
+    Function(ast::Fn),
+    Closure(ast::ClosureExpr),
 }

 /// `CompletionContext` is created early during completion to figure out, where
@@ -382,15 +384,15 @@ impl<'a> CompletionContext<'a> {
         // actual completion.
         let file_with_fake_ident = {
             let parse = db.parse(file_id);
-            let edit = Indel::insert(offset, "intellijRulezz".to_string());
+            let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
             parse.reparse(&edit).tree()
         };
         let fake_ident_token =
             file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;

         let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
         let token = sema.descend_into_macros_single(original_token.clone());
-        let scope = sema.scope_at_offset(&token, offset);
+        let scope = sema.scope_at_offset(&token.parent()?, offset);
         let krate = scope.krate();
         let mut locals = vec![];
         scope.process_all_names(&mut |name, scope| {
@@ -723,7 +725,7 @@ impl<'a> CompletionContext<'a> {
                 }
             }
            ast::NameLike::Name(name) => {
-                self.pattern_ctx = Self::classify_name(&self.sema, name);
+                self.pattern_ctx = Self::classify_name(&self.sema, original_file, name);
            }
        }
    }
@@ -750,7 +752,11 @@ impl<'a> CompletionContext<'a> {
        })
    }

-    fn classify_name(_sema: &Semantics<RootDatabase>, name: ast::Name) -> Option<PatternContext> {
+    fn classify_name(
+        _sema: &Semantics<RootDatabase>,
+        original_file: &SyntaxNode,
+        name: ast::Name,
+    ) -> Option<PatternContext> {
         let bind_pat = name.syntax().parent().and_then(ast::IdentPat::cast)?;
         let is_name_in_field_pat = bind_pat
             .syntax()
@@ -763,7 +769,7 @@ impl<'a> CompletionContext<'a> {
         if !bind_pat.is_simple_ident() {
             return None;
         }
-        Some(pattern_context_for(bind_pat.into()))
+        Some(pattern_context_for(original_file, bind_pat.into()))
     }

     fn classify_name_ref(
@@ -799,15 +805,15 @@ impl<'a> CompletionContext<'a> {
                    },
                    ast::TupleStructPat(it) => {
                        path_ctx.has_call_parens = true;
-                        pat_ctx = Some(pattern_context_for(it.into()));
+                        pat_ctx = Some(pattern_context_for(original_file, it.into()));
                        Some(PathKind::Pat)
                    },
                    ast::RecordPat(it) => {
-                        pat_ctx = Some(pattern_context_for(it.into()));
+                        pat_ctx = Some(pattern_context_for(original_file, it.into()));
                        Some(PathKind::Pat)
                    },
                    ast::PathPat(it) => {
-                        pat_ctx = Some(pattern_context_for(it.into()));
+                        pat_ctx = Some(pattern_context_for(original_file, it.into()));
                        Some(PathKind::Pat)
                    },
                    ast::MacroCall(it) => it.excl_token().and(Some(PathKind::Mac)),
@@ -824,12 +830,7 @@ impl<'a> CompletionContext<'a> {
                path_ctx.use_tree_parent = use_tree_parent;
                path_ctx.qualifier = path
                    .segment()
-                    .and_then(|it| {
-                        find_node_with_range::<ast::PathSegment>(
-                            original_file,
-                            it.syntax().text_range(),
-                        )
-                    })
+                    .and_then(|it| find_node_in_file(original_file, &it))
                    .map(|it| it.parent_path());
                return Some((path_ctx, pat_ctx));
            }
@@ -864,7 +865,7 @@ impl<'a> CompletionContext<'a> {
    }
 }

-fn pattern_context_for(pat: ast::Pat) -> PatternContext {
+fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternContext {
     let mut is_param = None;
     let (refutability, has_type_ascription) =
         pat
@@ -877,18 +878,21 @@ fn pattern_context_for(pat: ast::Pat) -> PatternContext {
                match node {
                    ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
                    ast::Param(param) => {
-                        let is_closure_param = param
-                            .syntax()
-                            .ancestors()
-                            .nth(2)
-                            .and_then(ast::ClosureExpr::cast)
-                            .is_some();
-                        is_param = Some(if is_closure_param {
-                            ParamKind::Closure
-                        } else {
-                            ParamKind::Function
-                        });
-                        return (PatternRefutability::Irrefutable, param.ty().is_some())
+                        let has_type_ascription = param.ty().is_some();
+                        is_param = (|| {
+                            let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
+                            let param_list = find_node_in_file_compensated(original_file, &fake_param_list)?;
+                            let param_list_owner = param_list.syntax().parent()?;
+                            let kind = match_ast! {
+                                match param_list_owner {
+                                    ast::ClosureExpr(closure) => ParamKind::Closure(closure),
+                                    ast::Fn(fn_) => ParamKind::Function(fn_),
+                                    _ => return None,
+                                }
+                            };
+                            Some((param_list, param, kind))
+                        })();
+                        return (PatternRefutability::Irrefutable, has_type_ascription)
                    },
                    ast::MatchArm(_) => PatternRefutability::Refutable,
                    ast::Condition(_) => PatternRefutability::Refutable,
@@ -898,11 +902,29 @@ fn pattern_context_for(pat: ast::Pat) -> PatternContext {
            };
            (refutability, false)
        });
-    PatternContext { refutability, is_param, has_type_ascription }
+    PatternContext { refutability, param_ctx: is_param, has_type_ascription }
+}
+
+fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
+    let syntax_range = syntax.text_range();
+    let range = node.syntax().text_range();
+    let intersection = range.intersect(syntax_range)?;
+    syntax.covering_element(intersection).ancestors().find_map(N::cast)
 }

-fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
-    syntax.covering_element(range).ancestors().find_map(N::cast)
+/// Compensates for the offset introduced by the fake ident
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
+    let syntax_range = syntax.text_range();
+    let range = node.syntax().text_range();
+    let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
+    if end < range.start() {
+        return None;
+    }
+    let range = TextRange::new(range.start(), end);
+    // our inserted ident could cause `range` to be go outside of the original syntax, so cap it
+    let intersection = range.intersect(syntax_range)?;
+    syntax.covering_element(intersection).ancestors().find_map(N::cast)
 }

 fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
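
The intent behind `find_node_in_file_compensated` is easier to see with concrete numbers. Below is a standalone sketch of the range adjustment using plain `usize` offsets instead of rowan's `TextRange`/`TextSize`; the helper `compensated_range` is an illustrative assumption, not part of the commit. Nodes found in the file that contains the fake `intellijRulezz` ident end `COMPLETION_MARKER.len()` bytes later than their counterparts in the original file, so the end of the search range is pulled back by that amount before looking up the node in the original tree.

const COMPLETION_MARKER: &str = "intellijRulezz";

// Map a node's range in the fake (marker-containing) file back to the range
// it would occupy in the original file by dropping the marker's length from
// the end, bailing out if the range is too small to have contained the marker.
fn compensated_range(start: usize, end: usize) -> Option<(usize, usize)> {
    let end = end.checked_sub(COMPLETION_MARKER.len())?;
    if end < start {
        return None;
    }
    Some((start, end))
}

fn main() {
    // A param list spanning 10..30 in the fake file corresponds to 10..16 in
    // the original file (the marker is 14 bytes long).
    assert_eq!(compensated_range(10, 30), Some((10, 16)));
    // Ranges shorter than the marker cannot be compensated.
    assert_eq!(compensated_range(10, 20), None);
}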
