Commit 838cc9d

Auto merge of rust-lang#12025 - Veykril:completion-ctx, r=Veykril

minor: Document completion context some more

2 parents: e0d41bc + f8c32df

1 file changed: +49 -26 lines

crates/ide_completion/src/context.rs

@@ -148,7 +148,11 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) krate: hir::Crate,
     /// The module of the `scope`.
     pub(super) module: hir::Module,
+
+    /// The expected name of what we are completing.
+    /// This is usually the parameter name of the function argument we are completing.
     pub(super) expected_name: Option<NameOrNameRef>,
+    /// The expected type of what we are completing.
     pub(super) expected_type: Option<Type>,
 
     /// The parent function of the cursor position if it exists.
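
For illustration only (not part of this commit): a minimal, self-contained sketch of how an expected name/type pair like the one documented above can be used to rank completion candidates. `Candidate` and `relevance` are hypothetical stand-ins, not rust-analyzer's actual relevance machinery.

    #[derive(Debug)]
    struct Candidate<'a> {
        label: &'a str,
        ty: &'a str,
    }

    /// Score a candidate: matching the expected type counts more than matching the expected name.
    fn relevance(candidate: &Candidate<'_>, expected_name: Option<&str>, expected_type: Option<&str>) -> u32 {
        let mut score = 0;
        if expected_type == Some(candidate.ty) {
            score += 2;
        }
        if expected_name == Some(candidate.label) {
            score += 1;
        }
        score
    }

    fn main() {
        let candidates = [
            Candidate { label: "count", ty: "usize" },
            Candidate { label: "name", ty: "String" },
        ];
        // Completing an argument whose parameter is `name: String`:
        let (expected_name, expected_type) = (Some("name"), Some("String"));
        let best = candidates
            .iter()
            .max_by_key(|c| relevance(c, expected_name, expected_type))
            .unwrap();
        println!("best candidate: {:?}", best); // the `name: String` entry wins
    }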
@@ -157,6 +161,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) impl_def: Option<ast::Impl>,
     /// The NameLike under the cursor in the original file if it exists.
     pub(super) name_syntax: Option<ast::NameLike>,
+    /// Are we completing inside a let statement with a missing semicolon?
     pub(super) incomplete_let: bool,
 
     pub(super) completion_location: Option<ImmediateLocation>,
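
The `incomplete_let` doc comment describes source where the `let`'s semicolon has not been typed yet, for example the cursor sitting right after `let x = ` while the next statement already follows. A toy, purely textual check just to make the condition concrete; rust-analyzer decides this on the syntax tree, not on strings:

    // Hypothetical helper, illustration only.
    fn let_is_missing_semicolon(stmt_text: &str) -> bool {
        stmt_text.trim_start().starts_with("let ") && !stmt_text.trim_end().ends_with(';')
    }

    fn main() {
        // `let x = foo` typed, semicolon not there yet -> incomplete
        assert!(let_is_missing_semicolon("let x = foo"));
        // a finished let statement is not flagged
        assert!(!let_is_missing_semicolon("let x = foo();"));
    }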
@@ -424,6 +429,7 @@ impl<'a> CompletionContext<'a> {
         let scope = sema.scope_at_offset(&token.parent()?, offset)?;
         let krate = scope.krate();
         let module = scope.module();
+
         let mut locals = FxHashMap::default();
         scope.process_all_names(&mut |name, scope| {
             if let ScopeDef::Local(local) = scope {
@@ -467,8 +473,9 @@ impl<'a> CompletionContext<'a> {
         Some(ctx)
     }
 
-    /// Do the attribute expansion at the current cursor position for both original file and fake file
-    /// as long as possible. As soon as one of the two expansions fail we stop to stay in sync.
+    /// Expand attributes and macro calls at the current cursor position for both the original file
+    /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
+    /// and speculative states stay in sync.
     fn expand_and_fill(
         &mut self,
         mut original_file: SyntaxNode,
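
A self-contained sketch of the lockstep expansion described in the new doc comment: expand the real file and the speculative (fake-ident) file together and stop as soon as either side fails, so the two states never drift apart. Toy strings stand in for syntax trees here, and `expand` is a pretend macro expander, not a rust-analyzer API.

    // Pretend "macro expansion": peel one layer of `m!( ... )`.
    fn expand(input: &str) -> Option<String> {
        let inner = input.strip_prefix("m!(")?.strip_suffix(')')?;
        Some(inner.to_string())
    }

    fn expand_in_lockstep(mut original: String, mut speculative: String) -> (String, String) {
        loop {
            match (expand(&original), expand(&speculative)) {
                // both sides expanded: keep going with the expanded text
                (Some(o), Some(s)) => {
                    original = o;
                    speculative = s;
                }
                // one side failed: stop immediately so the two states stay in sync
                _ => break,
            }
        }
        (original, speculative)
    }

    fn main() {
        let (o, s) = expand_in_lockstep("m!(m!(a))".into(), "m!(m!(a_fake))".into());
        assert_eq!((o.as_str(), s.as_str()), ("a", "a_fake"));
    }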
@@ -489,7 +496,9 @@ impl<'a> CompletionContext<'a> {
                 ),
                 |(a, b)| parent_item(a).zip(parent_item(b)),
             );
-            for (actual_item, item_with_fake_ident) in ancestor_items {
+
+            // first try to expand attributes as these are always the outermost macro calls
+            'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
                 match (
                     self.sema.expand_attr_macro(&actual_item),
                     self.sema.speculative_expand_attr_macro(
@@ -498,12 +507,14 @@ impl<'a> CompletionContext<'a> {
                         fake_ident_token.clone(),
                     ),
                 ) {
-                    // maybe parent items have attributes
-                    (None, None) => (),
+                    // maybe parent items have attributes, so continue walking the ancestors
+                    (None, None) => continue 'ancestors,
                     // successful expansions
                     (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                         let new_offset = fake_mapped_token.text_range().start();
                         if new_offset > actual_expansion.text_range().end() {
+                            // offset outside of bounds from the original expansion,
+                            // stop here to prevent problems from happening
                             break 'expansion;
                         }
                         original_file = actual_expansion;
@@ -516,40 +527,39 @@ impl<'a> CompletionContext<'a> {
                     _ => break 'expansion,
                 }
             }
+
+            // No attributes have been expanded, so look for macro_call! token trees or derive token trees
             let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
                 Some(it) => it,
-                None => break,
+                None => break 'expansion,
             };
             let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
                 Some(it) => it,
-                None => break,
+                None => break 'expansion,
             };
 
             // Expand pseudo-derive expansion
             if let (Some(orig_attr), Some(spec_attr)) = (
                 orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
                 spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
             ) {
-                match (
+                if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
                     self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
                     self.sema.speculative_expand_derive_as_pseudo_attr_macro(
                         &orig_attr,
                         &spec_attr,
                         fake_ident_token.clone(),
                     ),
                 ) {
-                    // Clearly not a derive macro
-                    (None, None) => (),
-                    // successful expansions
-                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
-                        let new_offset = fake_mapped_token.text_range().start();
-                        derive_ctx =
-                            Some((actual_expansion, fake_expansion, new_offset, orig_attr));
-                        break 'expansion;
-                    }
-                    // exactly one expansion failed, inconsistent state so stop expanding completely
-                    _ => break 'expansion,
+                    derive_ctx = Some((
+                        actual_expansion,
+                        fake_expansion,
+                        fake_mapped_token.text_range().start(),
+                        orig_attr,
+                    ));
                 }
+                // at this point we won't have any more successful expansions, so stop
+                break 'expansion;
             }
 
             // Expand fn-like macro calls
@@ -560,12 +570,14 @@ impl<'a> CompletionContext<'a> {
                 let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
                 let mac_call_path1 =
                     macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+
+                // inconsistent state, stop expanding
                 if mac_call_path0 != mac_call_path1 {
-                    break;
+                    break 'expansion;
                 }
                 let speculative_args = match macro_call_with_fake_ident.token_tree() {
                     Some(tt) => tt,
-                    None => break,
+                    None => break 'expansion,
                 };
 
                 match (
@@ -580,24 +592,30 @@ impl<'a> CompletionContext<'a> {
                     (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                         let new_offset = fake_mapped_token.text_range().start();
                         if new_offset > actual_expansion.text_range().end() {
-                            break;
+                            // offset outside of bounds from the original expansion,
+                            // stop here to prevent problems from happening
+                            break 'expansion;
                         }
                         original_file = actual_expansion;
                         speculative_file = fake_expansion;
                         fake_ident_token = fake_mapped_token;
                         offset = new_offset;
-                        continue;
+                        continue 'expansion;
                     }
-                    _ => break,
+                    // at least on expansion failed, we won't have anything to expand from this point
+                    // onwards so break out
+                    _ => break 'expansion,
                 }
             }
 
-            break;
+            // none of our states have changed so stop the loop
+            break 'expansion;
         }
 
         self.fill(&original_file, speculative_file, offset, derive_ctx);
     }
 
+    /// Calculate the expected type and name of the cursor position.
     fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
         let mut node = match self.token.parent() {
             Some(it) => it,
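
A hypothetical sketch of what "expected type and name" means in the simplest case, a call argument: line up the cursor's argument index with the callee's parameters. The real `expected_type_and_name` inspects many more positions (let statements, record fields, return positions, and so on).

    struct Param<'a> {
        name: &'a str,
        ty: &'a str,
    }

    /// Expected name/type for the argument at `arg_index`, if the callee has such a parameter.
    fn expected_for_arg<'a>(params: &'a [Param<'a>], arg_index: usize) -> (Option<&'a str>, Option<&'a str>) {
        match params.get(arg_index) {
            Some(p) => (Some(p.name), Some(p.ty)),
            None => (None, None),
        }
    }

    fn main() {
        // Completing the second argument of `fn set(name: String, value: i32)`:
        let params = [Param { name: "name", ty: "String" }, Param { name: "value", ty: "i32" }];
        assert_eq!(expected_for_arg(&params, 1), (Some("value"), Some("i32")));
    }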
@@ -734,6 +752,8 @@ impl<'a> CompletionContext<'a> {
         }
     }
 
+    /// Fill the completion context, this is what does semantic reasoning about the surrounding context
+    /// of the completion location.
     fn fill(
         &mut self,
         original_file: &SyntaxNode,
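
Loosely mirroring the idea behind `completion_location: Option<ImmediateLocation>` that `fill` computes, here is a toy classification of "where the cursor is" from the kind of its parent node; the enum and string kinds are simplified stand-ins, not rust-analyzer's types.

    #[derive(Debug, PartialEq)]
    enum Location {
        Impl,
        Trait,
        RecordField,
        Other,
    }

    // Map a parent node kind to a coarse completion location.
    fn classify(parent_kind: &str) -> Location {
        match parent_kind {
            "IMPL" => Location::Impl,
            "TRAIT" => Location::Trait,
            "RECORD_EXPR_FIELD_LIST" => Location::RecordField,
            _ => Location::Other,
        }
    }

    fn main() {
        assert_eq!(classify("IMPL"), Location::Impl);
        assert_eq!(classify("MATCH_EXPR"), Location::Other);
    }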
@@ -1067,14 +1087,16 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternContext {
     }
 }
 
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
 fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
     let syntax_range = syntax.text_range();
     let range = node.syntax().text_range();
     let intersection = range.intersect(syntax_range)?;
     syntax.covering_element(intersection).ancestors().find_map(N::cast)
 }
 
-/// Compensates for the offset introduced by the fake ident
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
 fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
     let syntax_range = syntax.text_range();
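
`find_node_in_file` relies on plain text-range intersection; below is a minimal sketch of that operation, using `std::ops::Range` in place of rust-analyzer's `TextRange`.

    use std::ops::Range;

    // Intersection of two half-open ranges; `None` if they are completely disjoint
    // (ranges that merely touch yield an empty range).
    fn intersect(a: Range<u32>, b: Range<u32>) -> Option<Range<u32>> {
        let start = a.start.max(b.start);
        let end = a.end.min(b.end);
        (start <= end).then(|| start..end)
    }

    fn main() {
        // a node at 10..20 inside a file covering 0..100 intersects fully
        assert_eq!(intersect(10..20, 0..100), Some(10..20));
        // a node entirely outside the file does not
        assert_eq!(intersect(120..130, 0..100), None);
    }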
@@ -1143,6 +1165,7 @@ const OP_TRAIT_LANG_NAMES: &[&str] = &[
     "shr",
     "sub",
 ];
+
 #[cfg(test)]
 mod tests {
     use expect_test::{expect, Expect};
