@@ -169,10 +169,10 @@ impl StaticIndex<'_> {
             .unwrap();
         // hovers
         let sema = hir::Semantics::new(self.db);
-        let tokens_or_nodes = sema.parse_guess_edition(file_id).syntax().clone();
+        let root = sema.parse_guess_edition(file_id).syntax().clone();
         let edition =
             sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
-        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it {
+        let tokens = root.descendants_with_tokens().filter_map(|it| match it {
             syntax::NodeOrToken::Node(_) => None,
             syntax::NodeOrToken::Token(it) => Some(it),
         });
@@ -194,24 +194,19 @@ impl StaticIndex<'_> {
             )
         });
         let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
-        for token in tokens {
-            let range = token.text_range();
-            let node = token.parent().unwrap();
-            let def = match get_definition(&sema, token.clone()) {
-                Some(it) => it,
-                None => continue,
-            };
+
+        let mut add_token = |def: Definition, range: TextRange, scope_node: &SyntaxNode| {
             let id = if let Some(it) = self.def_map.get(&def) {
                 *it
             } else {
                 let it = self.tokens.insert(TokenStaticData {
-                    documentation: documentation_for_definition(&sema, def, &node),
+                    documentation: documentation_for_definition(&sema, def, scope_node),
                     hover: Some(hover_for_definition(
                         &sema,
                         file_id,
                         def,
                         None,
-                        &node,
+                        scope_node,
                         None,
                         false,
                         &hover_config,
@@ -240,6 +235,22 @@ impl StaticIndex<'_> {
                 },
             });
             result.tokens.push((range, id));
+        };
+
+        if let Some(module) = sema.file_to_module_def(file_id) {
+            let def = Definition::Module(module);
+            let range = root.text_range();
+            add_token(def, range, &root);
+        }
+
+        for token in tokens {
+            let range = token.text_range();
+            let node = token.parent().unwrap();
+            let def = match get_definition(&sema, token.clone()) {
+                Some(it) => it,
+                None => continue,
+            };
+            add_token(def, range, &node);
         }
         self.files.push(result);
     }
@@ -300,6 +311,10 @@ mod tests {
         let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for f in s.files {
             for (range, _) in f.tokens {
+                if range.start() == TextSize::from(0) {
+                    // ignore whole file range corresponding to module definition
+                    continue;
+                }
                 let it = FileRange { file_id: f.file_id, range };
                 if !range_set.contains(&it) {
                     panic!("additional range {it:?}");
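
Note on the shape of the change: the old per-token loop body is hoisted into a single `FnMut` closure, `add_token`, so that a synthetic whole-file token for the module definition and the ordinary per-token path can share the same dedupe-and-record logic. Below is a minimal standalone sketch of that pattern; the names and types (`def_ids`, `per_token`, string definitions, `(u32, u32)` ranges) are illustrative stand-ins, not rust-analyzer's actual API.

use std::collections::HashMap;

fn main() {
    // Stand-ins: definitions are strings, ranges are (start, end) offsets.
    let per_token = vec![("foo", (4u32, 7u32)), ("bar", (12, 15)), ("foo", (20, 23))];

    let mut def_ids: HashMap<&str, usize> = HashMap::new(); // plays the role of `def_map`
    let mut tokens: Vec<((u32, u32), usize)> = Vec::new(); // plays the role of `result.tokens`

    // Captures both collections mutably, hence `let mut` and `FnMut`,
    // mirroring `add_token` in the diff above.
    let mut add_token = |def: &'static str, range: (u32, u32)| {
        let next = def_ids.len();
        let id = *def_ids.entry(def).or_insert(next); // dedupe the definition
        tokens.push((range, id)); // record (range, id)
    };

    // One synthetic whole-file entry first (the module definition in the diff)...
    add_token("module", (0, 30));

    // ...then the regular per-token entries.
    for (def, range) in per_token {
        add_token(def, range);
    }

    assert_eq!(tokens.len(), 4);
    assert_eq!(tokens[1].1, tokens[3].1); // both "foo" tokens share one id
    println!("{tokens:?}");
}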