
Commit 7c7a41c

bors[bot] and Veykril authored
Merge #10067

10067: Downmap tokens to all token descendants instead of just the first r=Veykril a=Veykril

With this change we can resolve usages of identifiers inside (proc-)macros even when the expansion uses them multiple times for different purposes. In the example below, with the cursor on the `no_send_sync_value` function, the identifier in the attribute invocation is still highlighted correctly, because we now resolve its usages inside the expansion. Previously we only saw the first usage of the identifier, which is a definition only, so we bailed and highlighted nothing.

![image](https://user-images.githubusercontent.com/3757771/131233056-7e645b1d-b82f-468c-bf19-d3335a2cf7c2.png)

Note that most IDE features have to opt into this behaviour explicitly, since pretty much everything expects a single node or token as the result of descending.

Co-authored-by: Lukas Wirth <[email protected]>
2 parents 7e31c5e + 6993a60 · commit 7c7a41c
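To make the one-to-many mapping concrete, here is a minimal, self-contained sketch (hypothetical, not taken from this PR): a `macro_rules!` macro whose expansion uses the same identifier token twice, once as a definition and once as a usage, so descending the call-site token has to yield two tokens.

```rust
// Minimal illustration (hypothetical, not from this PR): the identifier passed
// to the macro appears twice in the expansion, once as a definition and once
// as a usage, so a single call-site token downmaps to two expansion tokens.
macro_rules! def_and_call {
    ($name:ident) => {
        fn $name() {}
        fn caller() {
            $name();
        }
    };
}

def_and_call!(no_send_sync_value);

fn main() {
    caller();
}
```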

File tree: 9 files changed (+199, -106 lines)

crates/hir/src/semantics.rs

Lines changed: 85 additions & 62 deletions
```diff
@@ -2,7 +2,7 @@
 
 mod source_to_def;
 
-use std::{cell::RefCell, fmt, iter::successors};
+use std::{cell::RefCell, fmt};
 
 use base_db::{FileId, FileRange};
 use hir_def::{
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, GenericParamsOwner, LoopBodyOwner},
```
```diff
@@ -165,7 +166,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }
 
+    // FIXME: Rename to descend_into_macros_single
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token).pop().unwrap()
+    }
+
+    // FIXME: Rename to descend_into_macros
+    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
 
```
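A rough usage sketch of the new pair of entry points (`Semantics`, `RootDatabase`, and `SyntaxToken` are the real rust-analyzer types; the helper itself is assumed, not part of this commit):

```rust
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::SyntaxToken;

// Hypothetical helper, for illustration: collect every expansion descendant of
// a token. `descend_into_macros` keeps the old single-token contract by
// popping one element, while `descend_into_macros_many` exposes all of them.
fn all_descendants(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Vec<SyntaxToken> {
    sema.descend_into_macros_many(token).into_iter().collect()
}
```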
```diff
@@ -174,7 +181,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         node: &SyntaxNode,
         offset: TextSize,
     ) -> Option<N> {
-        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
     }
 
     pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
@@ -228,7 +235,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             return Some(it);
         }
 
-        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+    }
+
+    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+        &'slf self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> impl Iterator<Item = N> + 'slf {
+        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
     }
 
     pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
```
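A hedged sketch of consuming the new `find_nodes_at_offset_with_descend` (the helper and bindings here are hypothetical): it yields every matching node across expansions instead of only the first one found.

```rust
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{ast, SyntaxNode, TextSize};

// Hypothetical helper (not from the diff): gather every `ast::NameRef` at an
// offset, including occurrences materialized inside macro expansions.
fn name_refs_at(
    sema: &Semantics<'_, RootDatabase>,
    file: &SyntaxNode,
    offset: TextSize,
) -> Vec<ast::NameRef> {
    sema.find_nodes_at_offset_with_descend(file, offset).collect()
}
```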
```diff
@@ -440,87 +457,93 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
-            None => return token,
+            None => return smallvec![token],
         };
         let sa = self.analyze(&parent);
-
-        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
+        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut cache = self.expansion_info_cache.borrow_mut();
+        let mut res = smallvec![];
+        while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
 
-            for node in token.value.ancestors() {
-                match_ast! {
-                    match node {
-                        ast::MacroCall(macro_call) => {
-                            let tt = macro_call.token_tree()?;
-                            let l_delim = match tt.left_delimiter_token() {
-                                Some(it) => it.text_range().end(),
-                                None => tt.syntax().text_range().start()
-                            };
-                            let r_delim = match tt.right_delimiter_token() {
-                                Some(it) => it.text_range().start(),
-                                None => tt.syntax().text_range().end()
-                            };
-                            if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
-                                return None;
-                            }
-                            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                            let token = self
-                                .expansion_info_cache
-                                .borrow_mut()
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(self.db.upcast(), None, token.as_ref())?;
-
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
-
-                            return Some(token);
-                        },
-                        ast::Item(item) => {
-                            if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
-                                let file_id = call_id.as_file();
-                                let token = self
-                                    .expansion_info_cache
-                                    .borrow_mut()
+            let was_not_remapped = (|| {
+                for node in token.value.ancestors() {
+                    match_ast! {
+                        match node {
+                            ast::MacroCall(macro_call) => {
+                                let tt = macro_call.token_tree()?;
+                                let l_delim = match tt.left_delimiter_token() {
+                                    Some(it) => it.text_range().end(),
+                                    None => tt.syntax().text_range().start()
+                                };
+                                let r_delim = match tt.right_delimiter_token() {
+                                    Some(it) => it.text_range().start(),
+                                    None => tt.syntax().text_range().end()
+                                };
+                                if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                                    return None;
+                                }
+                                let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                                let tokens = cache
                                     .entry(file_id)
                                     .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                     .as_ref()?
-                                    .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
-
-                                if let Some(parent) = token.value.parent() {
-                                    self.cache(find_root(&parent), token.file_id);
+                                    .map_token_down(self.db.upcast(), None, token.as_ref())?;
+
+                                let len = queue.len();
+                                queue.extend(tokens.inspect(|token| {
+                                    if let Some(parent) = token.value.parent() {
+                                        self.cache(find_root(&parent), token.file_id);
+                                    }
+                                }));
+                                return (queue.len() != len).then(|| ());
+                            },
+                            ast::Item(item) => {
+                                if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
+                                    let file_id = call_id.as_file();
+                                    let tokens = cache
+                                        .entry(file_id)
+                                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                        .as_ref()?
+                                        .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
+
+                                    let len = queue.len();
+                                    queue.extend(tokens.inspect(|token| {
+                                        if let Some(parent) = token.value.parent() {
+                                            self.cache(find_root(&parent), token.file_id);
+                                        }
+                                    }));
+                                    return (queue.len() != len).then(|| ());
                                 }
-
-                                return Some(token);
-                            }
-                        },
-                        _ => {}
+                            },
+                            _ => {}
+                        }
                     }
                 }
+                None
+            })().is_none();
+            if was_not_remapped {
+                res.push(token.value)
             }
-
-            None
-        })
-        .last()
-        .unwrap();
-        token.value
+        }
+        res
     }
 
+    // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
+    // traversing the inner iterator when it finds a node.
     fn descend_node_at_offset(
         &self,
         node: &SyntaxNode,
         offset: TextSize,
-    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
-            .map(|token| self.descend_into_macros(token))
-            .map(|it| self.token_ancestors_with_macros(it))
+            .map(move |token| self.descend_into_macros(token))
+            .map(|it| it.into_iter().map(move |it| self.token_ancestors_with_macros(it)))
             .flatten()
     }
 
```
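One detail worth calling out in the hunk above: `(queue.len() != len).then(|| ())` reports whether mapping down actually enqueued new tokens, and a `None` propagating out of the closure means the token was not remapped, so it is kept as its own result. A self-contained toy version of that idiom (illustrative only, not rust-analyzer code):

```rust
// Toy version of the remap-detection idiom used above: extend a work queue
// and report whether anything new was enqueued.
fn push_mapped(queue: &mut Vec<u32>, mapped: impl Iterator<Item = u32>) -> Option<()> {
    let len = queue.len();
    queue.extend(mapped);
    // `Some(())` means "remapped"; `None` makes the caller keep the original.
    (queue.len() != len).then(|| ())
}

fn main() {
    let mut queue = vec![1];
    assert_eq!(push_mapped(&mut queue, [2, 3].into_iter()), Some(()));
    assert_eq!(push_mapped(&mut queue, std::iter::empty()), None);
}
```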

crates/hir_expand/src/db.rs

Lines changed: 1 addition & 1 deletion
```diff
@@ -163,7 +163,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
+    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
```
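The rename reflects that a token map lookup is now one-to-many: `ranges_by_token` (used in `crates/hir_expand/src/lib.rs` below) yields every range, while `first_range_by_token` keeps the old single-result behaviour. The token-map diff itself is not shown on this page, so the following is a simplified, self-contained analogue rather than rust-analyzer's actual `TokenMap`:

```rust
use std::ops::Range;

// Simplified stand-in for a token map where one token id can map to several
// text ranges after this change.
struct TokenMap {
    entries: Vec<(u32, Range<u32>)>,
}

impl TokenMap {
    // All ranges recorded for a token id.
    fn ranges_by_token(&self, id: u32) -> impl Iterator<Item = Range<u32>> + '_ {
        self.entries.iter().filter(move |(i, _)| *i == id).map(|(_, r)| r.clone())
    }

    // The old single-result lookup, recovered as "first of many".
    fn first_range_by_token(&self, id: u32) -> Option<Range<u32>> {
        self.ranges_by_token(id).next()
    }
}

fn main() {
    let map = TokenMap { entries: vec![(7, 0..3), (7, 10..13)] };
    assert_eq!(map.first_range_by_token(7), Some(0..3));
    assert_eq!(map.ranges_by_token(7).count(), 2);
}
```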

crates/hir_expand/src/hygiene.rs

Lines changed: 1 addition & 1 deletion
```diff
@@ -171,7 +171,7 @@ impl HygieneInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
+        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }
```

crates/hir_expand/src/lib.rs

Lines changed: 7 additions & 6 deletions
```diff
@@ -368,7 +368,7 @@ impl ExpansionInfo {
         db: &dyn db::AstDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
+    ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
             let call_id = match self.expanded.file_id.0 {
@@ -411,11 +411,12 @@ impl ExpansionInfo {
             }
         };
 
-        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
+        let tokens = self
+            .exp_map
+            .ranges_by_token(token_id, token.value.kind())
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        let token = self.expanded.value.covering_element(range).into_token()?;
-
-        Some(self.expanded.with_value(token))
+        Some(tokens.map(move |token| self.expanded.with_value(token)))
     }
 
     pub fn map_token_up(
@@ -453,7 +454,7 @@ impl ExpansionInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, token.value.kind())?;
+        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
```
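Since `map_token_down` now returns an iterator, a caller consumes every occurrence rather than a single token. A hedged sketch (the surrounding `expansion_info`, `db`, and `token` bindings are assumed, not from the diff):

```rust
// Assumed context: `expansion_info: &ExpansionInfo`, `db: &dyn AstDatabase`,
// and `token: InFile<SyntaxToken>`.
if let Some(tokens) = expansion_info.map_token_down(db, None, token.as_ref()) {
    for expanded in tokens {
        // `expanded` is an `InFile<SyntaxToken>` pointing into the expansion.
    }
}
```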

0 commit comments