Skip to content

Commit fae34e0

Browse files
committed
Auto merge of rust-lang#18234 - Veykril:veykril/push-vzynqtlxmrnl, r=Veykril
internal: Filter out opaque tokens in some IDE feature macro descensions
2 parents b91eeae + 0301368 commit fae34e0

File tree

11 files changed

+114
-70
lines changed

11 files changed

+114
-70
lines changed

src/tools/rust-analyzer/crates/hir-expand/src/db.rs

+11-6
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,10 @@ use crate::{
     cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, SyntaxFixupUndoInfo},
-    hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
+    hygiene::{
+        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+        SyntaxContextExt as _,
+    },
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
@@ -300,14 +303,16 @@ pub fn expand_speculative(
         token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);

     let syntax_node = node.syntax_node();
-    let token = rev_tmap
+    let (token, _) = rev_tmap
         .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
-        .filter_map(|range| syntax_node.covering_element(range).into_token())
-        .min_by_key(|t| {
-            // prefer tokens of the same kind and text
+        .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
+        .min_by_key(|(t, ctx)| {
+            // prefer tokens of the same kind and text, as well as non opaque marked ones
             // Note the inversion of the score here, as we want to prefer the first token in case
             // of all tokens having the same score
-            (t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8)
+            ctx.is_opaque(db) as u8
+                + 2 * (t.kind() != token_to_map.kind()) as u8
+                + 4 * ((t.text() != token_to_map.text()) as u8)
         })?;
     Some((node.syntax_node(), token))
 }

src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs

+4
Original file line numberDiff line numberDiff line change
@@ -151,6 +151,7 @@ pub trait SyntaxContextExt {
     fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
     fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
     fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
+    fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
 }

 impl SyntaxContextExt for SyntaxContextId {
@@ -177,6 +178,9 @@ impl SyntaxContextExt for SyntaxContextId {
         marks.reverse();
         marks
     }
+    fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
+        !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
+    }
 }

 // FIXME: Make this a SyntaxContextExt method once we have RPIT

src/tools/rust-analyzer/crates/hir-expand/src/lib.rs

+13-11
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ mod prettify_macro_expansion_;

 use attrs::collect_attrs;
 use rustc_hash::FxHashMap;
+use stdx::TupleExt;
 use triomphe::Arc;

 use std::hash::Hash;
@@ -772,14 +773,15 @@ impl ExpansionInfo {
     /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
     ///
     /// Note this does a linear search through the entire backing vector of the spanmap.
+    // FIXME: Consider adding a reverse map to ExpansionInfo to get rid of the linear search which
+    // potentially results in quadratic look ups (notably this might improve semantic highlighting perf)
     pub fn map_range_down_exact(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-        let tokens = self
-            .exp_map
-            .ranges_with_span_exact(span)
-            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+        let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| {
+            self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+        });

         Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
@@ -791,11 +793,10 @@
     pub fn map_range_down(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-        let tokens = self
-            .exp_map
-            .ranges_with_span(span)
-            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+        let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| {
+            self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+        });

         Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
@@ -845,7 +846,8 @@
             self.arg.file_id,
             arg_map
                 .ranges_with_span_exact(span)
-                .filter(|range| range.intersect(arg_range).is_some())
+                .filter(|(range, _)| range.intersect(arg_range).is_some())
+                .map(TupleExt::head)
                 .collect(),
         )
     }

src/tools/rust-analyzer/crates/hir/src/semantics.rs

+58-42
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ use hir_expand::{
     builtin::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     files::InRealFile,
+    hygiene::SyntaxContextExt as _,
     inert_attr_macro::find_builtin_attr_idx,
     name::AsName,
     FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
@@ -32,7 +33,7 @@ use intern::Symbol;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
-use span::{EditionedFileId, FileId, HirFileIdRepr};
+use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
@@ -608,7 +609,7 @@ impl<'db> SemanticsImpl<'db> {
         let quote = string.open_quote_text_range()?;

         let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
-        self.descend_into_macros_breakable(token, |token| {
+        self.descend_into_macros_breakable(token, |token, _| {
             (|| {
                 let token = token.value;
                 let string = ast::String::cast(token)?;
@@ -655,7 +656,7 @@ impl<'db> SemanticsImpl<'db> {
         let original_string = ast::String::cast(original_token.clone())?;
         let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
         let quote = original_string.open_quote_text_range()?;
-        self.descend_into_macros_breakable(original_token, |token| {
+        self.descend_into_macros_breakable(original_token, |token, _| {
             (|| {
                 let token = token.value;
                 self.resolve_offset_in_format_args(
@@ -718,7 +719,7 @@ impl<'db> SemanticsImpl<'db> {
             // node is just the token, so descend the token
             self.descend_into_macros_impl(
                 InRealFile::new(file_id, first),
-                &mut |InFile { value, .. }| {
+                &mut |InFile { value, .. }, _ctx| {
                     if let Some(node) = value
                         .parent_ancestors()
                         .take_while(|it| it.text_range() == value.text_range())
@@ -732,15 +733,15 @@
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
+            self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| {
                 scratch.push(token);
                 CONTINUE_NO_BREAKS
             });

             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 InRealFile::new(file_id, last),
-                &mut |InFile { value: last, file_id: last_fid }| {
+                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
                             if let Some(p) = first.parent() {
@@ -763,7 +764,9 @@
         res
     }

-    fn is_inside_macro_call(token: &SyntaxToken) -> bool {
+    // FIXME: This isn't quite right wrt to inner attributes
+    /// Does a syntactic traversal to check whether this token might be inside a macro call
+    pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
         token.parent_ancestors().any(|ancestor| {
             if ast::MacroCall::can_cast(ancestor.kind()) {
                 return true;
@@ -781,25 +784,14 @@
         })
     }

-    pub fn descend_into_macros_exact_if_in_macro(
-        &self,
-        token: SyntaxToken,
-    ) -> SmallVec<[SyntaxToken; 1]> {
-        if Self::is_inside_macro_call(&token) {
-            self.descend_into_macros_exact(token)
-        } else {
-            smallvec![token]
-        }
-    }
-
     pub fn descend_into_macros_cb(
         &self,
         token: SyntaxToken,
-        mut cb: impl FnMut(InFile<SyntaxToken>),
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId),
     ) {
         if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
-            self.descend_into_macros_impl(token, &mut |t| {
-                cb(t);
+            self.descend_into_macros_impl(token, &mut |t, ctx| {
+                cb(t, ctx);
                 CONTINUE_NO_BREAKS
             });
         }
@@ -808,7 +800,7 @@
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
         if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
-            self.descend_into_macros_impl(token, &mut |t| {
+            self.descend_into_macros_impl(token, &mut |t, _ctx| {
                 res.push(t.value);
                 CONTINUE_NO_BREAKS
             });
@@ -819,10 +811,27 @@
         res
     }

+    pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+        let mut res = smallvec![];
+        if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
+            self.descend_into_macros_impl(token, &mut |t, ctx| {
+                if !ctx.is_opaque(self.db.upcast()) {
+                    // Don't descend into opaque contexts
+                    res.push(t.value);
+                }
+                CONTINUE_NO_BREAKS
+            });
+        }
+        if res.is_empty() {
+            res.push(token);
+        }
+        res
+    }
+
     pub fn descend_into_macros_breakable<T>(
         &self,
         token: InRealFile<SyntaxToken>,
-        mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         self.descend_into_macros_impl(token.clone(), &mut cb)
     }
@@ -834,10 +843,12 @@
         let text = token.text();
         let kind = token.kind();

-        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| {
+        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
             let mapped_kind = value.kind();
             let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
-            let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
+            let matches = (kind == mapped_kind || any_ident_match())
+                && text == value.text()
+                && !ctx.is_opaque(self.db.upcast());
             if matches {
                 r.push(value);
             }
@@ -854,17 +865,21 @@
         let text = token.text();
         let kind = token.kind();
         if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
-            self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
-                let mapped_kind = value.kind();
-                let any_ident_match =
-                    || kind.is_any_identifier() && value.kind().is_any_identifier();
-                let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
-                if matches {
-                    ControlFlow::Break(value)
-                } else {
-                    ControlFlow::Continue(())
-                }
-            })
+            self.descend_into_macros_breakable(
+                token.clone(),
+                |InFile { value, file_id: _ }, _ctx| {
+                    let mapped_kind = value.kind();
+                    let any_ident_match =
+                        || kind.is_any_identifier() && value.kind().is_any_identifier();
+                    let matches =
+                        (kind == mapped_kind || any_ident_match()) && text == value.text();
+                    if matches {
+                        ControlFlow::Break(value)
+                    } else {
+                        ControlFlow::Continue(())
+                    }
+                },
+            )
         } else {
             None
         }
@@ -874,7 +889,7 @@
     fn descend_into_macros_impl<T>(
         &self,
        InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
-        f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
         let (sa, span, file_id) = token
@@ -898,7 +913,8 @@
         // These are tracked to know which macro calls we still have to look into
         // the tokens themselves aren't that interesting as the span that is being used to map
         // things down never changes.
-        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
+            vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];

         // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
         let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@@ -921,11 +937,11 @@
         // Filters out all tokens that contain the given range (usually the macro call), any such
         // token is redundant as the corresponding macro call has already been processed
         let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
-            tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range()))
+            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
         };

         while let Some((expansion, ref mut tokens)) = stack.pop() {
-            while let Some(token) = tokens.pop() {
+            while let Some((token, ctx)) = tokens.pop() {
                 let was_not_remapped = (|| {
                     // First expand into attribute invocations
                     let containing_attribute_macro_call = self.with_ctx(|ctx| {
@@ -1036,7 +1052,7 @@
                         let text_range = attr.syntax().text_range();
                         // remove any other token in this macro input, all their mappings are the
                         // same as this
-                        tokens.retain(|t| {
+                        tokens.retain(|(t, _)| {
                             !text_range.contains_range(t.text_range())
                         });
                         return process_expansion_for_token(
@@ -1093,7 +1109,7 @@
                     .is_none();

                 if was_not_remapped {
-                    if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) {
+                    if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) {
                         return Some(b);
                     }
                 }

src/tools/rust-analyzer/crates/ide-db/src/search.rs

+7-3
Original file line numberDiff line numberDiff line change
@@ -529,9 +529,13 @@ impl<'a> FindUsages<'a> {
             })
             .into_iter()
             .flat_map(move |token| {
-                sema.descend_into_macros_exact_if_in_macro(token)
-                    .into_iter()
-                    .filter_map(|it| it.parent())
+                if sema.might_be_inside_macro_call(&token) {
+                    sema.descend_into_macros_exact(token)
+                } else {
+                    <_>::from([token])
+                }
+                .into_iter()
+                .filter_map(|it| it.parent())
             })
     }
537541

src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
         .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
     let range = original_token.text_range();
     let info: Vec<NavigationTarget> = sema
-        .descend_into_macros(original_token)
+        .descend_into_macros_no_opaque(original_token)
         .iter()
         .filter_map(|token| {
             let parent = token.parent()?;

src/tools/rust-analyzer/crates/ide/src/goto_definition.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ pub(crate) fn goto_definition(
     }

     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros_no_opaque(original_token.clone())
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;

src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition(
     }

     let range = token.text_range();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros_no_opaque(token)
         .into_iter()
         .filter_map(|token| {
             let ty = sema

src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs

+2-1
Original file line numberDiff line numberDiff line change
@@ -409,7 +409,8 @@ fn traverse(
                 let mut r = 0;
                 sema.descend_into_macros_breakable(
                     InRealFile::new(file_id, token.clone()),
-                    |tok| {
+                    |tok, _ctx| {
+                        // FIXME: Consider checking ctx transparency for being opaque?
                         let tok = tok.value;
                         let tok_kind = tok.kind();

0 commit comments

Comments (0)