
Commit 026a8c9

Simplify
1 parent 89b9940 commit 026a8c9

5 files changed: +91 -107 lines changed

crates/hir-def/src/adt.rs (+3 -2)

@@ -2,9 +2,10 @@
 
 use std::sync::Arc;
 
-use crate::tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
 use base_db::CrateId;
+use cfg::CfgOptions;
 use either::Either;
+
 use hir_expand::{
     name::{AsName, Name},
     HirFileId, InFile,
@@ -24,12 +25,12 @@ use crate::{
     src::HasChildSource,
     src::HasSource,
     trace::Trace,
+    tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
     type_ref::TypeRef,
     visibility::RawVisibility,
     EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
     VariantId,
 };
-use cfg::CfgOptions;
 
 /// Note that we use `StructData` for unions as well!
 #[derive(Debug, Clone, PartialEq, Eq)]

crates/ide-completion/src/context.rs (+17 -20)

@@ -571,28 +571,25 @@ impl<'a> CompletionContext<'a> {
 
         // try to skip completions on path with invalid colons
         // this approach works in normal path and inside token tree
-        match original_token.kind() {
-            T![:] => {
-                // return if no prev token before colon
-                let prev_token = original_token.prev_token()?;
-
-                // only has a single colon
-                if prev_token.kind() != T![:] {
-                    return None;
-                }
+        if original_token.kind() == T![:] {
+            // return if no prev token before colon
+            let prev_token = original_token.prev_token()?;
 
-                // has 3 colon or 2 coloncolon in a row
-                // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205
-                // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751
-                if prev_token
-                    .prev_token()
-                    .map(|t| t.kind() == T![:] || t.kind() == T![::])
-                    .unwrap_or(false)
-                {
-                    return None;
-                }
+            // only has a single colon
+            if prev_token.kind() != T![:] {
+                return None;
+            }
+
+            // has 3 colon or 2 coloncolon in a row
+            // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205
+            // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751
+            if prev_token
+                .prev_token()
+                .map(|t| t.kind() == T![:] || t.kind() == T![::])
+                .unwrap_or(false)
+            {
+                return None;
             }
-            _ => {}
         }
 
         let AnalysisResult {
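
The change above folds a `match` whose only meaningful arm is `T![:]` (with an empty `_ => {}` catch-all) into a plain `if`. A minimal standalone sketch of that simplification pattern, with a hypothetical `TokenKind` enum standing in for rust-analyzer's token kinds:

// Standalone sketch; `TokenKind` and the handler bodies are illustrative only.
#[derive(PartialEq)]
enum TokenKind {
    Colon,
    Ident,
}

fn handle_before(kind: TokenKind) {
    // A match with one interesting arm and an empty catch-all...
    match kind {
        TokenKind::Colon => {
            println!("skip completions after a lone or malformed colon");
        }
        _ => {}
    }
}

fn handle_after(kind: TokenKind) {
    // ...reads more directly as an `if` on the one kind we care about.
    if kind == TokenKind::Colon {
        println!("skip completions after a lone or malformed colon");
    }
}

fn main() {
    handle_before(TokenKind::Colon);
    handle_after(TokenKind::Ident);
}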

crates/ide-completion/src/context/analysis.rs (+58 -65)

@@ -29,6 +29,7 @@ pub(super) struct AnalysisResult {
     pub(super) analysis: CompletionAnalysis,
     pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
     pub(super) qualifier_ctx: QualifierCtx,
+    /// the original token of the expanded file
     pub(super) token: SyntaxToken,
     pub(super) offset: TextSize,
 }
@@ -213,15 +214,6 @@ fn analyze(
     let _p = profile::span("CompletionContext::analyze");
     let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
         expansion_result;
-    let syntax_element = NodeOrToken::Token(fake_ident_token);
-    if is_in_token_of_for_loop(syntax_element.clone()) {
-        // for pat $0
-        // there is nothing to complete here except `in` keyword
-        // don't bother populating the context
-        // FIXME: the completion calculations should end up good enough
-        // such that this special case becomes unnecessary
-        return None;
-    }
 
     // Overwrite the path kind for derives
     if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
@@ -249,37 +241,35 @@ fn analyze(
         return None;
     }
 
-    let name_like = match find_node_at_offset(&speculative_file, offset) {
-        Some(it) => it,
-        None => {
-            let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
-                CompletionAnalysis::String {
-                    original,
-                    expanded: ast::String::cast(self_token.clone()),
+    let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
+        let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
+            CompletionAnalysis::String {
+                original,
+                expanded: ast::String::cast(self_token.clone()),
+            }
+        } else {
+            // Fix up trailing whitespace problem
+            // #[attr(foo = $0
+            let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
+            let p = token.parent()?;
+            if p.kind() == SyntaxKind::TOKEN_TREE
+                && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+            {
+                let colon_prefix = previous_non_trivia_token(self_token.clone())
+                    .map_or(false, |it| T![:] == it.kind());
+                CompletionAnalysis::UnexpandedAttrTT {
+                    fake_attribute_under_caret: fake_ident_token
+                        .parent_ancestors()
+                        .find_map(ast::Attr::cast),
+                    colon_prefix,
                 }
             } else {
-                // Fix up trailing whitespace problem
-                // #[attr(foo = $0
-                let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
-                let p = token.parent()?;
-                if p.kind() == SyntaxKind::TOKEN_TREE
-                    && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
-                {
-                    let colon_prefix = previous_non_trivia_token(self_token.clone())
-                        .map_or(false, |it| T![:] == it.kind());
-                    CompletionAnalysis::UnexpandedAttrTT {
-                        fake_attribute_under_caret: syntax_element
-                            .ancestors()
-                            .find_map(ast::Attr::cast),
-                        colon_prefix,
-                    }
-                } else {
-                    return None;
-                }
-            };
-            return Some((analysis, (None, None), QualifierCtx::default()));
-        }
+                return None;
+            }
+        };
+        return Some((analysis, (None, None), QualifierCtx::default()));
     };
+
     let expected = expected_type_and_name(sema, self_token, &name_like);
     let mut qual_ctx = QualifierCtx::default();
     let analysis = match name_like {
@@ -290,6 +280,22 @@ fn analyze(
             let parent = name_ref.syntax().parent()?;
             let (nameref_ctx, qualifier_ctx) =
                 classify_name_ref(sema, &original_file, name_ref, parent)?;
+
+            if let NameRefContext {
+                kind:
+                    NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
+                ..
+            } = &nameref_ctx
+            {
+                if is_in_token_of_for_loop(path) {
+                    // for pat $0
+                    // there is nothing to complete here except `in` keyword
+                    // don't bother populating the context
+                    // Ideally this special casing wouldn't be needed, but the parser recovers
+                    return None;
+                }
+            }
+
             qual_ctx = qualifier_ctx;
             CompletionAnalysis::NameRef(nameref_ctx)
         }
@@ -323,16 +329,14 @@ fn expected_type_and_name(
                     ast::FieldExpr(e) => e
                         .syntax()
                         .ancestors()
-                        .map_while(ast::FieldExpr::cast)
-                        .last()
-                        .map(|it| it.syntax().clone()),
+                        .take_while(|it| ast::FieldExpr::can_cast(it.kind()))
+                        .last(),
                     ast::PathSegment(e) => e
                         .syntax()
                         .ancestors()
                         .skip(1)
                         .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
-                        .find_map(ast::PathExpr::cast)
-                        .map(|it| it.syntax().clone()),
+                        .find(|it| ast::PathExpr::can_cast(it.kind())),
                     _ => None
                 }
             };
@@ -1270,40 +1274,29 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
     Some((use_tree.path()?, true))
 }
 
-pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
+fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
     // oh my ...
     (|| {
-        let syntax_token = element.into_token()?;
-        let range = syntax_token.text_range();
-        let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
-
-        // check if the current token is the `in` token of a for loop
-        if let Some(token) = for_expr.in_token() {
-            return Some(syntax_token == token);
+        let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
+        let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
+        if for_expr.in_token().is_some() {
+            return Some(false);
        }
         let pat = for_expr.pat()?;
-        if range.end() < pat.syntax().text_range().end() {
-            // if we are inside or before the pattern we can't be at the `in` token position
-            return None;
-        }
         let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
         Some(match next_sibl {
-            // the loop body is some node, if our token is at the start we are at the `in` position,
-            // otherwise we could be in a recovered expression, we don't wanna ruin completions there
-            syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
-            // the loop body consists of a single token, if we are this we are certainly at the `in` token position
-            syntax::NodeOrToken::Token(t) => t == syntax_token,
+            syntax::NodeOrToken::Node(n) => {
+                n.text_range().start() == path.syntax().text_range().start()
+            }
+            syntax::NodeOrToken::Token(t) => {
+                t.text_range().start() == path.syntax().text_range().start()
+            }
         })
     })()
     .unwrap_or(false)
 }
 
-#[test]
-fn test_for_is_prev2() {
-    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
-}
-
-pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
+fn is_in_loop_body(node: &SyntaxNode) -> bool {
     node.ancestors()
         .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
         .find_map(|it| {
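
Two refactorings above are worth calling out. First, the `name_like` lookup drops a `match` that only existed to unwrap `Some` in favor of `let`-`else` (stable since Rust 1.65). A minimal standalone sketch of that pattern, with a toy `find_node_at_offset` standing in for the real syntax-tree lookup:

// Standalone sketch; the helper below is illustrative, not rust-analyzer's API.
fn find_node_at_offset(src: &str, offset: usize) -> Option<&str> {
    src.get(offset..)
}

// Before: the happy path is buried in `Some(it) => it`, and the fallback
// logic lives in the `None` arm.
fn analyze_with_match(src: &str, offset: usize) -> Option<usize> {
    let name_like = match find_node_at_offset(src, offset) {
        Some(it) => it,
        None => {
            return None;
        }
    };
    Some(name_like.len())
}

// After: `let`-`else` keeps the happy path at the top level and moves the
// early return into the `else` block.
fn analyze_with_let_else(src: &str, offset: usize) -> Option<usize> {
    let Some(name_like) = find_node_at_offset(src, offset) else {
        return None;
    };
    Some(name_like.len())
}

fn main() {
    assert_eq!(
        analyze_with_match("for pat in iter {}", 4),
        analyze_with_let_else("for pat in iter {}", 4),
    );
}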

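Second, the `expected_type_and_name` hunk makes a similar trade: instead of casting each ancestor into a typed node and then taking its syntax node back out (`map_while(cast) ... .map(|it| it.syntax().clone())`), it keeps the untyped nodes and only tests their kind (`take_while(can_cast)`). A standalone sketch of the equivalence on plain integers, where "is even" plays the role of `can_cast`:

// Standalone sketch; the wrapping/unwrapping mirrors cast()/syntax(), not real APIs.
fn main() {
    let ancestors = [2, 4, 6, 7, 8];

    // Before-style: wrap each element while the predicate holds, then take
    // the last wrapped value.
    let last_a = ancestors
        .iter()
        .map_while(|&x| if x % 2 == 0 { Some(x) } else { None })
        .last();

    // After-style: test the predicate directly and keep the element as-is.
    let last_b = ancestors.iter().copied().take_while(|x| x % 2 == 0).last();

    assert_eq!(last_a, last_b);
    assert_eq!(last_a, Some(6));
}
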
crates/ide-completion/src/lib.rs (+10 -8)

@@ -156,21 +156,23 @@ pub fn completions(
 
     // prevent `(` from triggering unwanted completion noise
     if trigger_character == Some('(') {
-        if let CompletionAnalysis::NameRef(NameRefContext { kind, .. }) = &analysis {
-            if let NameRefKind::Path(
-                path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. },
-            ) = kind
-            {
-                completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token);
-            }
+        if let CompletionAnalysis::NameRef(NameRefContext {
+            kind:
+                NameRefKind::Path(
+                    path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. },
+                ),
+            ..
+        }) = analysis
+        {
+            completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token);
         }
         return Some(completions.into());
     }
 
     {
         let acc = &mut completions;
 
-        match &analysis {
+        match analysis {
             CompletionAnalysis::Name(name_ctx) => completions::complete_name(acc, ctx, name_ctx),
             CompletionAnalysis::NameRef(name_ref_ctx) => {
                 completions::complete_name_ref(acc, ctx, name_ref_ctx)
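
Here the two nested `if let`s collapse into a single pattern that reaches through both enum layers, and the binding switches from borrowing `&analysis` to consuming `analysis` by value. A minimal standalone sketch with hypothetical stand-in types:

// Standalone sketch; these types only mimic the shape of the real context structs.
struct PathCtx {
    has_in_token: bool,
}

enum NameRefKind {
    Path(PathCtx),
    Other,
}

struct NameRefCtx {
    kind: NameRefKind,
}

enum Analysis {
    NameRef(NameRefCtx),
    Name,
}

// Before: two nested `if let`s, borrowing `analysis`.
fn complete_before(analysis: &Analysis) {
    if let Analysis::NameRef(NameRefCtx { kind, .. }) = analysis {
        if let NameRefKind::Path(path_ctx) = kind {
            println!("vis path, has_in_token = {}", path_ctx.has_in_token);
        }
    }
}

// After: one pattern binds `path_ctx` directly, consuming `analysis`.
fn complete_after(analysis: Analysis) {
    if let Analysis::NameRef(NameRefCtx { kind: NameRefKind::Path(path_ctx), .. }) = analysis {
        println!("vis path, has_in_token = {}", path_ctx.has_in_token);
    }
}

fn main() {
    let a = Analysis::NameRef(NameRefCtx { kind: NameRefKind::Path(PathCtx { has_in_token: true }) });
    complete_before(&a);
    complete_after(a);
}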

crates/ide-completion/src/tests.rs (+3 -12)

@@ -23,15 +23,14 @@ mod type_pos;
 mod use_tree;
 mod visibility;
 
-use hir::{db::DefDatabase, PrefixKind, Semantics};
+use hir::{db::DefDatabase, PrefixKind};
 use ide_db::{
     base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     RootDatabase, SnippetCap,
 };
 use itertools::Itertools;
 use stdx::{format_to, trim_indent};
-use syntax::{AstNode, NodeOrToken, SyntaxElement};
 use test_utils::assert_eq_text;
 
 use crate::{
@@ -216,15 +215,6 @@ pub(crate) fn check_edit_with_config(
     assert_eq_text!(&ra_fixture_after, &actual)
 }
 
-pub(crate) fn check_pattern_is_applicable(code: &str, check: impl FnOnce(SyntaxElement) -> bool) {
-    let (db, pos) = position(code);
-
-    let sema = Semantics::new(&db);
-    let original_file = sema.parse(pos.file_id);
-    let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap();
-    assert!(check(NodeOrToken::Token(token)));
-}
-
 pub(crate) fn get_all_items(
     config: CompletionConfig,
     code: &str,
@@ -246,8 +236,9 @@ pub(crate) fn get_all_items(
 }
 
 #[test]
-fn test_no_completions_required() {
+fn test_no_completions_in_for_loop_in_kw_pos() {
     assert_eq!(completion_list(r#"fn foo() { for i i$0 }"#), String::new());
+    assert_eq!(completion_list(r#"fn foo() { for i in$0 }"#), String::new());
 }
 
 #[test]
