Skip to content

Commit 1a1ad5e

Browse files
fix: iter_cloned_collect false positive (FP) with custom From/IntoIterator impl (#14473)
Closes #9119. changelog: [`iter_cloned_collect`]: fix false positive with custom `From`/`IntoIterator` impl
2 parents 634c1c8 + ee36124 commit 1a1ad5e

File tree

4 files changed

+69
-3
lines changed

4 files changed

+69
-3
lines changed

Diff for: clippy_lints/src/methods/iter_cloned_collect.rs

+8-2
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,22 @@
11
use crate::methods::utils::derefs_to_slice;
22
use clippy_utils::diagnostics::span_lint_and_sugg;
3-
use clippy_utils::ty::is_type_diagnostic_item;
3+
use clippy_utils::ty::{get_iterator_item_ty, is_type_diagnostic_item};
44
use rustc_errors::Applicability;
55
use rustc_hir as hir;
66
use rustc_lint::LateContext;
7+
use rustc_middle::ty;
78
use rustc_span::sym;
89

910
use super::ITER_CLONED_COLLECT;
1011

1112
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, method_name: &str, expr: &hir::Expr<'_>, recv: &'tcx hir::Expr<'_>) {
12-
if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec)
13+
let expr_ty = cx.typeck_results().expr_ty(expr);
14+
if is_type_diagnostic_item(cx, expr_ty, sym::Vec)
1315
&& let Some(slice) = derefs_to_slice(cx, recv, cx.typeck_results().expr_ty(recv))
16+
&& let ty::Adt(_, args) = expr_ty.kind()
17+
&& let Some(iter_item_ty) = get_iterator_item_ty(cx, cx.typeck_results().expr_ty(recv))
18+
&& let ty::Ref(_, iter_item_ty, _) = iter_item_ty.kind()
19+
&& *iter_item_ty == args.type_at(0)
1420
&& let Some(to_replace) = expr.span.trim_start(slice.span.source_callsite())
1521
{
1622
span_lint_and_sugg(

Diff for: tests/ui/iter_cloned_collect.fixed

+27
Original file line numberDiff line numberDiff line change
@@ -29,3 +29,30 @@ fn main() {
2929
let _: Vec<isize> = v.to_vec();
3030
//~^ iter_cloned_collect
3131
}
32+
33+
mod issue9119 {
34+
35+
use std::iter;
36+
37+
#[derive(Clone)]
38+
struct Example(u16);
39+
40+
impl iter::FromIterator<Example> for Vec<u8> {
41+
fn from_iter<T>(iter: T) -> Self
42+
where
43+
T: IntoIterator<Item = Example>,
44+
{
45+
iter.into_iter().flat_map(|e| e.0.to_le_bytes()).collect()
46+
}
47+
}
48+
49+
fn foo() {
50+
let examples = [Example(1), Example(0x1234)];
51+
let encoded: Vec<u8> = examples.iter().cloned().collect();
52+
assert_eq!(encoded, vec![0x01, 0x00, 0x34, 0x12]);
53+
54+
let a = [&&String::new()];
55+
let v: Vec<&&String> = a.to_vec();
56+
//~^ iter_cloned_collect
57+
}
58+
}

Diff for: tests/ui/iter_cloned_collect.rs

+27
Original file line numberDiff line numberDiff line change
@@ -33,3 +33,30 @@ fn main() {
3333
let _: Vec<isize> = v.iter().copied().collect();
3434
//~^ iter_cloned_collect
3535
}
36+
37+
mod issue9119 {
38+
39+
use std::iter;
40+
41+
#[derive(Clone)]
42+
struct Example(u16);
43+
44+
impl iter::FromIterator<Example> for Vec<u8> {
45+
fn from_iter<T>(iter: T) -> Self
46+
where
47+
T: IntoIterator<Item = Example>,
48+
{
49+
iter.into_iter().flat_map(|e| e.0.to_le_bytes()).collect()
50+
}
51+
}
52+
53+
fn foo() {
54+
let examples = [Example(1), Example(0x1234)];
55+
let encoded: Vec<u8> = examples.iter().cloned().collect();
56+
assert_eq!(encoded, vec![0x01, 0x00, 0x34, 0x12]);
57+
58+
let a = [&&String::new()];
59+
let v: Vec<&&String> = a.iter().cloned().collect();
60+
//~^ iter_cloned_collect
61+
}
62+
}

Diff for: tests/ui/iter_cloned_collect.stderr

+7-1
Original file line numberDiff line numberDiff line change
@@ -36,5 +36,11 @@ error: called `iter().copied().collect()` on a slice to create a `Vec`. Calling
3636
LL | let _: Vec<isize> = v.iter().copied().collect();
3737
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
3838

39-
error: aborting due to 5 previous errors
39+
error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
40+
--> tests/ui/iter_cloned_collect.rs:59:33
41+
|
42+
LL | let v: Vec<&&String> = a.iter().cloned().collect();
43+
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
44+
45+
error: aborting due to 6 previous errors
4046

0 commit comments

Comments
 (0)