[Clippy] Swap manual_retain to use diagnostic items instead of paths #130522

Merged 1 commit on Sep 19, 2024
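
This PR registers `rustc_diagnostic_item` attributes on the library methods that `manual_retain` looks for and switches the lint from hard-coded path arrays to diagnostic-item lookups, which keep working if the items move between modules. A minimal sketch of the pattern the diff follows, using names from this change (the `is_slice_iter` helper below is illustrative only, not part of the PR):

```rust
// Library side (see the diffs below): the method is tagged with a
// diagnostic item so lints can identify it by name rather than by path:
//
//     #[cfg_attr(not(test), rustc_diagnostic_item = "slice_iter")]
//     pub fn iter(&self) -> Iter<'_, T> { ... }

// Lint side: the resolved DefId of the method call is checked against the
// diagnostic item instead of being matched against a path array.
use rustc_hir::def_id::DefId;
use rustc_lint::LateContext;
use rustc_span::sym;

fn is_slice_iter(cx: &LateContext<'_>, def_id: DefId) -> bool {
    // Before this PR: match_def_path(cx, def_id, &paths::SLICE_INTO)
    cx.tcx.is_diagnostic_item(sym::slice_iter, def_id)
}
```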
9 changes: 9 additions & 0 deletions compiler/rustc_span/src/symbol.rs
@@ -488,6 +488,7 @@ symbols! {
begin_panic,
bench,
bin,
binaryheap_iter,
bind_by_move_pattern_guards,
bindings_after_at,
bitand,
@@ -511,6 +512,7 @@ symbols! {
breakpoint,
bridge,
bswap,
btreeset_iter,
builtin_syntax,
c,
c_str,
@@ -971,6 +973,7 @@ symbols! {
half_open_range_patterns,
half_open_range_patterns_in_slices,
hash,
hashset_iter,
hexagon_target_feature,
hidden,
homogeneous_aggregate,
@@ -1077,6 +1080,9 @@ symbols! {
item,
item_like_imports,
iter,
iter_cloned,
iter_copied,
iter_filter,
iter_mut,
iter_repeat,
iterator,
@@ -1817,6 +1823,7 @@ symbols! {
slice,
slice_from_raw_parts,
slice_from_raw_parts_mut,
slice_iter,
slice_len_fn,
slice_patterns,
slicing_syntax,
@@ -1849,6 +1856,7 @@ symbols! {
stop_after_dataflow,
store,
str,
str_chars,
str_from_utf8,
str_from_utf8_mut,
str_from_utf8_unchecked,
@@ -2064,6 +2072,7 @@ symbols! {
variant_count,
vec,
vec_macro,
vecdeque_iter,
version,
vfp2,
vis,
1 change: 1 addition & 0 deletions library/alloc/src/collections/binary_heap/mod.rs
@@ -959,6 +959,7 @@ impl<T, A: Allocator> BinaryHeap<T, A> {
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "binaryheap_iter")]
pub fn iter(&self) -> Iter<'_, T> {
Iter { iter: self.data.iter() }
}
1 change: 1 addition & 0 deletions library/alloc/src/collections/btree/set.rs
@@ -1132,6 +1132,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// assert_eq!(set_iter.next(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "btreeset_iter")]
pub fn iter(&self) -> Iter<'_, T> {
Iter { iter: self.map.keys() }
}
1 change: 1 addition & 0 deletions library/alloc/src/collections/vec_deque/mod.rs
@@ -1201,6 +1201,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(&c[..], b);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_iter")]
pub fn iter(&self) -> Iter<'_, T> {
let (a, b) = self.as_slices();
Iter::new(a.iter(), b.iter())
3 changes: 3 additions & 0 deletions library/core/src/iter/traits/iterator.rs
@@ -876,6 +876,7 @@ pub trait Iterator {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_do_not_const_check]
#[cfg_attr(not(test), rustc_diagnostic_item = "iter_filter")]
fn filter<P>(self, predicate: P) -> Filter<Self, P>
where
Self: Sized,
@@ -3412,6 +3413,7 @@ pub trait Iterator {
/// ```
#[stable(feature = "iter_copied", since = "1.36.0")]
#[rustc_do_not_const_check]
#[cfg_attr(not(test), rustc_diagnostic_item = "iter_copied")]
fn copied<'a, T: 'a>(self) -> Copied<Self>
where
Self: Sized + Iterator<Item = &'a T>,
@@ -3460,6 +3462,7 @@ pub trait Iterator {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_do_not_const_check]
#[cfg_attr(not(test), rustc_diagnostic_item = "iter_cloned")]
fn cloned<'a, T: 'a>(self) -> Cloned<Self>
where
Self: Sized + Iterator<Item = &'a T>,
1 change: 1 addition & 0 deletions library/core/src/slice/mod.rs
@@ -1010,6 +1010,7 @@ impl<T> [T] {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
#[cfg_attr(not(test), rustc_diagnostic_item = "slice_iter")]
pub fn iter(&self) -> Iter<'_, T> {
Iter::new(self)
}
1 change: 1 addition & 0 deletions library/core/src/str/mod.rs
@@ -832,6 +832,7 @@ impl str {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
#[cfg_attr(not(test), rustc_diagnostic_item = "str_chars")]
pub fn chars(&self) -> Chars<'_> {
Chars { iter: self.as_bytes().iter() }
}
1 change: 1 addition & 0 deletions library/std/src/collections/hash/set.rs
@@ -187,6 +187,7 @@ impl<T, S> HashSet<T, S> {
#[inline]
#[rustc_lint_query_instability]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "hashset_iter")]
pub fn iter(&self) -> Iter<'_, T> {
Iter { base: self.base.iter() }
}
34 changes: 17 additions & 17 deletions src/tools/clippy/clippy_lints/src/manual_retain.rs
@@ -3,22 +3,22 @@ use clippy_config::Conf;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::{get_type_diagnostic_name, is_type_lang_item};
use clippy_utils::{match_def_path, paths, SpanlessEq};
use clippy_utils::SpanlessEq;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::ExprKind::Assign;
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
use rustc_span::symbol::{sym, Symbol};
use rustc_span::Span;

const ACCEPTABLE_METHODS: [&[&str]; 5] = [
&paths::BINARYHEAP_ITER,
&paths::HASHSET_ITER,
&paths::BTREESET_ITER,
&paths::SLICE_INTO,
&paths::VEC_DEQUE_ITER,
const ACCEPTABLE_METHODS: [Symbol; 5] = [
sym::binaryheap_iter,
sym::hashset_iter,
sym::btreeset_iter,
sym::slice_iter,
sym::vecdeque_iter,
];

declare_clippy_lint! {
@@ -84,7 +84,7 @@ fn check_into_iter(
) {
if let hir::ExprKind::MethodCall(_, into_iter_expr, [_], _) = &target_expr.kind
&& let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
&& match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
&& cx.tcx.is_diagnostic_item(sym::iter_filter, filter_def_id)
&& let hir::ExprKind::MethodCall(_, struct_expr, [], _) = &into_iter_expr.kind
&& let Some(into_iter_def_id) = cx.typeck_results().type_dependent_def_id(into_iter_expr.hir_id)
&& Some(into_iter_def_id) == cx.tcx.lang_items().into_iter_fn()
@@ -127,14 +127,14 @@ fn check_iter(
) {
if let hir::ExprKind::MethodCall(_, filter_expr, [], _) = &target_expr.kind
&& let Some(copied_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
&& (match_def_path(cx, copied_def_id, &paths::CORE_ITER_COPIED)
|| match_def_path(cx, copied_def_id, &paths::CORE_ITER_CLONED))
&& (cx.tcx.is_diagnostic_item(sym::iter_copied, copied_def_id)
|| cx.tcx.is_diagnostic_item(sym::iter_cloned, copied_def_id))
&& let hir::ExprKind::MethodCall(_, iter_expr, [_], _) = &filter_expr.kind
&& let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(filter_expr.hir_id)
&& match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
&& cx.tcx.is_diagnostic_item(sym::iter_filter, filter_def_id)
&& let hir::ExprKind::MethodCall(_, struct_expr, [], _) = &iter_expr.kind
&& let Some(iter_expr_def_id) = cx.typeck_results().type_dependent_def_id(iter_expr.hir_id)
&& match_acceptable_def_path(cx, iter_expr_def_id)
&& match_acceptable_sym(cx, iter_expr_def_id)
&& match_acceptable_type(cx, left_expr, msrv)
&& SpanlessEq::new(cx).eq_expr(left_expr, struct_expr)
&& let hir::ExprKind::MethodCall(_, _, [closure_expr], _) = filter_expr.kind
@@ -189,10 +189,10 @@ fn check_to_owned(
&& cx.tcx.is_diagnostic_item(sym::to_owned_method, to_owned_def_id)
&& let hir::ExprKind::MethodCall(_, chars_expr, [_], _) = &filter_expr.kind
&& let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(filter_expr.hir_id)
&& match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
&& cx.tcx.is_diagnostic_item(sym::iter_filter, filter_def_id)
&& let hir::ExprKind::MethodCall(_, str_expr, [], _) = &chars_expr.kind
&& let Some(chars_expr_def_id) = cx.typeck_results().type_dependent_def_id(chars_expr.hir_id)
&& match_def_path(cx, chars_expr_def_id, &paths::STR_CHARS)
&& cx.tcx.is_diagnostic_item(sym::str_chars, chars_expr_def_id)
&& let ty = cx.typeck_results().expr_ty(str_expr).peel_refs()
&& is_type_lang_item(cx, ty, hir::LangItem::String)
&& SpanlessEq::new(cx).eq_expr(left_expr, str_expr)
@@ -247,10 +247,10 @@ fn make_sugg(
}
}

fn match_acceptable_def_path(cx: &LateContext<'_>, collect_def_id: DefId) -> bool {
fn match_acceptable_sym(cx: &LateContext<'_>, collect_def_id: DefId) -> bool {
ACCEPTABLE_METHODS
.iter()
.any(|&method| match_def_path(cx, collect_def_id, method))
.any(|&method| cx.tcx.is_diagnostic_item(method, collect_def_id))
}

fn match_acceptable_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>, msrv: &Msrv) -> bool {
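
For context, the lint's behavior is unchanged; only how it resolves the library methods differs. A hedged example of the pattern `manual_retain` rewrites, based on the lint's documented behavior rather than this diff:

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4];
    // The iter().filter().copied().collect() round trip is what the new
    // slice_iter / iter_filter / iter_copied diagnostic items now detect:
    v = v.iter().filter(|&&x| x % 2 == 0).copied().collect();
    // Clippy suggests: v.retain(|&x| x % 2 == 0);
    assert_eq!(v, vec![2, 4]);
}
```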
9 changes: 0 additions & 9 deletions src/tools/clippy/clippy_utils/src/paths.rs
@@ -12,13 +12,8 @@ pub const APPLICABILITY_VALUES: [[&str; 3]; 4] = [
["rustc_lint_defs", "Applicability", "MachineApplicable"],
];
pub const DIAG: [&str; 2] = ["rustc_errors", "Diag"];
pub const BINARYHEAP_ITER: [&str; 5] = ["alloc", "collections", "binary_heap", "BinaryHeap", "iter"];
pub const BTREEMAP_CONTAINS_KEY: [&str; 6] = ["alloc", "collections", "btree", "map", "BTreeMap", "contains_key"];
pub const BTREEMAP_INSERT: [&str; 6] = ["alloc", "collections", "btree", "map", "BTreeMap", "insert"];
pub const BTREESET_ITER: [&str; 6] = ["alloc", "collections", "btree", "set", "BTreeSet", "iter"];
pub const CORE_ITER_CLONED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "cloned"];
pub const CORE_ITER_COPIED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "copied"];
pub const CORE_ITER_FILTER: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "filter"];
pub const CORE_RESULT_OK_METHOD: [&str; 4] = ["core", "result", "Result", "ok"];
pub const CSTRING_AS_C_STR: [&str; 5] = ["alloc", "ffi", "c_str", "CString", "as_c_str"];
pub const EARLY_CONTEXT: [&str; 2] = ["rustc_lint", "EarlyContext"];
@@ -39,7 +34,6 @@ pub const HASHMAP_VALUES: [&str; 5] = ["std", "collections", "hash", "map", "Val
pub const HASHMAP_DRAIN: [&str; 5] = ["std", "collections", "hash", "map", "Drain"];
pub const HASHMAP_VALUES_MUT: [&str; 5] = ["std", "collections", "hash", "map", "ValuesMut"];
pub const HASHSET_ITER_TY: [&str; 5] = ["std", "collections", "hash", "set", "Iter"];
pub const HASHSET_ITER: [&str; 6] = ["std", "collections", "hash", "set", "HashSet", "iter"];
pub const HASHSET_DRAIN: [&str; 5] = ["std", "collections", "hash", "set", "Drain"];
pub const IDENT: [&str; 3] = ["rustc_span", "symbol", "Ident"];
pub const IDENT_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Ident", "as_str"];
@@ -71,13 +65,11 @@ pub const REGEX_SET_NEW: [&str; 3] = ["regex", "RegexSet", "new"];
pub const SERDE_DESERIALIZE: [&str; 3] = ["serde", "de", "Deserialize"];
pub const SERDE_DE_VISITOR: [&str; 3] = ["serde", "de", "Visitor"];
pub const SLICE_INTO_VEC: [&str; 4] = ["alloc", "slice", "<impl [T]>", "into_vec"];
pub const SLICE_INTO: [&str; 4] = ["core", "slice", "<impl [T]>", "iter"];
pub const STD_IO_SEEK_FROM_CURRENT: [&str; 4] = ["std", "io", "SeekFrom", "Current"];
pub const STD_IO_SEEKFROM_START: [&str; 4] = ["std", "io", "SeekFrom", "Start"];
pub const STRING_AS_MUT_STR: [&str; 4] = ["alloc", "string", "String", "as_mut_str"];
pub const STRING_AS_STR: [&str; 4] = ["alloc", "string", "String", "as_str"];
pub const STRING_NEW: [&str; 4] = ["alloc", "string", "String", "new"];
pub const STR_CHARS: [&str; 4] = ["core", "str", "<impl str>", "chars"];
pub const STR_ENDS_WITH: [&str; 4] = ["core", "str", "<impl str>", "ends_with"];
pub const STR_LEN: [&str; 4] = ["core", "str", "<impl str>", "len"];
pub const STR_STARTS_WITH: [&str; 4] = ["core", "str", "<impl str>", "starts_with"];
@@ -100,7 +92,6 @@ pub const TOKIO_IO_OPEN_OPTIONS: [&str; 4] = ["tokio", "fs", "open_options", "Op
pub const TOKIO_IO_OPEN_OPTIONS_NEW: [&str; 5] = ["tokio", "fs", "open_options", "OpenOptions", "new"];
pub const VEC_AS_MUT_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_mut_slice"];
pub const VEC_AS_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_slice"];
pub const VEC_DEQUE_ITER: [&str; 5] = ["alloc", "collections", "vec_deque", "VecDeque", "iter"];
pub const VEC_FROM_ELEM: [&str; 3] = ["alloc", "vec", "from_elem"];
pub const VEC_NEW: [&str; 4] = ["alloc", "vec", "Vec", "new"];
pub const VEC_WITH_CAPACITY: [&str; 4] = ["alloc", "vec", "Vec", "with_capacity"];