fix: Fix new nightly lints

This commit is contained in:
Lukas Wirth 2025-03-31 13:26:52 +02:00
parent 00191d8e91
commit 1e1571e1c8
26 changed files with 88 additions and 96 deletions

View File

@@ -1360,8 +1360,7 @@ impl ExprCollector<'_> {
else {
panic!("just expanded a macro, ExpansionSpanMap should be available");
};
let old_span_map =
mem::replace(&mut self.current_span_map, Some(new_span_map.clone()));
let old_span_map = self.current_span_map.replace(new_span_map.clone());
let id = collector(self, Some(expansion.tree()));
self.current_span_map = old_span_map;
self.ast_id_map = prev_ast_id_map;

View File

@@ -329,26 +329,30 @@ impl GenericParams {
params.clone()
} else {
Arc::new(GenericParams {
type_or_consts: all_type_or_consts_enabled
.then(|| params.type_or_consts.clone())
.unwrap_or_else(|| {
type_or_consts: if all_type_or_consts_enabled {
params.type_or_consts.clone()
} else {
{
params
.type_or_consts
.iter()
.filter(|&(idx, _)| enabled(attr_owner_ct(idx)))
.map(|(_, param)| param.clone())
.collect()
}),
lifetimes: all_lifetimes_enabled
.then(|| params.lifetimes.clone())
.unwrap_or_else(|| {
}
},
lifetimes: if all_lifetimes_enabled {
params.lifetimes.clone()
} else {
{
params
.lifetimes
.iter()
.filter(|&(idx, _)| enabled(attr_owner_lt(idx)))
.map(|(_, param)| param.clone())
.collect()
}),
}
},
where_predicates: params.where_predicates.clone(),
types_map: params.types_map.clone(),
})

View File

@@ -305,13 +305,12 @@ impl Attr {
Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
}
(Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
let input = match input.flat_tokens().get(1) {
match input.flat_tokens().get(1) {
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
Some(Box::new(AttrInput::Literal(lit.clone())))
}
_ => None,
};
input
}
}
_ => None,
};

View File

@@ -1314,15 +1314,15 @@ fn coerce_pointee_expand(
}
})
});
let self_for_traits = make::path_from_segments(
make::path_from_segments(
[make::generic_ty_path_segment(
make::name_ref(&struct_name.text()),
self_params_for_traits,
)],
false,
)
.clone_for_update();
self_for_traits
.clone_for_update()
};
let mut span_map = span::SpanMap::empty();

View File

@@ -144,22 +144,21 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
let mut result = vec![];
if fps.is_empty() {
debug!("Unrestricted search for {:?} impls...", trait_);
self.for_trait_impls(trait_, self_ty_fp, |impls| {
result.extend(impls.for_trait(trait_).map(id_to_chalk));
ControlFlow::Continue(())
})
} else {
self.for_trait_impls(trait_, self_ty_fp, |impls| {
result.extend(
fps.iter().flat_map(move |fp| {
_ =
if fps.is_empty() {
debug!("Unrestricted search for {:?} impls...", trait_);
self.for_trait_impls(trait_, self_ty_fp, |impls| {
result.extend(impls.for_trait(trait_).map(id_to_chalk));
ControlFlow::Continue(())
})
} else {
self.for_trait_impls(trait_, self_ty_fp, |impls| {
result.extend(fps.iter().flat_map(move |fp| {
impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
}),
);
ControlFlow::Continue(())
})
};
}));
ControlFlow::Continue(())
})
};
debug!("impls_for_trait returned {} impls", result.len());
result

View File

@@ -242,7 +242,7 @@ impl<'a> PatCtxt<'a> {
ty: &Ty,
subpatterns: Vec<FieldPat>,
) -> PatKind {
let kind = match self.infer.variant_resolution_for_pat(pat) {
match self.infer.variant_resolution_for_pat(pat) {
Some(variant_id) => {
if let VariantId::EnumVariantId(enum_variant) = variant_id {
let substs = match ty.kind(Interner) {
@@ -266,8 +266,7 @@ impl<'a> PatCtxt<'a> {
self.errors.push(PatternError::UnresolvedVariant);
PatKind::Wild
}
};
kind
}
}
fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat {

View File

@@ -32,8 +32,8 @@ fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
},
None => db.trait_impls_in_crate(module.krate()),
};
let result = impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some();
result
impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]

View File

@@ -115,7 +115,7 @@ pub fn dyn_compatibility_of_trait_query(
trait_: TraitId,
) -> Option<DynCompatibilityViolation> {
let mut res = None;
dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
_ = dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
res = Some(osv);
ControlFlow::Break(())
});
@@ -592,7 +592,7 @@ fn contains_illegal_impl_trait_in_trait(
let ret = sig.skip_binders().ret();
let mut visitor = OpaqueTypeCollector(FxHashSet::default());
ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
_ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
// Since we haven't implemented RPITIT in proper way like rustc yet,
// just check whether `ret` contains RPIT for now

View File

@@ -53,7 +53,7 @@ fn check_dyn_compatibility<'a>(
continue;
};
let mut osvs = FxHashSet::default();
dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
_ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
osvs.insert(match osv {
DynCompatibilityViolation::SizedSelf => SizedSelf,
DynCompatibilityViolation::SelfReferential => SelfReferential,

View File

@@ -1143,7 +1143,7 @@ impl<'a> InferenceContext<'a> {
non_assocs: FxHashMap::default(),
};
for ty in tait_candidates {
ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
_ = ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
}
// Non-assoc TAITs can be define-used everywhere as long as they are

View File

@@ -517,10 +517,9 @@ impl InferenceContext<'_> {
return None;
}
let hygiene = self.body.expr_or_pat_path_hygiene(id);
let result = self
.resolver
.resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene)
.and_then(|result| match result {
self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene).and_then(
|result| match result {
ValueNs::LocalBinding(binding) => {
let mir_span = match id {
ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
@@ -530,8 +529,8 @@ impl InferenceContext<'_> {
Some(HirPlace { local: binding, projections: Vec::new() })
}
_ => None,
});
result
},
)
}
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.

View File

@@ -472,8 +472,7 @@ impl InferenceContext<'_> {
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_closure = mem::replace(&mut self.current_closure, id);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
let prev_ret_coercion =
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty)));
let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty));
let prev_resume_yield_tys =
mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
@@ -1168,8 +1167,7 @@ impl InferenceContext<'_> {
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
let prev_ret_coercion =
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty.clone()));
// FIXME: We should handle async blocks like we handle closures
let expected = &Expectation::has_type(ret_ty);

View File

@@ -1027,7 +1027,8 @@ mod resolve {
.assert_ty_ref(Interner)
.clone();
}
let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
// known_ty may contain other variables that are known by now
self.var_stack.push(var);
let result = known_ty.fold_with(self, outer_binder);
@@ -1038,8 +1039,7 @@
(self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
.assert_ty_ref(Interner)
.clone()
};
result
}
}
fn fold_inference_const(

View File

@@ -1013,7 +1013,7 @@ where
T: ?Sized + TypeVisitable<Interner>,
{
let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() };
value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
_ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
collector.placeholders.into_iter().collect()
}

View File

@@ -585,7 +585,7 @@ pub(crate) fn iterate_method_candidates<T>(
mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option<T>,
) -> Option<T> {
let mut slot = None;
iterate_method_candidates_dyn(
_ = iterate_method_candidates_dyn(
ty,
db,
env,
@@ -898,7 +898,10 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
}
};
// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
// FIXME: param coverage
// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
trait_ref.substitution.type_parameters(Interner).any(|ty| {
match unwrap_fundamental(ty).kind(Interner) {
&TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
TyKind::Error => true,
@@ -907,10 +910,7 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
}),
_ => false,
}
});
// FIXME: param coverage
// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
is_not_orphan
})
}
pub fn iterate_path_candidates(

View File

@@ -1635,10 +1635,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>,
) -> Result<Option<BasicBlockId>> {
let begin = self.new_basic_block();
let prev = mem::replace(
&mut self.current_loop_blocks,
Some(LoopBlocks { begin, end: None, place, drop_scope_index: self.drop_scopes.len() }),
);
let prev = self.current_loop_blocks.replace(LoopBlocks {
begin,
end: None,
place,
drop_scope_index: self.drop_scopes.len(),
});
let prev_label = if let Some(label) = label {
// We should generate the end now, to make sure that it wouldn't change later. It is
// bad as we may emit end (unnecessary unreachable block) for unterminating loop, but

View File

@@ -294,15 +294,13 @@ impl Crate {
}
fn core(db: &dyn HirDatabase) -> Option<Crate> {
let result = db
.all_crates()
db.all_crates()
.iter()
.copied()
.find(|&krate| {
matches!(krate.data(db).origin, CrateOrigin::Lang(LangCrateOrigin::Core))
})
.map(Crate::from);
result
.map(Crate::from)
}
}

View File

@@ -79,11 +79,11 @@ pub(crate) fn extract_expressions_from_format_string(
Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
_ => arg,
};
let arg = match arg.split_last() {
match arg.split_last() {
Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
_ => arg,
};
arg
}
});
args.collect()

View File

@@ -64,11 +64,11 @@ pub(crate) fn render_variant_pat(
),
None => {
let name = local_name.unwrap_or_else(|| variant.name(ctx.db()));
let it = (
(
name.as_str().to_smolstr(),
name.display(ctx.db(), ctx.completion.edition).to_smolstr(),
);
it
)
}
};

View File

@@ -629,7 +629,8 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
let krate = self.sema.scope(expr.syntax()).map(|it| it.krate()).unwrap_or_else(|| {
hir::Crate::from(*self.sema.db.all_crates().last().expect("no crate graph present"))
});
let res = code_type
code_type
.autoderef(self.sema.db)
.enumerate()
.find(|(_, deref_code_type)| pattern_type == deref_code_type)
@@ -642,8 +643,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
pattern_type.display(self.sema.db, display_target),
code_type.display(self.sema.db, display_target)
)
});
res
})
}
fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {

View File

@@ -71,7 +71,7 @@ pub(crate) fn goto_type_definition(
sema.descend_into_macros_no_opaque(token)
.into_iter()
.filter_map(|token| {
let ty = sema
sema
.token_ancestors_with_macros(token)
// When `token` is within a macro call, we can't determine its type. Don't continue
// this traversal because otherwise we'll end up returning the type of *that* macro
@@ -103,8 +103,7 @@ pub(crate) fn goto_type_definition(
};
Some(ty)
});
ty
})
})
.for_each(process_ty);
Some(RangeInfo::new(range, res))

View File

@@ -113,7 +113,8 @@ fn punctuation(
) -> Highlight {
let operator_parent = token.parent();
let parent_kind = operator_parent.as_ref().map_or(EOF, SyntaxNode::kind);
let h = match (kind, parent_kind) {
match (kind, parent_kind) {
(T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
(T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
(T![&], REF_EXPR | REF_PAT) => HlTag::Operator(HlOperator::Other).into(),
@@ -240,8 +241,7 @@ fn punctuation(
_ => HlPunct::Other,
}
.into(),
};
h
}
}
fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight {

View File

@@ -62,8 +62,7 @@ impl ProcMacroServerProcess {
let mut srv = create_srv()?;
tracing::info!("sending proc-macro server version check");
match srv.version_check() {
Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
io::ErrorKind::Other,
Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::other(
format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
),
@@ -82,7 +81,7 @@ impl ProcMacroServerProcess {
Err(e) => {
tracing::info!(%e, "proc-macro version check failed");
Err(
io::Error::new(io::ErrorKind::Other, format!("proc-macro server version check failed: {e}")),
io::Error::other(format!("proc-macro server version check failed: {e}")),
)
}
}

View File

@@ -166,12 +166,9 @@ impl<T: Sized + Send + 'static> CommandHandle<T> {
if read_at_least_one_message || exit_status.success() {
Ok(())
} else {
Err(io::Error::new(
io::ErrorKind::Other,
format!(
"Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
),
))
Err(io::Error::other(format!(
"Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
)))
}
}
}

View File

@@ -363,7 +363,8 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
) -> impl Iterator<Item = TokenTreesView<'a, S>> {
let mut subtree_iter = self.iter();
let mut need_to_yield_even_if_empty = true;
let result = std::iter::from_fn(move || {
std::iter::from_fn(move || {
if subtree_iter.is_empty() && !need_to_yield_even_if_empty {
return None;
};
@@ -379,8 +380,7 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
result = subtree_iter.from_savepoint(savepoint);
}
Some(result)
});
result
})
}
}

View File

@@ -40,7 +40,7 @@ pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThread
debug!("sending message {:#?}", msg);
if let Err(e) = reader_sender.send(msg) {
return Err(io::Error::new(io::ErrorKind::Other, e));
return Err(io::Error::other(e));
}
if is_exit {