Auto merge of #144603 - lnicola:sync-from-ra, r=lnicola

`rust-analyzer` subtree update

Subtree update of `rust-analyzer` to 511c999bea.

Created using https://github.com/rust-lang/josh-sync.

r? `@ghost`
Commit b7c0421d4d by bors, 2025-07-28 19:39:43 +00:00
97 changed files with 2183 additions and 1425 deletions

.github/workflows/rustc-pull.yml (new file)

@ -0,0 +1,20 @@
name: rustc-pull
on:
workflow_dispatch:
schedule:
# Run at 04:00 UTC every Monday and Thursday
- cron: '0 4 * * 1,4'
jobs:
pull:
if: github.repository == 'rust-lang/rust-analyzer'
uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main
with:
zulip-stream-id: 185405
zulip-bot-email: "rust-analyzer-ci-bot@rust-lang.zulipchat.com"
pr-base-branch: master
branch-name: rustc-pull
secrets:
zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}

Cargo.lock (generated)

@ -395,15 +395,6 @@ dependencies = [
"syn",
]
[[package]]
name = "directories"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs"
version = "6.0.0"
@ -1268,7 +1259,7 @@ dependencies = [
"expect-test",
"intern",
"parser",
"ra-ap-rustc_lexer 0.122.0",
"ra-ap-rustc_lexer 0.123.0",
"rustc-hash 2.1.1",
"smallvec",
"span",
@ -1504,7 +1495,7 @@ dependencies = [
"drop_bomb",
"edition",
"expect-test",
"ra-ap-rustc_lexer 0.122.0",
"ra-ap-rustc_lexer 0.123.0",
"rustc-literal-escaper",
"stdx",
"tracing",
@ -1614,7 +1605,7 @@ dependencies = [
"object",
"paths",
"proc-macro-test",
"ra-ap-rustc_lexer 0.122.0",
"ra-ap-rustc_lexer 0.123.0",
"span",
"syntax-bridge",
"tt",
@ -1688,6 +1679,7 @@ dependencies = [
"serde_json",
"span",
"stdx",
"temp-dir",
"toolchain",
"tracing",
"triomphe",
@ -1756,9 +1748,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.122.0"
version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb01e1fec578003c85481c1cad4ff8cd8195b07c2dc85ae3f716108507ae15d5"
checksum = "f18c877575c259d127072e9bfc41d985202262fb4d6bfdae3d1252147c2562c2"
dependencies = [
"bitflags 2.9.1",
"ra-ap-rustc_hashes",
@ -1768,18 +1760,18 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_hashes"
version = "0.122.0"
version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0ec056e72a472ffef8761ce96ece6c626eb07368c09d0105b6df30d27d07673"
checksum = "2439ed1df3472443133b66949f81080dff88089b42f825761455463709ee1cad"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.122.0"
version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fcdd1001db0295e59052e9f53aeda588bbe81e362534f4687d41bd44777b5a7"
checksum = "57a24fe0be21be1f8ebc21dcb40129214fb4cefb0f2753f3d46b6dbe656a1a45"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@ -1787,9 +1779,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.122.0"
version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "728d64dd98e25530b32e3f7c7c1e844e52722b269360daa1cdeba9dff9727a26"
checksum = "844a27ddcad0116facae2df8e741fd788662cf93dc13029cd864f2b8013b81f9"
dependencies = [
"proc-macro2",
"quote",
@ -1809,9 +1801,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.122.0"
version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "415f0821f512608d825b3215489a6a6a2c18ed9f0045953d514e7ec23d4b90ab"
checksum = "2b734cfcb577d09877799a22742f1bd398be6c00bc428d9de56d48d11ece5771"
dependencies = [
"memchr",
"unicode-properties",
@ -1830,9 +1822,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.122.0"
version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4657fcfdfe06e2a02ec8180d4e7c95aecf4811ba50367e363d1a2300b7623284"
checksum = "75b0ee1f059b9dea0818c6c7267478926eee95ba4c7dcf89c8db32fa165d3904"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@ -2293,6 +2285,12 @@ dependencies = [
"tt",
]
[[package]]
name = "temp-dir"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83176759e9416cf81ee66cb6508dbfe9c96f20b8b56265a39917551c23c70964"
[[package]]
name = "tenthash"
version = "1.1.0"
@ -2592,7 +2590,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"intern",
"ra-ap-rustc_lexer 0.122.0",
"ra-ap-rustc_lexer 0.123.0",
"stdx",
"text-size",
]
@ -3105,7 +3103,6 @@ name = "xtask"
version = "0.1.0"
dependencies = [
"anyhow",
"directories",
"edition",
"either",
"flate2",


@ -89,11 +89,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.122", default-features = false }
ra-ap-rustc_lexer = { version = "0.123", default-features = false }
ra-ap-rustc_parse_format = { version = "0.121", default-features = false }
ra-ap-rustc_index = { version = "0.122", default-features = false }
ra-ap-rustc_abi = { version = "0.122", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.122", default-features = false }
ra-ap-rustc_index = { version = "0.123", default-features = false }
ra-ap-rustc_abi = { version = "0.123", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.123", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@ -156,6 +156,7 @@ smallvec = { version = "1.15.1", features = [
"const_generics",
] }
smol_str = "0.3.2"
temp-dir = "0.1.16"
text-size = "1.1.1"
tracing = "0.1.41"
tracing-tree = "0.4.0"


@ -30,6 +30,7 @@ pub type ProcMacroPaths =
pub enum ProcMacroLoadingError {
Disabled,
FailedToBuild,
ExpectedProcMacroArtifact,
MissingDylibPath,
NotYetBuilt,
NoProcMacros,
@ -39,7 +40,8 @@ impl ProcMacroLoadingError {
pub fn is_hard_error(&self) -> bool {
match self {
ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
ProcMacroLoadingError::FailedToBuild
ProcMacroLoadingError::ExpectedProcMacroArtifact
| ProcMacroLoadingError::FailedToBuild
| ProcMacroLoadingError::MissingDylibPath
| ProcMacroLoadingError::NoProcMacros
| ProcMacroLoadingError::ProcMacroSrvError(_) => true,
@ -51,10 +53,16 @@ impl Error for ProcMacroLoadingError {}
impl fmt::Display for ProcMacroLoadingError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ProcMacroLoadingError::ExpectedProcMacroArtifact => {
write!(f, "proc-macro crate did not build proc-macro artifact")
}
ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
ProcMacroLoadingError::MissingDylibPath => {
write!(f, "proc-macro crate build data is missing a dylib path")
write!(
f,
"proc-macro crate built but the dylib path is missing, this indicates a problem with your build system."
)
}
ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
ProcMacroLoadingError::NoProcMacros => {

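For orientation, a minimal consumer sketch of the soft/hard split above (the `report` helper is hypothetical, not part of this diff):

```rust
// Hypothetical consumer: soft errors (Disabled, NotYetBuilt) are expected
// states; everything else, including the new ExpectedProcMacroArtifact,
// is reported as a real failure via the Display impl above.
fn report(err: &ProcMacroLoadingError) {
    if err.is_hard_error() {
        eprintln!("error: {err}");
    } else {
        eprintln!("note: {err}");
    }
}
```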

@ -16,7 +16,7 @@ use std::{
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name};
use hir_expand::{InFile, MacroCallId, mod_path::ModPath, name::Name};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
@ -281,7 +281,6 @@ struct FormatTemplate {
#[derive(Debug, Eq, PartialEq)]
pub enum ExpressionStoreDiagnostics {
InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
MacroError { node: InFile<MacroCallPtr>, err: ExpandError },
UnresolvedMacroCall { node: InFile<MacroCallPtr>, path: ModPath },
UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String },


@ -960,37 +960,28 @@ impl ExprCollector<'_> {
impl_trait_lower_fn: ImplTraitLowerFn<'_>,
) -> TypeBound {
match node.kind() {
ast::TypeBoundKind::PathType(path_type) => {
let m = match node.question_mark_token() {
Some(_) => TraitBoundModifier::Maybe,
None => TraitBoundModifier::None,
};
self.lower_path_type(&path_type, impl_trait_lower_fn)
.map(|p| {
TypeBound::Path(self.alloc_path(p, AstPtr::new(&path_type).upcast()), m)
})
.unwrap_or(TypeBound::Error)
}
ast::TypeBoundKind::ForType(for_type) => {
let lt_refs = match for_type.generic_param_list() {
ast::TypeBoundKind::PathType(binder, path_type) => {
let binder = match binder.and_then(|it| it.generic_param_list()) {
Some(gpl) => gpl
.lifetime_params()
.flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(&lt.text())))
.collect(),
None => ThinVec::default(),
};
let path = for_type.ty().and_then(|ty| match &ty {
ast::Type::PathType(path_type) => {
self.lower_path_type(path_type, impl_trait_lower_fn).map(|p| (p, ty))
}
_ => None,
});
match path {
Some((p, ty)) => {
TypeBound::ForLifetime(lt_refs, self.alloc_path(p, AstPtr::new(&ty)))
}
None => TypeBound::Error,
}
let m = match node.question_mark_token() {
Some(_) => TraitBoundModifier::Maybe,
None => TraitBoundModifier::None,
};
self.lower_path_type(&path_type, impl_trait_lower_fn)
.map(|p| {
let path = self.alloc_path(p, AstPtr::new(&path_type).upcast());
if binder.is_empty() {
TypeBound::Path(path, m)
} else {
TypeBound::ForLifetime(binder, path)
}
})
.unwrap_or(TypeBound::Error)
}
ast::TypeBoundKind::Use(gal) => TypeBound::Use(
gal.use_bound_generic_args()
@ -1981,13 +1972,7 @@ impl ExprCollector<'_> {
return collector(self, None);
}
};
if record_diagnostics {
if let Some(err) = res.err {
self.store
.diagnostics
.push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
}
}
// No need to push macro and parsing errors as they'll be recreated from `macro_calls()`.
match res.value {
Some((mark, expansion)) => {
@ -1997,10 +1982,6 @@ impl ExprCollector<'_> {
self.store.expansions.insert(macro_call_ptr, macro_file);
}
if record_diagnostics {
// FIXME: Report parse errors here
}
let id = collector(self, expansion.map(|it| it.tree()));
self.expander.exit(mark);
id

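For reference, the bound shapes the rewritten lowering distinguishes, in surface syntax (illustrative snippet, not from this commit): the `for<'a>` binder is now part of the path bound itself, so a separate `ForType` kind is no longer needed.

```rust
fn demo<T, F>()
where
    T: Clone,                          // plain PathType bound -> TypeBound::Path
    T: ?Sized,                         // `?` token -> TraitBoundModifier::Maybe
    F: for<'a> Fn(&'a str) -> &'a str, // non-empty binder -> TypeBound::ForLifetime
{
}
```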

@ -180,17 +180,18 @@ impl GenericParamsCollector {
continue;
};
let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
// Higher-Ranked Trait Bounds
param_list
.lifetime_params()
.map(|lifetime_param| {
lifetime_param
.lifetime()
.map_or_else(Name::missing, |lt| Name::new_lifetime(&lt.text()))
})
.collect()
});
let lifetimes: Option<Box<_>> =
pred.for_binder().and_then(|it| it.generic_param_list()).map(|param_list| {
// Higher-Ranked Trait Bounds
param_list
.lifetime_params()
.map(|lifetime_param| {
lifetime_param
.lifetime()
.map_or_else(Name::missing, |lt| Name::new_lifetime(&lt.text()))
})
.collect()
});
for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
self.lower_type_bound_as_predicate(ec, bound, lifetimes.as_deref(), target);
}

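The predicate-level binder read via `pred.for_binder()` is the `for<'a>` that prefixes a whole where-predicate, as in this illustrative example:

```rust
// `for<'a>` here belongs to the predicate, not to an individual bound.
fn call_with<F>(f: F) -> usize
where
    for<'a> F: Fn(&'a str) -> usize,
{
    f("higher-ranked")
}
```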

@ -27,7 +27,7 @@ pub enum Path {
}
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = {
assert!(size_of::<Path>() == 24);
assert!(size_of::<Option<Path>>() == 24);


@ -148,7 +148,7 @@ pub enum TypeRef {
Error,
}
#[cfg(target_arch = "x86_64")]
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;


@ -175,8 +175,9 @@ impl ExprValidator {
});
}
let receiver_ty = self.infer[*receiver].clone();
checker.prev_receiver_ty = Some(receiver_ty);
if let Some(receiver_ty) = self.infer.type_of_expr_with_adjust(*receiver) {
checker.prev_receiver_ty = Some(receiver_ty.clone());
}
}
}
@ -187,7 +188,9 @@ impl ExprValidator {
arms: &[MatchArm],
db: &dyn HirDatabase,
) {
let scrut_ty = &self.infer[scrutinee_expr];
let Some(scrut_ty) = self.infer.type_of_expr_with_adjust(scrutinee_expr) else {
return;
};
if scrut_ty.contains_unknown() {
return;
}
@ -200,7 +203,7 @@ impl ExprValidator {
// Note: Skipping the entire diagnostic rather than just not including a faulty match arm is
// preferred to avoid the chance of false positives.
for arm in arms {
let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else {
let Some(pat_ty) = self.infer.type_of_pat_with_adjust(arm.pat) else {
return;
};
if pat_ty.contains_unknown() {
@ -328,7 +331,7 @@ impl ExprValidator {
continue;
}
let Some(initializer) = initializer else { continue };
let ty = &self.infer[initializer];
let Some(ty) = self.infer.type_of_expr_with_adjust(initializer) else { continue };
if ty.contains_unknown() {
continue;
}
@ -433,44 +436,44 @@ impl ExprValidator {
Statement::Expr { expr, .. } => Some(*expr),
_ => None,
});
if let Some(last_then_expr) = last_then_expr {
let last_then_expr_ty = &self.infer[last_then_expr];
if last_then_expr_ty.is_never() {
// Only look at sources if the then branch diverges and we have an else branch.
let source_map = db.body_with_source_map(self.owner).1;
let Ok(source_ptr) = source_map.expr_syntax(id) else {
return;
};
let root = source_ptr.file_syntax(db);
let either::Left(ast::Expr::IfExpr(if_expr)) =
source_ptr.value.to_node(&root)
else {
return;
};
let mut top_if_expr = if_expr;
loop {
let parent = top_if_expr.syntax().parent();
let has_parent_expr_stmt_or_stmt_list =
parent.as_ref().is_some_and(|node| {
ast::ExprStmt::can_cast(node.kind())
| ast::StmtList::can_cast(node.kind())
});
if has_parent_expr_stmt_or_stmt_list {
// Only emit diagnostic if parent or direct ancestor is either
// an expr stmt or a stmt list.
break;
}
let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else {
// Bail if parent is neither an if expr, an expr stmt nor a stmt list.
return;
};
// Check parent if expr.
top_if_expr = parent_if_expr;
if let Some(last_then_expr) = last_then_expr
&& let Some(last_then_expr_ty) =
self.infer.type_of_expr_with_adjust(last_then_expr)
&& last_then_expr_ty.is_never()
{
// Only look at sources if the then branch diverges and we have an else branch.
let source_map = db.body_with_source_map(self.owner).1;
let Ok(source_ptr) = source_map.expr_syntax(id) else {
return;
};
let root = source_ptr.file_syntax(db);
let either::Left(ast::Expr::IfExpr(if_expr)) = source_ptr.value.to_node(&root)
else {
return;
};
let mut top_if_expr = if_expr;
loop {
let parent = top_if_expr.syntax().parent();
let has_parent_expr_stmt_or_stmt_list =
parent.as_ref().is_some_and(|node| {
ast::ExprStmt::can_cast(node.kind())
| ast::StmtList::can_cast(node.kind())
});
if has_parent_expr_stmt_or_stmt_list {
// Only emit diagnostic if parent or direct ancestor is either
// an expr stmt or a stmt list.
break;
}
self.diagnostics
.push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id })
let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else {
// Bail if parent is neither an if expr, an expr stmt nor a stmt list.
return;
};
// Check parent if expr.
top_if_expr = parent_if_expr;
}
self.diagnostics
.push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id })
}
}
}

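One illustrative case (not from the commit) of why these checks now read adjusted types: match ergonomics insert deref adjustments on the scrutinee, so the unadjusted expression type can disagree with the type the patterns are actually checked against.

```rust
// Scrutinee has type `&Option<i32>`; a deref adjustment lets the patterns
// be checked against `Option<i32>` without an explicit `*o`.
fn is_some(o: &Option<i32>) -> bool {
    match o {
        Some(_) => true,
        None => false,
    }
}
```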

@ -561,6 +561,32 @@ impl InferenceResult {
ExprOrPatId::PatId(id) => self.type_of_pat.get(id),
}
}
pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<&Ty> {
match self.expr_adjustments.get(&id).and_then(|adjustments| {
adjustments
.iter()
.filter(|adj| {
// https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140
!matches!(
adj,
Adjustment {
kind: Adjust::NeverToAny,
target,
} if target.is_never()
)
})
.next_back()
}) {
Some(adjustment) => Some(&adjustment.target),
None => self.type_of_expr.get(id),
}
}
pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<&Ty> {
match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
adjusted @ Some(_) => adjusted,
None => self.type_of_pat.get(id),
}
}
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}

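A minimal illustration of the `NeverToAny` case the filter above excludes, mirroring the rustc behavior linked in the comment:

```rust
// `return 0` has type `!`, coerced to `i32` by a NeverToAny adjustment.
// `type_of_expr_with_adjust` skips that trailing adjustment so a diverging
// expression still reports `!` rather than the coercion target.
fn pick(flag: bool) -> i32 {
    let x: i32 = if flag { 1 } else { return 0 };
    x
}
```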

@ -3,9 +3,9 @@
use std::{cmp, ops::Bound};
use hir_def::{
AdtId, VariantId,
layout::{Integer, ReprOptions, TargetDataLayout},
signatures::{StructFlags, VariantFields},
AdtId, VariantId,
};
use intern::sym;
use rustc_index::IndexVec;
@ -13,9 +13,9 @@ use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
db::HirDatabase,
layout::{field_ty, Layout, LayoutError},
Substitution, TraitEnvironment,
db::HirDatabase,
layout::{Layout, LayoutError, field_ty},
};
use super::LayoutCx;


@ -590,9 +590,14 @@ impl<'a> TyLoweringContext<'a> {
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
let pointee_sized = LangItem::PointeeSized
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
if meta_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
let destruct = LangItem::Destruct
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
let hir_trait_id = trait_ref.hir_trait_id();
if meta_sized.is_some_and(|it| it == hir_trait_id)
|| destruct.is_some_and(|it| it == hir_trait_id)
{
// Ignore this bound
} else if pointee_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
} else if pointee_sized.is_some_and(|it| it == hir_trait_id) {
// Regard this as `?Sized` bound
ctx.ty_ctx().unsized_types.insert(self_ty);
} else {


@ -2349,3 +2349,37 @@ fn test() {
"#]],
);
}
#[test]
fn rust_destruct_option_clone() {
check_types(
r#"
//- minicore: option, drop
fn test(o: &Option<i32>) {
o.my_clone();
//^^^^^^^^^^^^ Option<i32>
}
pub trait MyClone: Sized {
fn my_clone(&self) -> Self;
}
impl<T> const MyClone for Option<T>
where
T: ~const MyClone + ~const Destruct,
{
fn my_clone(&self) -> Self {
match self {
Some(x) => Some(x.my_clone()),
None => None,
}
}
}
impl const MyClone for i32 {
fn my_clone(&self) -> Self {
*self
}
}
#[lang = "destruct"]
pub trait Destruct {}
"#,
);
}


@ -1922,10 +1922,6 @@ impl DefWithBody {
Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints);
}
source_map
.macro_calls()
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
expr_store_diagnostics(db, acc, &source_map);
let infer = db.infer(self.into());
@ -2130,9 +2126,9 @@ impl DefWithBody {
}
}
fn expr_store_diagnostics(
db: &dyn HirDatabase,
acc: &mut Vec<AnyDiagnostic<'_>>,
fn expr_store_diagnostics<'db>(
db: &'db dyn HirDatabase,
acc: &mut Vec<AnyDiagnostic<'db>>,
source_map: &ExpressionStoreSourceMap,
) {
for diag in source_map.diagnostics() {
@ -2140,30 +2136,6 @@ fn expr_store_diagnostics(
ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => {
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
}
ExpressionStoreDiagnostics::MacroError { node, err } => {
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == node.file_id {
Some(
err.span().range
+ db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
)
} else {
None
};
MacroError {
node: (node).map(|it| it.into()),
precise_location,
message,
error,
kind,
}
.into()
}
ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
precise_location: None,
@ -2182,6 +2154,10 @@ fn expr_store_diagnostics(
}
});
}
source_map
.macro_calls()
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function {


@ -441,7 +441,7 @@ impl<'db> SourceAnalyzer<'db> {
) -> Option<GenericSubstitution<'db>> {
let body = self.store()?;
if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?;
let (adt, subst) = infer.type_of_expr_with_adjust(object_expr)?.as_adt()?;
return Some(GenericSubstitution::new(
adt.into(),
subst.clone(),
@ -1780,10 +1780,3 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
let ctx = span_map.span_at(name.value.text_range().start()).ctx;
HygieneId::new(ctx.opaque_and_semitransparent(db))
}
fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
Some(adjustment) => Some(&adjustment.target),
None => Some(&infer[id]),
}
}


@ -2,6 +2,7 @@ use hir::HasSource;
use syntax::{
Edition,
ast::{self, AstNode, make},
syntax_editor::{Position, SyntaxEditor},
};
use crate::{
@ -147,45 +148,78 @@ fn add_missing_impl_members_inner(
let target = impl_def.syntax().text_range();
acc.add(AssistId::quick_fix(assist_id), label, target, |edit| {
let new_impl_def = edit.make_mut(impl_def.clone());
let first_new_item = add_trait_assoc_items_to_impl(
let new_item = add_trait_assoc_items_to_impl(
&ctx.sema,
ctx.config,
&missing_items,
trait_,
&new_impl_def,
&impl_def,
&target_scope,
);
let Some((first_new_item, other_items)) = new_item.split_first() else {
return;
};
let mut first_new_item = if let DefaultMethods::No = mode
&& let ast::AssocItem::Fn(func) = &first_new_item
&& let Some(body) = try_gen_trait_body(
ctx,
func,
trait_ref,
&impl_def,
target_scope.krate().edition(ctx.sema.db),
)
&& let Some(func_body) = func.body()
{
let mut func_editor = SyntaxEditor::new(first_new_item.syntax().clone_subtree());
func_editor.replace(func_body.syntax(), body.syntax());
ast::AssocItem::cast(func_editor.finish().new_root().clone())
} else {
Some(first_new_item.clone())
};
let new_assoc_items = first_new_item
.clone()
.into_iter()
.chain(other_items.iter().cloned())
.map(either::Either::Right)
.collect::<Vec<_>>();
let mut editor = edit.make_editor(impl_def.syntax());
if let Some(assoc_item_list) = impl_def.assoc_item_list() {
let items = new_assoc_items.into_iter().filter_map(either::Either::right).collect();
assoc_item_list.add_items(&mut editor, items);
} else {
let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update();
editor.insert_all(
Position::after(impl_def.syntax()),
vec![make::tokens::whitespace(" ").into(), assoc_item_list.syntax().clone().into()],
);
first_new_item = assoc_item_list.assoc_items().next();
}
if let Some(cap) = ctx.config.snippet_cap {
let mut placeholder = None;
if let DefaultMethods::No = mode {
if let ast::AssocItem::Fn(func) = &first_new_item {
if try_gen_trait_body(
ctx,
func,
trait_ref,
&impl_def,
target_scope.krate().edition(ctx.sema.db),
)
.is_none()
if let Some(ast::AssocItem::Fn(func)) = &first_new_item {
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
&& m.syntax().text() == "todo!()"
{
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
{
if m.syntax().text() == "todo!()" {
placeholder = Some(m);
}
}
placeholder = Some(m);
}
}
}
if let Some(macro_call) = placeholder {
edit.add_placeholder_snippet(cap, macro_call);
} else {
edit.add_tabstop_before(cap, first_new_item);
let placeholder = edit.make_placeholder_snippet(cap);
editor.add_annotation(macro_call.syntax(), placeholder);
} else if let Some(first_new_item) = first_new_item {
let tabstop = edit.make_tabstop_before(cap);
editor.add_annotation(first_new_item.syntax(), tabstop);
};
};
edit.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@ -195,7 +229,7 @@ fn try_gen_trait_body(
trait_ref: hir::TraitRef<'_>,
impl_def: &ast::Impl,
edition: Edition,
) -> Option<()> {
) -> Option<ast::BlockExpr> {
let trait_path = make::ext::ident_path(
&trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(),
);
@ -322,7 +356,7 @@ impl Foo for S {
}
#[test]
fn test_impl_def_without_braces() {
fn test_impl_def_without_braces_macro() {
check_assist(
add_missing_impl_members,
r#"
@ -340,6 +374,33 @@ impl Foo for S {
);
}
#[test]
fn test_impl_def_without_braces_tabstop_first_item() {
check_assist(
add_missing_impl_members,
r#"
trait Foo {
type Output;
fn foo(&self);
}
struct S;
impl Foo for S { $0 }"#,
r#"
trait Foo {
type Output;
fn foo(&self);
}
struct S;
impl Foo for S {
$0type Output;
fn foo(&self) {
todo!()
}
}"#,
);
}
#[test]
fn fill_in_type_params_1() {
check_assist(


@ -228,8 +228,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
closure_body,
Some(ast::ElseBranch::Block(make.block_expr(None, Some(none_path)))),
)
.indent(mcall.indent_level())
.clone_for_update();
.indent(mcall.indent_level());
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());
editor.add_mappings(make.finish_with_mappings());


@ -13,7 +13,6 @@ use syntax::{
edit::{AstNodeEdit, IndentLevel},
make,
},
ted,
};
use crate::{
@ -117,7 +116,7 @@ fn if_expr_to_guarded_return(
then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?;
let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update();
let then_block_items = then_block.dedent(IndentLevel(1));
let end_of_then = then_block_items.syntax().last_child_or_token()?;
let end_of_then = if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) {
@ -132,7 +131,6 @@ fn if_expr_to_guarded_return(
"Convert to guarded return",
target,
|edit| {
let if_expr = edit.make_mut(if_expr);
let if_indent_level = IndentLevel::from_node(if_expr.syntax());
let replacement = match if_let_pat {
None => {
@ -143,7 +141,7 @@ fn if_expr_to_guarded_return(
let cond = invert_boolean_expression_legacy(cond_expr);
make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
new_expr.syntax().clone_for_update()
new_expr.syntax().clone()
}
Some(pat) => {
// If-let.
@ -154,7 +152,7 @@ fn if_expr_to_guarded_return(
ast::make::tail_only_block_expr(early_expression),
);
let let_else_stmt = let_else_stmt.indent(if_indent_level);
let_else_stmt.syntax().clone_for_update()
let_else_stmt.syntax().clone()
}
};
@ -168,8 +166,9 @@ fn if_expr_to_guarded_return(
.take_while(|i| *i != end_of_then),
)
.collect();
ted::replace_with_many(if_expr.syntax(), then_statements)
let mut editor = edit.make_editor(if_expr.syntax());
editor.replace_with_many(if_expr.syntax(), then_statements);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -214,7 +213,6 @@ fn let_stmt_to_guarded_return(
"Convert to guarded return",
target,
|edit| {
let let_stmt = edit.make_mut(let_stmt);
let let_indent_level = IndentLevel::from_node(let_stmt.syntax());
let replacement = {
@ -225,10 +223,11 @@ fn let_stmt_to_guarded_return(
ast::make::tail_only_block_expr(early_expression),
);
let let_else_stmt = let_else_stmt.indent(let_indent_level);
let_else_stmt.syntax().clone_for_update()
let_else_stmt.syntax().clone()
};
ted::replace(let_stmt.syntax(), replacement)
let mut editor = edit.make_editor(let_stmt.syntax());
editor.replace(let_stmt.syntax(), replacement);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

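The `ted` to `SyntaxEditor` migration in this and the following assists follows one skeleton, shown here out of context as a sketch (`edit` is the source-change builder and `ctx` the assist context, as in the surrounding diff):

```rust
// Edits are recorded against the immutable tree and committed in one step,
// instead of mutating a `clone_for_update()` tree in place via `ted`.
let mut editor = edit.make_editor(anchor.syntax());
editor.replace(old_node.syntax(), new_node.syntax());
edit.add_file_edits(ctx.vfs_file_id(), editor);
```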

@ -8,8 +8,7 @@ use syntax::{
AstNode, AstToken, NodeOrToken,
SyntaxKind::WHITESPACE,
T,
ast::{self, make},
ted,
ast::{self, make, syntax_factory::SyntaxFactory},
};
// Assist: extract_expressions_from_format_string
@ -58,8 +57,6 @@ pub(crate) fn extract_expressions_from_format_string(
"Extract format expressions",
tt.syntax().text_range(),
|edit| {
let tt = edit.make_mut(tt);
// Extract existing arguments in macro
let tokens = tt.token_trees_and_tokens().collect_vec();
@ -131,8 +128,10 @@ pub(crate) fn extract_expressions_from_format_string(
}
// Insert new args
let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
ted::replace(tt.syntax(), new_tt.syntax());
let make = SyntaxFactory::with_mappings();
let new_tt = make.token_tree(tt_delimiter, new_tt_bits);
let mut editor = edit.make_editor(tt.syntax());
editor.replace(tt.syntax(), new_tt.syntax());
if let Some(cap) = ctx.config.snippet_cap {
// Add placeholder snippets over placeholder args
@ -145,15 +144,19 @@ pub(crate) fn extract_expressions_from_format_string(
};
if stdx::always!(placeholder.kind() == T![_]) {
edit.add_placeholder_snippet_token(cap, placeholder);
let annotation = edit.make_placeholder_snippet(cap);
editor.add_annotation(placeholder, annotation);
}
}
// Add the final tabstop after the format literal
if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) {
edit.add_tabstop_after_token(cap, literal);
let annotation = edit.make_tabstop_after(cap);
editor.add_annotation(literal, annotation);
}
}
editor.add_mappings(make.finish_with_mappings());
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
);

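Snippet placement uses the same annotation pattern across these rewrites; a condensed sketch with names taken from the surrounding diff:

```rust
// Tabstops and placeholders are attached as annotations on nodes or tokens
// of the new tree and are resolved when the editor's edits are applied.
if let Some(cap) = ctx.config.snippet_cap {
    let tabstop = edit.make_tabstop_after(cap);
    editor.add_annotation(literal, tabstop);
    let placeholder = edit.make_placeholder_snippet(cap);
    editor.add_annotation(node.syntax(), placeholder);
}
```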

@ -16,8 +16,9 @@ use syntax::{
SyntaxKind::*,
SyntaxNode, T,
ast::{
self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::IndentLevel,
edit_in_place::Indent, make,
self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility,
edit::{AstNodeEdit, IndentLevel},
make,
},
match_ast, ted,
};
@ -110,20 +111,30 @@ pub(crate) fn extract_struct_from_enum_variant(
let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
// resolve GenericArg in field_list to actual type
let field_list = field_list.clone_for_update();
if let Some((target_scope, source_scope)) =
let field_list = if let Some((target_scope, source_scope)) =
ctx.sema.scope(enum_ast.syntax()).zip(ctx.sema.scope(field_list.syntax()))
{
PathTransform::generic_transformation(&target_scope, &source_scope)
.apply(field_list.syntax());
}
let field_list = field_list.reset_indent();
let field_list =
PathTransform::generic_transformation(&target_scope, &source_scope)
.apply(field_list.syntax());
match_ast! {
match field_list {
ast::RecordFieldList(field_list) => Either::Left(field_list),
ast::TupleFieldList(field_list) => Either::Right(field_list),
_ => unreachable!(),
}
}
} else {
field_list.clone_for_update()
};
let def =
create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
let enum_ast = variant.parent_enum();
let indent = enum_ast.indent_level();
def.reindent_to(indent);
let def = def.indent(indent);
ted::insert_all(
ted::Position::before(enum_ast.syntax()),
@ -279,7 +290,7 @@ fn create_struct_def(
field_list.clone().into()
}
};
field_list.reindent_to(IndentLevel::single());
let field_list = field_list.indent(IndentLevel::single());
let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update();


@ -7,7 +7,9 @@ use syntax::{
NodeOrToken, SyntaxKind, SyntaxNode, T,
algo::ancestors_at_offset,
ast::{
self, AstNode, edit::IndentLevel, edit_in_place::Indent, make,
self, AstNode,
edit::{AstNodeEdit, IndentLevel},
make,
syntax_factory::SyntaxFactory,
},
syntax_editor::Position,
@ -253,12 +255,11 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
// `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`.
editor.replace(expr_replace, name_expr.syntax());
make.block_expr([new_stmt], Some(to_wrap.clone()))
};
}
// fixup indentation of block
.indent_with_mapping(indent_to, &make);
editor.replace(to_wrap.syntax(), block.syntax());
// fixup indentation of block
block.indent(indent_to);
}
}


@ -114,9 +114,13 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let source_scope = ctx.sema.scope(v.syntax());
let target_scope = ctx.sema.scope(strukt.syntax());
if let (Some(s), Some(t)) = (source_scope, target_scope) {
PathTransform::generic_transformation(&t, &s).apply(v.syntax());
ast::Fn::cast(
PathTransform::generic_transformation(&t, &s).apply(v.syntax()),
)
.unwrap_or(v)
} else {
v
}
v
}
None => return,
};


@ -255,7 +255,6 @@ fn generate_impl(
delegee: &Delegee,
edition: Edition,
) -> Option<ast::Impl> {
let delegate: ast::Impl;
let db = ctx.db();
let ast_strukt = &strukt.strukt;
let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string()));
@ -266,7 +265,7 @@ fn generate_impl(
let bound_def = ctx.sema.source(delegee.to_owned())?.value;
let bound_params = bound_def.generic_param_list();
delegate = make::impl_trait(
let delegate = make::impl_trait(
delegee.is_unsafe(db),
bound_params.clone(),
bound_params.map(|params| params.to_generic_args()),
@ -304,7 +303,7 @@ fn generate_impl(
let target_scope = ctx.sema.scope(strukt.strukt.syntax())?;
let source_scope = ctx.sema.scope(bound_def.syntax())?;
let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
transform.apply(delegate.syntax());
ast::Impl::cast(transform.apply(delegate.syntax()))
}
Delegee::Impls(trait_, old_impl) => {
let old_impl = ctx.sema.source(old_impl.to_owned())?.value;
@ -358,20 +357,28 @@ fn generate_impl(
// 2.3) Instantiate generics with `transform_impl`, this step also
// remove unused params.
let mut trait_gen_args = old_impl.trait_()?.generic_arg_list();
if let Some(trait_args) = &mut trait_gen_args {
*trait_args = trait_args.clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, trait_args.syntax())?;
}
let trait_gen_args = old_impl.trait_()?.generic_arg_list().and_then(|trait_args| {
let trait_args = &mut trait_args.clone_for_update();
if let Some(new_args) = transform_impl(
ctx,
ast_strukt,
&old_impl,
&transform_args,
trait_args.clone_subtree(),
) {
*trait_args = new_args.clone_subtree();
Some(new_args)
} else {
None
}
});
let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
let path_type =
make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?;
let path_type = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type)?;
// 3) Generate delegate trait impl
delegate = make::impl_trait(
let delegate = make::impl_trait(
trait_.is_unsafe(db),
trait_gen_params,
trait_gen_args,
@ -385,7 +392,6 @@ fn generate_impl(
None,
)
.clone_for_update();
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type =
make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
@ -398,7 +404,7 @@ fn generate_impl(
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
{
let item = item.clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?;
let item = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item)?;
let assoc = process_assoc_item(item, qualified_path_type.clone(), field_name)?;
delegate_assoc_items.add_item(assoc);
@ -408,19 +414,18 @@ fn generate_impl(
if let Some(wc) = delegate.where_clause() {
remove_useless_where_clauses(&delegate.trait_()?, &delegate.self_ty()?, wc);
}
Some(delegate)
}
}
Some(delegate)
}
fn transform_impl(
fn transform_impl<N: ast::AstNode>(
ctx: &AssistContext<'_>,
strukt: &ast::Struct,
old_impl: &ast::Impl,
args: &Option<GenericArgList>,
syntax: &syntax::SyntaxNode,
) -> Option<()> {
syntax: N,
) -> Option<N> {
let source_scope = ctx.sema.scope(old_impl.self_ty()?.syntax())?;
let target_scope = ctx.sema.scope(strukt.syntax())?;
let hir_old_impl = ctx.sema.to_impl_def(old_impl)?;
@ -437,8 +442,7 @@ fn transform_impl(
},
);
transform.apply(syntax);
Some(())
N::cast(transform.apply(syntax.syntax()))
}
fn remove_instantiated_params(
@ -570,9 +574,7 @@ where
let scope = ctx.sema.scope(item.syntax())?;
let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone());
transform.apply(item.syntax());
Some(item)
N::cast(transform.apply(item.syntax()))
}
fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> {
@ -767,7 +769,7 @@ fn func_assoc_item(
)
.clone_for_update();
Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update()))
Some(AssocItem::Fn(func.indent(edit::IndentLevel(1))))
}
fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> {

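The generic `transform_impl<N: ast::AstNode>` reflects that `PathTransform::apply` now returns the transformed root instead of mutating in place; assuming that return type, the calling convention reduces to this sketch:

```rust
// Apply a path transform and re-cast the returned root to the node type.
fn transform_node<N: ast::AstNode>(transform: PathTransform<'_>, node: N) -> Option<N> {
    N::cast(transform.apply(node.syntax()))
}
```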

@ -743,17 +743,30 @@ fn fn_generic_params(
let where_preds: Vec<ast::WherePred> =
where_preds.into_iter().map(|it| it.node.clone_for_update()).collect();
// 4. Rewrite paths
if let Some(param) = generic_params.first() {
let source_scope = ctx.sema.scope(param.syntax())?;
let target_scope = ctx.sema.scope(&target.parent())?;
if source_scope.module() != target_scope.module() {
let (generic_params, where_preds): (Vec<ast::GenericParam>, Vec<ast::WherePred>) =
if let Some(param) = generic_params.first()
&& let source_scope = ctx.sema.scope(param.syntax())?
&& let target_scope = ctx.sema.scope(&target.parent())?
&& source_scope.module() != target_scope.module()
{
// 4. Rewrite paths
let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
let generic_params = generic_params.iter().map(|it| it.syntax());
let where_preds = where_preds.iter().map(|it| it.syntax());
transform.apply_all(generic_params.chain(where_preds));
}
}
transform
.apply_all(generic_params.chain(where_preds))
.into_iter()
.filter_map(|it| {
if let Some(it) = ast::GenericParam::cast(it.clone()) {
Some(either::Either::Left(it))
} else {
ast::WherePred::cast(it).map(either::Either::Right)
}
})
.partition_map(|it| it)
} else {
(generic_params, where_preds)
};
let generic_param_list = make::generic_param_list(generic_params);
let where_clause =


@ -1,12 +1,17 @@
use syntax::{
ast::{self, AstNode, HasName, edit_in_place::Indent, make},
ast::{self, AstNode, HasGenericParams, HasName, edit_in_place::Indent, make},
syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, utils};
use crate::{
AssistContext, AssistId, Assists,
utils::{self, DefaultMethods, IgnoreAssocItems},
};
fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) {
fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Indent) {
let indent = nominal.indent_level();
impl_.indent(indent);
editor.insert_all(
Position::after(nominal.syntax()),
vec![
@ -120,6 +125,126 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
)
}
// Assist: generate_impl_trait
//
// Adds this trait impl for a type.
//
// ```
// trait $0Foo {
// fn foo(&self) -> i32;
// }
// ```
// ->
// ```
// trait Foo {
// fn foo(&self) -> i32;
// }
//
// impl Foo for ${1:_} {
// fn foo(&self) -> i32 {
// $0todo!()
// }
// }
// ```
pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let name = ctx.find_node_at_offset::<ast::Name>()?;
let trait_ = ast::Trait::cast(name.syntax().parent()?)?;
let target_scope = ctx.sema.scope(trait_.syntax())?;
let hir_trait = ctx.sema.to_def(&trait_)?;
let target = trait_.syntax().text_range();
acc.add(
AssistId::generate("generate_impl_trait"),
format!("Generate `{name}` impl for type"),
target,
|edit| {
let mut editor = edit.make_editor(trait_.syntax());
let holder_arg = ast::GenericArg::TypeArg(make::type_arg(make::ty_placeholder()));
let missing_items = utils::filter_assoc_items(
&ctx.sema,
&hir_trait.items(ctx.db()),
DefaultMethods::No,
IgnoreAssocItems::DocHiddenAttrPresent,
);
let trait_gen_args = trait_.generic_param_list().map(|list| {
make::generic_arg_list(list.generic_params().map(|_| holder_arg.clone()))
});
let make_impl_ = |body| {
make::impl_trait(
trait_.unsafe_token().is_some(),
None,
trait_gen_args.clone(),
None,
None,
false,
make::ty(&name.text()),
make::ty_placeholder(),
None,
None,
body,
)
.clone_for_update()
};
let impl_ = if missing_items.is_empty() {
make_impl_(None)
} else {
let impl_ = make_impl_(None);
let assoc_items = utils::add_trait_assoc_items_to_impl(
&ctx.sema,
ctx.config,
&missing_items,
hir_trait,
&impl_,
&target_scope,
);
let assoc_items = assoc_items.into_iter().map(either::Either::Right).collect();
let assoc_item_list = make::assoc_item_list(Some(assoc_items));
make_impl_(Some(assoc_item_list))
};
if let Some(cap) = ctx.config.snippet_cap {
if let Some(generics) = impl_.trait_().and_then(|it| it.generic_arg_list()) {
for generic in generics.generic_args() {
let placeholder = edit.make_placeholder_snippet(cap);
editor.add_annotation(generic.syntax(), placeholder);
}
}
if let Some(ty) = impl_.self_ty() {
let placeholder = edit.make_placeholder_snippet(cap);
editor.add_annotation(ty.syntax(), placeholder);
}
if let Some(expr) =
impl_.assoc_item_list().and_then(|it| it.assoc_items().find_map(extract_expr))
{
let tabstop = edit.make_tabstop_before(cap);
editor.add_annotation(expr.syntax(), tabstop);
} else if let Some(l_curly) =
impl_.assoc_item_list().and_then(|it| it.l_curly_token())
{
let tabstop = edit.make_tabstop_after(cap);
editor.add_annotation(l_curly, tabstop);
}
}
insert_impl(&mut editor, &impl_, &trait_);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
fn extract_expr(item: ast::AssocItem) -> Option<ast::Expr> {
let ast::AssocItem::Fn(f) = item else {
return None;
};
f.body()?.tail_expr()
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_target};
@ -492,4 +617,209 @@ mod tests {
"#,
);
}
#[test]
fn test_add_impl_trait() {
check_assist(
generate_impl_trait,
r#"
trait $0Foo {
fn foo(&self) -> i32;
fn bar(&self) -> i32 {
self.foo()
}
}
"#,
r#"
trait Foo {
fn foo(&self) -> i32;
fn bar(&self) -> i32 {
self.foo()
}
}
impl Foo for ${1:_} {
fn foo(&self) -> i32 {
$0todo!()
}
}
"#,
);
}
#[test]
fn test_add_impl_trait_use_generic() {
check_assist(
generate_impl_trait,
r#"
trait $0Foo<T> {
fn foo(&self) -> T;
fn bar(&self) -> T {
self.foo()
}
}
"#,
r#"
trait Foo<T> {
fn foo(&self) -> T;
fn bar(&self) -> T {
self.foo()
}
}
impl Foo<${1:_}> for ${2:_} {
fn foo(&self) -> _ {
$0todo!()
}
}
"#,
);
check_assist(
generate_impl_trait,
r#"
trait $0Foo<T, U> {
fn foo(&self) -> T;
fn bar(&self) -> T {
self.foo()
}
}
"#,
r#"
trait Foo<T, U> {
fn foo(&self) -> T;
fn bar(&self) -> T {
self.foo()
}
}
impl Foo<${1:_}, ${2:_}> for ${3:_} {
fn foo(&self) -> _ {
$0todo!()
}
}
"#,
);
}
#[test]
fn test_add_impl_trait_docs() {
check_assist(
generate_impl_trait,
r#"
/// foo
trait $0Foo {
/// foo method
fn foo(&self) -> i32;
fn bar(&self) -> i32 {
self.foo()
}
}
"#,
r#"
/// foo
trait Foo {
/// foo method
fn foo(&self) -> i32;
fn bar(&self) -> i32 {
self.foo()
}
}
impl Foo for ${1:_} {
fn foo(&self) -> i32 {
$0todo!()
}
}
"#,
);
}
#[test]
fn test_add_impl_trait_assoc_types() {
check_assist(
generate_impl_trait,
r#"
trait $0Foo {
type Output;
fn foo(&self) -> Self::Output;
}
"#,
r#"
trait Foo {
type Output;
fn foo(&self) -> Self::Output;
}
impl Foo for ${1:_} {
type Output;
fn foo(&self) -> Self::Output {
$0todo!()
}
}
"#,
);
}
#[test]
fn test_add_impl_trait_indent() {
check_assist(
generate_impl_trait,
r#"
mod foo {
mod bar {
trait $0Foo {
type Output;
fn foo(&self) -> Self::Output;
}
}
}
"#,
r#"
mod foo {
mod bar {
trait Foo {
type Output;
fn foo(&self) -> Self::Output;
}
impl Foo for ${1:_} {
type Output;
fn foo(&self) -> Self::Output {
$0todo!()
}
}
}
}
"#,
);
}
#[test]
fn test_add_impl_trait_empty() {
check_assist(
generate_impl_trait,
r#"
trait $0Foo {}
"#,
r#"
trait Foo {}
impl Foo for ${1:_} {$0}
"#,
);
}
}


@ -94,7 +94,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
})?;
let _ = process_ref_mut(&fn_);
let assoc_list = make::assoc_item_list().clone_for_update();
let assoc_list = make::assoc_item_list(None).clone_for_update();
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));


@ -4,12 +4,12 @@ use ide_db::{
};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
ted,
syntax_editor::Position,
};
use crate::{
AssistContext, AssistId, Assists,
utils::{find_struct_impl, generate_impl},
utils::{find_struct_impl, generate_impl_with_item},
};
// Assist: generate_new
@ -149,7 +149,53 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
.clone_for_update();
fn_.indent(1.into());
if let Some(cap) = ctx.config.snippet_cap {
let mut editor = builder.make_editor(strukt.syntax());
// Get the node to attach the snippet annotations to
let contain_fn = if let Some(impl_def) = impl_def {
fn_.indent(impl_def.indent_level());
if let Some(l_curly) = impl_def.assoc_item_list().and_then(|list| list.l_curly_token())
{
editor.insert_all(
Position::after(l_curly),
vec![
make::tokens::whitespace(&format!("\n{}", impl_def.indent_level() + 1))
.into(),
fn_.syntax().clone().into(),
make::tokens::whitespace("\n").into(),
],
);
fn_.syntax().clone()
} else {
let items = vec![either::Either::Right(ast::AssocItem::Fn(fn_))];
let list = make::assoc_item_list(Some(items));
editor.insert(Position::after(impl_def.syntax()), list.syntax());
list.syntax().clone()
}
} else {
// Generate a new impl to add the method to
let indent_level = strukt.indent_level();
let body = vec![either::Either::Right(ast::AssocItem::Fn(fn_))];
let list = make::assoc_item_list(Some(body));
let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list));
impl_def.indent(strukt.indent_level());
// Insert it after the adt
editor.insert_all(
Position::after(strukt.syntax()),
vec![
make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
impl_def.syntax().clone().into(),
],
);
impl_def.syntax().clone()
};
if let Some(fn_) = contain_fn.descendants().find_map(ast::Fn::cast)
&& let Some(cap) = ctx.config.snippet_cap
{
match strukt.kind() {
StructKind::Tuple(_) => {
let struct_args = fn_
@ -168,8 +214,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) {
if let Some(fn_pat) = fn_param.pat() {
let fn_pat = fn_pat.syntax().clone();
builder
.add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]);
let placeholder = builder.make_placeholder_snippet(cap);
editor.add_annotation_all(vec![struct_arg, fn_pat], placeholder)
}
}
}
@ -179,36 +225,12 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
// Add a tabstop before the name
if let Some(name) = fn_.name() {
builder.add_tabstop_before(cap, name);
let tabstop_before = builder.make_tabstop_before(cap);
editor.add_annotation(name.syntax(), tabstop_before);
}
}
// Get the mutable version of the impl to modify
let impl_def = if let Some(impl_def) = impl_def {
fn_.indent(impl_def.indent_level());
builder.make_mut(impl_def)
} else {
// Generate a new impl to add the method to
let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone()));
let indent_level = strukt.indent_level();
fn_.indent(indent_level);
// Insert it after the adt
let strukt = builder.make_mut(strukt.clone());
ted::insert_all_raw(
ted::Position::after(strukt.syntax()),
vec![
make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
impl_def.syntax().clone().into(),
],
);
impl_def
};
// Add the `new` method at the start of the impl
impl_def.get_or_create_assoc_item_list().add_item_at_start(fn_.into());
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}


@ -3,7 +3,7 @@ use ide_db::assists::AssistId;
use syntax::{
AstNode, SyntaxKind, T,
ast::{
self, HasGenericParams, HasName,
self, HasGenericParams, HasName, HasVisibility,
edit_in_place::{HasVisibilityEdit, Indent},
make,
},
@ -164,6 +164,12 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
/// `E0449` Trait items always share the visibility of their trait
fn remove_items_visibility(item: &ast::AssocItem) {
if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) {
if let Some(vis) = has_vis.visibility()
&& let Some(token) = vis.syntax().next_sibling_or_token()
&& token.kind() == SyntaxKind::WHITESPACE
{
ted::remove(token);
}
has_vis.set_visibility(None);
}
}
@ -333,11 +339,11 @@ impl F$0oo {
struct Foo;
trait NewTrait {
fn a_func() -> Option<()>;
fn a_func() -> Option<()>;
}
impl NewTrait for Foo {
fn a_func() -> Option<()> {
fn a_func() -> Option<()> {
Some(())
}
}"#,


@ -537,8 +537,13 @@ fn inline(
if let Some(generic_arg_list) = generic_arg_list.clone() {
if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
{
PathTransform::function_call(target, source, function, generic_arg_list)
.apply(body.syntax());
body.reindent_to(IndentLevel(0));
if let Some(new_body) = ast::BlockExpr::cast(
PathTransform::function_call(target, source, function, generic_arg_list)
.apply(body.syntax()),
) {
body = new_body;
}
}
}


@ -5,12 +5,12 @@ use syntax::{
SyntaxKind::WHITESPACE,
T,
ast::{self, AstNode, HasName, make},
ted::{self, Position},
syntax_editor::{Position, SyntaxEditor},
};
use crate::{
AssistConfig, AssistId,
assist_context::{AssistContext, Assists, SourceChangeBuilder},
assist_context::{AssistContext, Assists},
utils::{
DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items,
gen_trait_fn_body, generate_trait_impl,
@ -126,98 +126,56 @@ fn add_assist(
let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`");
acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| {
let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax());
let insert_after = Position::after(adt.syntax());
let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false);
let impl_def_with_items = impl_def_from_trait(
let impl_def = impl_def_from_trait(
&ctx.sema,
ctx.config,
adt,
&annotated_name,
trait_,
replace_trait_path,
impl_is_unsafe,
);
update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
let mut editor = builder.make_editor(attr.syntax());
update_attribute(&mut editor, old_derives, old_tree, old_trait_path, attr);
let trait_path = make::ty_path(replace_trait_path.clone());
match (ctx.config.snippet_cap, impl_def_with_items) {
(None, None) => {
let impl_def = generate_trait_impl(adt, trait_path);
if impl_is_unsafe {
ted::insert(
Position::first_child_of(impl_def.syntax()),
make::token(T![unsafe]),
);
}
let (impl_def, first_assoc_item) = if let Some(impl_def) = impl_def {
(
impl_def.clone(),
impl_def.assoc_item_list().and_then(|list| list.assoc_items().next()),
)
} else {
(generate_trait_impl(impl_is_unsafe, adt, trait_path), None)
};
ted::insert_all(
insert_after,
vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
);
}
(None, Some((impl_def, _))) => {
if impl_is_unsafe {
ted::insert(
Position::first_child_of(impl_def.syntax()),
make::token(T![unsafe]),
);
}
ted::insert_all(
insert_after,
vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
);
}
(Some(cap), None) => {
let impl_def = generate_trait_impl(adt, trait_path);
if impl_is_unsafe {
ted::insert(
Position::first_child_of(impl_def.syntax()),
make::token(T![unsafe]),
);
}
if let Some(l_curly) = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())
if let Some(cap) = ctx.config.snippet_cap {
if let Some(first_assoc_item) = first_assoc_item {
if let ast::AssocItem::Fn(ref func) = first_assoc_item
&& let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
&& m.syntax().text() == "todo!()"
{
builder.add_tabstop_after_token(cap, l_curly);
}
ted::insert_all(
insert_after,
vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
);
}
(Some(cap), Some((impl_def, first_assoc_item))) => {
let mut added_snippet = false;
if impl_is_unsafe {
ted::insert(
Position::first_child_of(impl_def.syntax()),
make::token(T![unsafe]),
);
}
if let ast::AssocItem::Fn(ref func) = first_assoc_item {
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) {
if m.syntax().text() == "todo!()" {
// Make the `todo!()` a placeholder
builder.add_placeholder_snippet(cap, m);
added_snippet = true;
}
}
}
if !added_snippet {
// Make the `todo!()` a placeholder
builder.add_placeholder_snippet(cap, m);
} else {
// If we haven't already added a snippet, add a tabstop before the generated function
builder.add_tabstop_before(cap, first_assoc_item);
}
ted::insert_all(
insert_after,
vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
);
} else if let Some(l_curly) =
impl_def.assoc_item_list().and_then(|it| it.l_curly_token())
{
builder.add_tabstop_after_token(cap, l_curly);
}
};
}
editor.insert_all(
insert_after,
vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
);
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@ -228,7 +186,8 @@ fn impl_def_from_trait(
annotated_name: &ast::Name,
trait_: Option<hir::Trait>,
trait_path: &ast::Path,
) -> Option<(ast::Impl, ast::AssocItem)> {
impl_is_unsafe: bool,
) -> Option<ast::Impl> {
let trait_ = trait_?;
let target_scope = sema.scope(annotated_name.syntax())?;
@ -245,21 +204,43 @@ fn impl_def_from_trait(
if trait_items.is_empty() {
return None;
}
let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone()));
let impl_def = generate_trait_impl(impl_is_unsafe, adt, make::ty_path(trait_path.clone()));
let first_assoc_item =
let assoc_items =
add_trait_assoc_items_to_impl(sema, config, &trait_items, trait_, &impl_def, &target_scope);
let assoc_item_list = if let Some((first, other)) =
assoc_items.split_first().map(|(first, other)| (first.clone_subtree(), other))
{
let first_item = if let ast::AssocItem::Fn(ref func) = first
&& let Some(body) = gen_trait_fn_body(func, trait_path, adt, None)
&& let Some(func_body) = func.body()
{
let mut editor = SyntaxEditor::new(first.syntax().clone());
editor.replace(func_body.syntax(), body.syntax());
ast::AssocItem::cast(editor.finish().new_root().clone())
} else {
Some(first.clone())
};
let items = first_item
.into_iter()
.chain(other.iter().cloned())
.map(either::Either::Right)
.collect();
make::assoc_item_list(Some(items))
} else {
make::assoc_item_list(None)
}
.clone_for_update();
// Generate a default `impl` function body for the derived trait.
if let ast::AssocItem::Fn(ref func) = first_assoc_item {
let _ = gen_trait_fn_body(func, trait_path, adt, None);
};
Some((impl_def, first_assoc_item))
let impl_def = impl_def.clone_subtree();
let mut editor = SyntaxEditor::new(impl_def.syntax().clone());
editor.replace(impl_def.assoc_item_list()?.syntax(), assoc_item_list.syntax());
let impl_def = ast::Impl::cast(editor.finish().new_root().clone())?;
Some(impl_def)
}
fn update_attribute(
builder: &mut SourceChangeBuilder,
editor: &mut SyntaxEditor,
old_derives: &[ast::Path],
old_tree: &ast::TokenTree,
old_trait_path: &ast::Path,
@ -272,8 +253,6 @@ fn update_attribute(
let has_more_derives = !new_derives.is_empty();
if has_more_derives {
let old_tree = builder.make_mut(old_tree.clone());
// Make the paths into flat lists of tokens in a vec
let tt = new_derives.iter().map(|path| path.syntax().clone()).map(|node| {
node.descendants_with_tokens()
@ -288,18 +267,17 @@ fn update_attribute(
let tt = tt.collect::<Vec<_>>();
let new_tree = make::token_tree(T!['('], tt).clone_for_update();
ted::replace(old_tree.syntax(), new_tree.syntax());
editor.replace(old_tree.syntax(), new_tree.syntax());
} else {
// Remove the attr and any trailing whitespace
let attr = builder.make_mut(attr.clone());
if let Some(line_break) =
attr.syntax().next_sibling_or_token().filter(|t| t.kind() == WHITESPACE)
{
ted::remove(line_break)
editor.delete(line_break)
}
ted::remove(attr.syntax())
editor.delete(attr.syntax())
}
}


@ -302,6 +302,7 @@ mod handlers {
generate_function::generate_function,
generate_impl::generate_impl,
generate_impl::generate_trait_impl,
generate_impl::generate_impl_trait,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_mut_trait_impl::generate_mut_trait_impl,
generate_new::generate_new,


@ -1880,6 +1880,29 @@ impl<T: Clone> Ctx<T> {$0}
)
}
#[test]
fn doctest_generate_impl_trait() {
check_doc_test(
"generate_impl_trait",
r#####"
trait $0Foo {
fn foo(&self) -> i32;
}
"#####,
r#####"
trait Foo {
fn foo(&self) -> i32;
}
impl Foo for ${1:_} {
fn foo(&self) -> i32 {
$0todo!()
}
}
"#####,
)
}
#[test]
fn doctest_generate_is_empty_from_len() {
check_doc_test(


@ -23,10 +23,11 @@ use syntax::{
ast::{
self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
edit::{AstNodeEdit, IndentLevel},
edit_in_place::{AttrsOwnerEdit, Indent, Removable},
edit_in_place::{AttrsOwnerEdit, Removable},
make,
syntax_factory::SyntaxFactory,
},
syntax_editor::SyntaxEditor,
ted,
};
@ -178,6 +179,7 @@ pub fn filter_assoc_items(
/// [`filter_assoc_items()`]), clones each item for update and applies path transformation to it,
/// then inserts into `impl_`. Returns the modified `impl_` and the first associated item that got
/// inserted.
#[must_use]
pub fn add_trait_assoc_items_to_impl(
sema: &Semantics<'_, RootDatabase>,
config: &AssistConfig,
@ -185,71 +187,66 @@ pub fn add_trait_assoc_items_to_impl(
trait_: hir::Trait,
impl_: &ast::Impl,
target_scope: &hir::SemanticsScope<'_>,
) -> ast::AssocItem {
) -> Vec<ast::AssocItem> {
let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
let items = original_items.iter().map(|InFile { file_id, value: original_item }| {
let cloned_item = {
if let Some(macro_file) = file_id.macro_file() {
let span_map = sema.db.expansion_span_map(macro_file);
let item_prettified = prettify_macro_expansion(
sema.db,
original_item.syntax().clone(),
&span_map,
target_scope.krate().into(),
);
if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
return formatted;
} else {
stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");
original_items
.iter()
.map(|InFile { file_id, value: original_item }| {
let mut cloned_item = {
if let Some(macro_file) = file_id.macro_file() {
let span_map = sema.db.expansion_span_map(macro_file);
let item_prettified = prettify_macro_expansion(
sema.db,
original_item.syntax().clone(),
&span_map,
target_scope.krate().into(),
);
if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
return formatted;
} else {
stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");
}
}
original_item.clone_for_update()
}
original_item.clone_for_update()
};
.reset_indent();
if let Some(source_scope) = sema.scope(original_item.syntax()) {
// FIXME: Paths in nested macros are not handled well. See
// `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test.
let transform =
PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone());
transform.apply(cloned_item.syntax());
}
cloned_item.remove_attrs_and_docs();
cloned_item.reindent_to(new_indent_level);
cloned_item
});
let assoc_item_list = impl_.get_or_create_assoc_item_list();
let mut first_item = None;
for item in items {
first_item.get_or_insert_with(|| item.clone());
match &item {
ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
let body = AstNodeEdit::indent(
&make::block_expr(
None,
Some(match config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => make::ext::expr_todo(),
}),
),
new_indent_level,
);
ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
if let Some(source_scope) = sema.scope(original_item.syntax()) {
// FIXME: Paths in nested macros are not handled well. See
// `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test.
let transform =
PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone());
cloned_item = ast::AssocItem::cast(transform.apply(cloned_item.syntax())).unwrap();
}
ast::AssocItem::TypeAlias(type_alias) => {
if let Some(type_bound_list) = type_alias.type_bound_list() {
type_bound_list.remove()
cloned_item.remove_attrs_and_docs();
cloned_item
})
.map(|item| {
match &item {
ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
let body = AstNodeEdit::indent(
&make::block_expr(
None,
Some(match config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => make::ext::expr_todo(),
}),
),
IndentLevel::single(),
);
ted::replace(fn_.get_or_create_body().syntax(), body.syntax());
}
ast::AssocItem::TypeAlias(type_alias) => {
if let Some(type_bound_list) = type_alias.type_bound_list() {
type_bound_list.remove()
}
}
_ => {}
}
_ => {}
}
assoc_item_list.add_item(item)
}
first_item.unwrap()
AstNodeEdit::indent(&item, new_indent_level)
})
.collect()
}
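// A minimal usage sketch for the new `Vec<ast::AssocItem>` return value
// (variable names hypothetical); callers are now responsible for inserting
// the returned items themselves, hence the `#[must_use]`:
//
//     let items = add_trait_assoc_items_to_impl(
//         &sema, &config, &missing_items, trait_, &impl_, &target_scope,
//     );
//     for item in items {
//         // e.g. append the item to the impl's assoc item list via an editor
//     }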
pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
@ -334,7 +331,7 @@ fn invert_special_case(make: &SyntaxFactory, expr: &ast::Expr) -> Option<ast::Ex
fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => {
let bin = bin.clone_for_update();
let bin = bin.clone_subtree();
let op_token = bin.op_token()?;
let rev_token = match op_token.kind() {
T![==] => T![!=],
@ -350,8 +347,9 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> {
);
}
};
ted::replace(op_token, make::token(rev_token));
Some(bin.into())
let mut bin_editor = SyntaxEditor::new(bin.syntax().clone());
bin_editor.replace(op_token, make::token(rev_token));
ast::Expr::cast(bin_editor.finish().new_root().clone())
}
ast::Expr::MethodCallExpr(mce) => {
let receiver = mce.receiver()?;
@ -664,16 +662,23 @@ fn generate_impl_text_inner(
/// Generates the corresponding `impl Type {}` including type and lifetime
/// parameters.
pub(crate) fn generate_impl_with_item(
adt: &ast::Adt,
body: Option<ast::AssocItemList>,
) -> ast::Impl {
generate_impl_inner(false, adt, None, true, body)
}
pub(crate) fn generate_impl(adt: &ast::Adt) -> ast::Impl {
generate_impl_inner(adt, None, true)
generate_impl_inner(false, adt, None, true, None)
}
/// Generates the corresponding `impl <trait> for Type {}` including type
/// and lifetime parameters, with `<trait>` appended to `impl`'s generic parameters' bounds.
///
/// This is useful for traits like `PartialEq`, since `impl<T> PartialEq for U<T>` often requires `T: PartialEq`.
pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl {
generate_impl_inner(adt, Some(trait_), true)
pub(crate) fn generate_trait_impl(is_unsafe: bool, adt: &ast::Adt, trait_: ast::Type) -> ast::Impl {
generate_impl_inner(is_unsafe, adt, Some(trait_), true, None)
}
/// Generates the corresponding `impl <trait> for Type {}` including type
@ -681,13 +686,15 @@ pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Imp
///
/// This is useful for traits like `From<T>`, since `impl<T> From<T> for U<T>` doesn't require `T: From<T>`.
pub(crate) fn generate_trait_impl_intransitive(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl {
generate_impl_inner(adt, Some(trait_), false)
generate_impl_inner(false, adt, Some(trait_), false, None)
}
fn generate_impl_inner(
is_unsafe: bool,
adt: &ast::Adt,
trait_: Option<ast::Type>,
trait_is_transitive: bool,
body: Option<ast::AssocItemList>,
) -> ast::Impl {
// Ensure lifetime params are before type & const params
let generic_params = adt.generic_param_list().map(|generic_params| {
@ -727,7 +734,7 @@ fn generate_impl_inner(
let impl_ = match trait_ {
Some(trait_) => make::impl_trait(
false,
is_unsafe,
None,
None,
generic_params,
@ -737,9 +744,9 @@ fn generate_impl_inner(
ty,
None,
adt.where_clause(),
None,
body,
),
None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), None),
None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), body),
}
.clone_for_update();
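// A rough sketch of the new `is_unsafe` flag (the trait here is only an
// illustration): `generate_trait_impl(true, &adt, trait_ty)` would render
// roughly as
//
//     unsafe impl<T> Send for Foo<T> {}
//
// while all pre-existing callers pass `false` and keep the previous output.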


@ -1,10 +1,7 @@
//! This module contains functions to generate default trait impl function bodies where possible.
use hir::TraitRef;
use syntax::{
ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make},
ted,
};
use syntax::ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make};
/// Generate custom trait bodies without default implementation where possible.
///
@ -18,21 +15,33 @@ pub(crate) fn gen_trait_fn_body(
trait_path: &ast::Path,
adt: &ast::Adt,
trait_ref: Option<TraitRef<'_>>,
) -> Option<()> {
) -> Option<ast::BlockExpr> {
let _ = func.body()?;
match trait_path.segment()?.name_ref()?.text().as_str() {
"Clone" => gen_clone_impl(adt, func),
"Debug" => gen_debug_impl(adt, func),
"Default" => gen_default_impl(adt, func),
"Hash" => gen_hash_impl(adt, func),
"PartialEq" => gen_partial_eq(adt, func, trait_ref),
"PartialOrd" => gen_partial_ord(adt, func, trait_ref),
"Clone" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "clone"));
gen_clone_impl(adt)
}
"Debug" => gen_debug_impl(adt),
"Default" => gen_default_impl(adt),
"Hash" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "hash"));
gen_hash_impl(adt)
}
"PartialEq" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
gen_partial_eq(adt, trait_ref)
}
"PartialOrd" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
gen_partial_ord(adt, trait_ref)
}
_ => None,
}
}
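// Since `gen_trait_fn_body` now returns the generated block instead of
// splicing it in with `ted::replace`, a rough sketch of the new call shape
// (mirroring the caller shown earlier in this diff):
//
//     if let Some(body) = gen_trait_fn_body(&func, &trait_path, &adt, None)
//         && let Some(old_body) = func.body()
//     {
//         let mut editor = SyntaxEditor::new(func.syntax().clone());
//         editor.replace(old_body.syntax(), body.syntax());
//     }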
/// Generate a `Clone` impl based on the fields and members of the target type.
fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "clone"));
fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
fn gen_clone_call(target: ast::Expr) -> ast::Expr {
let method = make::name_ref("clone");
make::expr_method_call(target, method, make::arg_list(None)).into()
@ -139,12 +148,11 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
}
};
let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
/// Generate a `Debug` impl based on the fields and members of the target type.
fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
fn gen_debug_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let annotated_name = adt.name()?;
match adt {
// `Debug` cannot be derived for unions, so no default impl can be provided.
@ -248,8 +256,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let body = make::block_expr(None, Some(match_expr.into()));
let body = body.indent(ast::edit::IndentLevel(1));
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
ast::Adt::Struct(strukt) => {
@ -296,14 +303,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let method = make::name_ref("finish");
let expr = make::expr_method_call(expr, method, make::arg_list(None)).into();
let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
}
}
/// Generate a `Default` impl based on the fields and members of the target type.
fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
fn gen_default_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
fn gen_default_call() -> Option<ast::Expr> {
let fn_name = make::ext::path_from_idents(["Default", "default"])?;
Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into())
@ -342,15 +348,13 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
}
};
let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
}
}
/// Generate a `Hash` impl based on the fields and members of the target type.
fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "hash"));
fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
let method = make::name_ref("hash");
let arg = make::expr_path(make::ext::ident_path("state"));
@ -400,13 +404,11 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
},
};
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
/// Generate a `PartialEq` impl based on the fields and members of the target type.
fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> {
fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
match expr {
Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
@ -595,12 +597,10 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>
},
};
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> {
fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
let mut arms = vec![];
@ -686,8 +686,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_
},
};
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
Some(body)
}
fn make_discriminant() -> Option<ast::Expr> {


@ -276,7 +276,7 @@ fn get_transformed_assoc_item(
let assoc_item = assoc_item.clone_for_update();
// FIXME: Paths in nested macros are not handled well. See
// `macro_generated_assoc_item2` test.
transform.apply(assoc_item.syntax());
let assoc_item = ast::AssocItem::cast(transform.apply(assoc_item.syntax()))?;
assoc_item.remove_attrs_and_docs();
Some(assoc_item)
}
@ -301,7 +301,7 @@ fn get_transformed_fn(
let fn_ = fn_.clone_for_update();
// FIXME: Paths in nested macros are not handled well. See
// `macro_generated_assoc_item2` test.
transform.apply(fn_.syntax());
let fn_ = ast::Fn::cast(transform.apply(fn_.syntax()))?;
fn_.remove_attrs_and_docs();
match async_ {
AsyncSugaring::Desugar => {


@ -12,15 +12,16 @@ use span::Edition;
use syntax::{
NodeOrToken, SyntaxNode,
ast::{self, AstNode, HasGenericArgs, make},
ted,
syntax_editor::{self, SyntaxEditor},
};
#[derive(Default)]
#[derive(Default, Debug)]
struct AstSubsts {
types_and_consts: Vec<TypeOrConst>,
lifetimes: Vec<ast::LifetimeArg>,
}
#[derive(Debug)]
enum TypeOrConst {
Either(ast::TypeArg), // indistinguishable type or const param
Const(ast::ConstArg),
@ -128,15 +129,18 @@ impl<'a> PathTransform<'a> {
}
}
pub fn apply(&self, syntax: &SyntaxNode) {
#[must_use]
pub fn apply(&self, syntax: &SyntaxNode) -> SyntaxNode {
self.build_ctx().apply(syntax)
}
pub fn apply_all<'b>(&self, nodes: impl IntoIterator<Item = &'b SyntaxNode>) {
#[must_use]
pub fn apply_all<'b>(
&self,
nodes: impl IntoIterator<Item = &'b SyntaxNode>,
) -> Vec<SyntaxNode> {
let ctx = self.build_ctx();
for node in nodes {
ctx.apply(node);
}
nodes.into_iter().map(|node| ctx.apply(&node.clone())).collect()
}
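// `apply` and `apply_all` are now pure: they return freshly built roots
// rather than mutating the input in place, hence `#[must_use]`. A rough
// migration sketch:
//
//     // before: transform.apply(item.syntax());              // in-place
//     // after:
//     let new_root = transform.apply(item.syntax());
//     let item = ast::AssocItem::cast(new_root)?;              // re-cast the new root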
fn prettify_target_node(&self, node: SyntaxNode) -> SyntaxNode {
@ -236,7 +240,7 @@ impl<'a> PathTransform<'a> {
Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?))
})
.collect();
let ctx = Ctx {
let mut ctx = Ctx {
type_substs,
const_substs,
lifetime_substs,
@ -272,42 +276,75 @@ fn preorder_rev(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
}
impl Ctx<'_> {
fn apply(&self, item: &SyntaxNode) {
fn apply(&self, item: &SyntaxNode) -> SyntaxNode {
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
// so that such an operation is safe.
let paths = preorder_rev(item).filter_map(ast::Path::cast).collect::<Vec<_>>();
for path in paths {
self.transform_path(path);
}
preorder_rev(item).filter_map(ast::Lifetime::cast).for_each(|lifetime| {
let item = self.transform_path(item).clone_subtree();
let mut editor = SyntaxEditor::new(item.clone());
preorder_rev(&item).filter_map(ast::Lifetime::cast).for_each(|lifetime| {
if let Some(subst) = self.lifetime_substs.get(&lifetime.syntax().text().to_string()) {
ted::replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax());
editor
.replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax());
}
});
editor.finish().new_root().clone()
}
fn transform_default_values(&self, defaulted_params: Vec<DefaultedParam>) {
fn transform_default_values(&mut self, defaulted_params: Vec<DefaultedParam>) {
// By now the default values are simply copied from where they are declared
// and should be transformed. As any value is allowed to refer to previous
// generic (both type and const) parameters, they should all be iterated left-to-right.
for param in defaulted_params {
let value = match param {
Either::Left(k) => self.type_substs.get(&k).unwrap().syntax(),
Either::Right(k) => self.const_substs.get(&k).unwrap(),
let value = match &param {
Either::Left(k) => self.type_substs.get(k).unwrap().syntax(),
Either::Right(k) => self.const_substs.get(k).unwrap(),
};
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
// so that such an operation is safe.
let paths = preorder_rev(value).filter_map(ast::Path::cast).collect::<Vec<_>>();
for path in paths {
self.transform_path(path);
let new_value = self.transform_path(value);
match param {
Either::Left(k) => {
self.type_substs.insert(k, ast::Type::cast(new_value.clone()).unwrap());
}
Either::Right(k) => {
self.const_substs.insert(k, new_value.clone());
}
}
}
}
fn transform_path(&self, path: ast::Path) -> Option<()> {
fn transform_path(&self, path: &SyntaxNode) -> SyntaxNode {
fn find_child_paths(root_path: &SyntaxNode) -> Vec<ast::Path> {
let mut result = Vec::new();
for child in root_path.children() {
if let Some(child_path) = ast::Path::cast(child.clone()) {
result.push(child_path);
} else {
result.extend(find_child_paths(&child));
}
}
result
}
let root_path = path.clone_subtree();
let result = find_child_paths(&root_path);
let mut editor = SyntaxEditor::new(root_path.clone());
for sub_path in result {
let new = self.transform_path(sub_path.syntax());
editor.replace(sub_path.syntax(), new);
}
let update_sub_item = editor.finish().new_root().clone().clone_subtree();
let item = find_child_paths(&update_sub_item);
let mut editor = SyntaxEditor::new(update_sub_item);
for sub_path in item {
self.transform_path_(&mut editor, &sub_path);
}
editor.finish().new_root().clone()
}
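// A rough walkthrough of the recursion above: for a path such as
// `Vec<T::Assoc>`, the child path (`T::Assoc`) is rewritten first on a
// detached subtree, then the rebuilt root is re-scanned and each remaining
// path goes through `transform_path_`; e.g. with the substitution `T := u32`:
//
//     Vec<T::Assoc>  ->  Vec<<u32 as Trait>::Assoc>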
fn transform_path_(&self, editor: &mut SyntaxEditor, path: &ast::Path) -> Option<()> {
if path.qualifier().is_some() {
return None;
}
@ -319,8 +356,7 @@ impl Ctx<'_> {
// don't try to qualify a sole `self` either; it is usually a local, but is returned as a module due to namespace clashing
return None;
}
let resolution = self.source_scope.speculative_resolve(&path)?;
let resolution = self.source_scope.speculative_resolve(path)?;
match resolution {
hir::PathResolution::TypeParam(tp) => {
@ -360,12 +396,12 @@ impl Ctx<'_> {
let segment = make::path_segment_ty(subst.clone(), trait_ref);
let qualified = make::path_from_segments(std::iter::once(segment), false);
ted::replace(path.syntax(), qualified.clone_for_update().syntax());
editor.replace(path.syntax(), qualified.clone_for_update().syntax());
} else if let Some(path_ty) = ast::PathType::cast(parent) {
let old = path_ty.syntax();
if old.parent().is_some() {
ted::replace(old, subst.clone_subtree().clone_for_update().syntax());
editor.replace(old, subst.clone_subtree().clone_for_update().syntax());
} else {
// Some `path_ty` nodes have no parent, especially ones made for the default
// values of type parameters.
@ -377,13 +413,13 @@ impl Ctx<'_> {
}
let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?;
let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?;
ted::replace_all(
editor.replace_all(
start..=end,
new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(),
);
}
} else {
ted::replace(
editor.replace(
path.syntax(),
subst.clone_subtree().clone_for_update().syntax(),
);
@ -409,17 +445,28 @@ impl Ctx<'_> {
};
let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?;
let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update();
let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree());
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
let old = segment.get_or_create_generic_arg_list();
ted::replace(old.syntax(), args.clone_subtree().syntax().clone_for_update())
if let Some(old) = segment.generic_arg_list() {
res_editor.replace(
old.syntax(),
args.clone_subtree().syntax().clone_for_update(),
)
} else {
res_editor.insert(
syntax_editor::Position::last_child_of(segment.syntax()),
args.clone_subtree().syntax().clone_for_update(),
);
}
}
}
ted::replace(path.syntax(), res.syntax())
let res = res_editor.finish().new_root().clone();
editor.replace(path.syntax().clone(), res);
}
hir::PathResolution::ConstParam(cp) => {
if let Some(subst) = self.const_substs.get(&cp) {
ted::replace(path.syntax(), subst.clone_subtree().clone_for_update());
editor.replace(path.syntax(), subst.clone_subtree().clone_for_update());
}
}
hir::PathResolution::SelfType(imp) => {
@ -456,13 +503,13 @@ impl Ctx<'_> {
mod_path_to_ast(&found_path, self.target_edition).qualifier()
{
let res = make::path_concat(qual, path_ty.path()?).clone_for_update();
ted::replace(path.syntax(), res.syntax());
editor.replace(path.syntax(), res.syntax());
return Some(());
}
}
}
ted::replace(path.syntax(), ast_ty.syntax());
editor.replace(path.syntax(), ast_ty.syntax());
}
hir::PathResolution::Local(_)
| hir::PathResolution::Def(_)


@ -131,4 +131,28 @@ fn foo(v: Enum<()>) {
"#,
);
}
#[test]
fn regression_20259() {
check_diagnostics(
r#"
//- minicore: deref
use core::ops::Deref;
struct Foo<T>(T);
impl<T> Deref for Foo<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
fn test(x: Foo<(i32, bool)>) {
let (_a, _b): &(i32, bool) = &x;
}
"#,
);
}
}


@ -73,11 +73,13 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
}
if fn_node.body().is_some() {
// Get the actual start of the function (excluding doc comments)
let fn_start = fn_node
.fn_token()
.map(|token| token.text_range().start())
.unwrap_or(node.text_range().start());
res.push(Fold {
range: TextRange::new(
node.text_range().start(),
node.text_range().end(),
),
range: TextRange::new(fn_start, node.text_range().end()),
kind: FoldKind::Function,
});
continue;
@ -688,4 +690,21 @@ type Foo<T, U> = foo<fold arglist><
"#,
)
}
#[test]
fn test_fold_doc_comments_with_multiline_paramlist_function() {
check(
r#"
<fold comment>/// A very very very very very very very very very very very very very very very
/// very very very long description</fold>
<fold function>fn foo<fold arglist>(
very_long_parameter_name: u32,
another_very_long_parameter_name: u32,
third_very_long_param: u32,
)</fold> <fold block>{
todo!()
}</fold></fold>
"#,
);
}
}


@ -77,17 +77,18 @@ pub(super) fn fn_ptr_hints(
return None;
}
let parent_for_type = func
let parent_for_binder = func
.syntax()
.ancestors()
.skip(1)
.take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE))
.find_map(ast::ForType::cast);
.find_map(ast::ForType::cast)
.and_then(|it| it.for_binder());
let param_list = func.param_list()?;
let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list());
let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list());
let ret_type = func.ret_type();
let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token());
let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token());
hints_(
acc,
ctx,
@ -143,15 +144,16 @@ pub(super) fn fn_path_hints(
// FIXME: Support general path types
let (param_list, ret_type) = func.path().as_ref().and_then(path_as_fn)?;
let parent_for_type = func
let parent_for_binder = func
.syntax()
.ancestors()
.skip(1)
.take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE))
.find_map(ast::ForType::cast);
.find_map(ast::ForType::cast)
.and_then(|it| it.for_binder());
let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list());
let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token());
let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list());
let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token());
hints_(
acc,
ctx,


@ -12,6 +12,7 @@ use ide_db::{
source_change::SourceChangeBuilder,
};
use itertools::Itertools;
use std::fmt::Write;
use stdx::{always, never};
use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast};
@ -459,35 +460,22 @@ fn rename_self_to_param(
}
fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: String) -> Option<TextEdit> {
fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
return Some(p.path()?.segment()?.name_ref()?.text().to_string());
}
None
let mut replacement_text = new_name;
replacement_text.push_str(": ");
if self_param.amp_token().is_some() {
replacement_text.push('&');
}
if let Some(lifetime) = self_param.lifetime() {
write!(replacement_text, "{lifetime} ").unwrap();
}
if self_param.amp_token().and(self_param.mut_token()).is_some() {
replacement_text.push_str("mut ");
}
match self_param.syntax().ancestors().find_map(ast::Impl::cast) {
Some(impl_def) => {
let type_name = target_type_name(&impl_def)?;
replacement_text.push_str("Self");
let mut replacement_text = new_name;
replacement_text.push_str(": ");
match (self_param.amp_token(), self_param.mut_token()) {
(Some(_), None) => replacement_text.push('&'),
(Some(_), Some(_)) => replacement_text.push_str("&mut "),
(_, _) => (),
};
replacement_text.push_str(type_name.as_str());
Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
}
None => {
cov_mark::hit!(rename_self_outside_of_methods);
let mut replacement_text = new_name;
replacement_text.push_str(": _");
Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
}
}
Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
}
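// Illustrative examples of the rewritten replacement text, which now always
// names `Self` (preserving `&`, the lifetime, and `mut`) instead of looking
// up the impl's target type:
//
//     fn f(&self)         =>  fn f(foo: &Self)
//     fn f(&'a mut self)  =>  fn f(foo: &'a mut Self)
//     fn f(self)          =>  fn f(foo: Self)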
#[cfg(test)]
@ -2069,7 +2057,7 @@ impl Foo {
struct Foo { i: i32 }
impl Foo {
fn f(foo: &mut Foo) -> i32 {
fn f(foo: &mut Self) -> i32 {
foo.i
}
}
@ -2095,7 +2083,33 @@ impl Foo {
struct Foo { i: i32 }
impl Foo {
fn f(foo: Foo) -> i32 {
fn f(foo: Self) -> i32 {
foo.i
}
}
"#,
);
}
#[test]
fn test_owned_self_to_parameter_with_lifetime() {
cov_mark::check!(rename_self_to_param);
check(
"foo",
r#"
struct Foo<'a> { i: &'a i32 }
impl<'a> Foo<'a> {
fn f(&'a $0self) -> i32 {
self.i
}
}
"#,
r#"
struct Foo<'a> { i: &'a i32 }
impl<'a> Foo<'a> {
fn f(foo: &'a Self) -> i32 {
foo.i
}
}
@ -2105,7 +2119,6 @@ impl Foo {
#[test]
fn test_self_outside_of_methods() {
cov_mark::check!(rename_self_outside_of_methods);
check(
"foo",
r#"
@ -2114,7 +2127,7 @@ fn f($0self) -> i32 {
}
"#,
r#"
fn f(foo: _) -> i32 {
fn f(foo: Self) -> i32 {
foo.i
}
"#,
@ -2159,7 +2172,7 @@ impl Foo {
struct Foo { i: i32 }
impl Foo {
fn f(foo: &Foo) -> i32 {
fn f(foo: &Self) -> i32 {
let self_var = 1;
foo.i
}


@ -572,9 +572,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
// test closure_binder
// fn main() { for<'a> || (); }
if p.at(T![for]) {
let b = p.start();
types::for_binder(p);
b.complete(p, CLOSURE_BINDER);
}
// test const_closure
// fn main() { let cl = const || _ = 0; }


@ -13,7 +13,7 @@ pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) {
// test_err generic_param_list_recover
// fn f<T: Clone,, U:, V>() {}
fn generic_param_list(p: &mut Parser<'_>) {
pub(super) fn generic_param_list(p: &mut Parser<'_>) {
assert!(p.at(T![<]));
let m = p.start();
delimited(
@ -147,7 +147,15 @@ fn type_bound(p: &mut Parser<'_>) -> bool {
let has_paren = p.eat(T!['(']);
match p.current() {
LIFETIME_IDENT => lifetime(p),
T![for] => types::for_type(p, false),
// test for_binder_bound
// fn foo<T: for<'a> [const] async Trait>() {}
T![for] => {
types::for_binder(p);
if path_type_bound(p).is_err() {
m.abandon(p);
return false;
}
}
// test precise_capturing
// fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T, Self> {}
@ -180,44 +188,8 @@ fn type_bound(p: &mut Parser<'_>) -> bool {
p.bump_any();
types::for_type(p, false)
}
current => {
match current {
T![?] => p.bump_any(),
T![~] => {
p.bump_any();
p.expect(T![const]);
}
T!['['] => {
p.bump_any();
p.expect(T![const]);
p.expect(T![']']);
}
// test const_trait_bound
// const fn foo(_: impl const Trait) {}
T![const] => {
p.bump_any();
}
// test async_trait_bound
// fn async_foo(_: impl async Fn(&i32)) {}
T![async] => {
p.bump_any();
}
_ => (),
}
if paths::is_use_path_start(p) {
types::path_type_bounds(p, false);
// test_err type_bounds_macro_call_recovery
// fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
if p.at(T![!]) {
let m = p.start();
p.bump(T![!]);
p.error("unexpected `!` in type path, macro calls are not allowed here");
if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) {
items::token_tree(p);
}
m.complete(p, ERROR);
}
} else {
_ => {
if path_type_bound(p).is_err() {
m.abandon(p);
return false;
}
@ -231,6 +203,43 @@ fn type_bound(p: &mut Parser<'_>) -> bool {
true
}
fn path_type_bound(p: &mut Parser<'_>) -> Result<(), ()> {
if p.eat(T![~]) {
p.expect(T![const]);
} else if p.eat(T!['[']) {
// test maybe_const_trait_bound
// const fn foo(_: impl [const] Trait) {}
p.expect(T![const]);
p.expect(T![']']);
} else {
// test const_trait_bound
// const fn foo(_: impl const Trait) {}
p.eat(T![const]);
}
// test async_trait_bound
// fn async_foo(_: impl async Fn(&i32)) {}
p.eat(T![async]);
p.eat(T![?]);
if paths::is_use_path_start(p) {
types::path_type_bounds(p, false);
// test_err type_bounds_macro_call_recovery
// fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
if p.at(T![!]) {
let m = p.start();
p.bump(T![!]);
p.error("unexpected `!` in type path, macro calls are not allowed here");
if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) {
items::token_tree(p);
}
m.complete(p, ERROR);
}
Ok(())
} else {
Err(())
}
}
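// Illustrative examples of bounds accepted through `path_type_bound` (the
// bracketed `[const]` form supersedes the old `~const` syntax, which is
// still parsed for recovery):
//
//     const fn a(_: impl const Trait) {}
//     const fn b(_: impl [const] Trait) {}
//     fn c<T: for<'x> [const] async Trait>() {}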
// test where_clause
// fn foo()
// where


@ -249,13 +249,14 @@ fn fn_ptr_type(p: &mut Parser<'_>) {
}
pub(super) fn for_binder(p: &mut Parser<'_>) {
assert!(p.at(T![for]));
let m = p.start();
p.bump(T![for]);
if p.at(T![<]) {
generic_params::opt_generic_param_list(p);
generic_params::generic_param_list(p);
} else {
p.error("expected `<`");
}
m.complete(p, FOR_BINDER);
}
// test for_type


@ -185,7 +185,6 @@ pub enum SyntaxKind {
BREAK_EXPR,
CALL_EXPR,
CAST_EXPR,
CLOSURE_BINDER,
CLOSURE_EXPR,
CONST,
CONST_ARG,
@ -203,6 +202,7 @@ pub enum SyntaxKind {
FN_PTR_TYPE,
FORMAT_ARGS_ARG,
FORMAT_ARGS_EXPR,
FOR_BINDER,
FOR_EXPR,
FOR_TYPE,
GENERIC_ARG_LIST,
@ -358,7 +358,6 @@ impl SyntaxKind {
| BREAK_EXPR
| CALL_EXPR
| CAST_EXPR
| CLOSURE_BINDER
| CLOSURE_EXPR
| CONST
| CONST_ARG
@ -376,6 +375,7 @@ impl SyntaxKind {
| FN_PTR_TYPE
| FORMAT_ARGS_ARG
| FORMAT_ARGS_EXPR
| FOR_BINDER
| FOR_EXPR
| FOR_TYPE
| GENERIC_ARG_LIST


@ -253,6 +253,10 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs");
}
#[test]
fn for_binder_bound() {
run_and_expect_no_errors("test_data/parser/inline/ok/for_binder_bound.rs");
}
#[test]
fn for_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/for_expr.rs"); }
#[test]
fn for_range_from() {
@ -402,6 +406,10 @@ mod ok {
#[test]
fn match_guard() { run_and_expect_no_errors("test_data/parser/inline/ok/match_guard.rs"); }
#[test]
fn maybe_const_trait_bound() {
run_and_expect_no_errors("test_data/parser/inline/ok/maybe_const_trait_bound.rs");
}
#[test]
fn metas() { run_and_expect_no_errors("test_data/parser/inline/ok/metas.rs"); }
#[test]
fn method_call_expr() {


@ -37,7 +37,7 @@ SOURCE_FILE
WHITESPACE " "
TYPE_BOUND
L_PAREN "("
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@ -45,18 +45,18 @@ SOURCE_FILE
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
GENERIC_ARG_LIST
L_ANGLE "<"
LIFETIME_ARG
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
GENERIC_ARG_LIST
L_ANGLE "<"
LIFETIME_ARG
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
R_PAREN ")"
R_ANGLE ">"
PARAM_LIST
@ -124,7 +124,7 @@ SOURCE_FILE
WHITESPACE " "
TYPE_BOUND
L_PAREN "("
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@ -132,18 +132,18 @@ SOURCE_FILE
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
GENERIC_ARG_LIST
L_ANGLE "<"
LIFETIME_ARG
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
GENERIC_ARG_LIST
L_ANGLE "<"
LIFETIME_ARG
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
R_PAREN ")"
ERROR
R_ANGLE ">"
@ -186,7 +186,7 @@ SOURCE_FILE
TUPLE_EXPR
L_PAREN "("
CLOSURE_EXPR
CLOSURE_BINDER
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@ -243,13 +243,14 @@ SOURCE_FILE
PAREN_TYPE
L_PAREN "("
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH


@ -12,13 +12,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE " "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE "\n"
BLOCK_EXPR
STMT_LIST


@ -8,13 +8,14 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
REF_TYPE
AMP "&"
@ -37,13 +38,14 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
TUPLE_TYPE
L_PAREN "("
@ -70,13 +72,14 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
SLICE_TYPE
L_BRACK "["
@ -97,22 +100,24 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'b"
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'b"
R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
@ -164,31 +169,34 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'b"
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'c"
LIFETIME_IDENT "'b"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'c"
R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"


@ -14,7 +14,7 @@ SOURCE_FILE
WHITESPACE " "
EXPR_STMT
CLOSURE_EXPR
CLOSURE_BINDER
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"


@ -103,7 +103,7 @@ SOURCE_FILE
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@ -111,12 +111,12 @@ SOURCE_FILE
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Path"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Path"
SEMICOLON ";"
WHITESPACE "\n"
TYPE_ALIAS


@ -0,0 +1,45 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
GENERIC_PARAM_LIST
L_ANGLE "<"
TYPE_PARAM
NAME
IDENT "T"
COLON ":"
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
L_BRACK "["
CONST_KW "const"
R_BRACK "]"
WHITESPACE " "
ASYNC_KW "async"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
R_ANGLE ">"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"


@ -0,0 +1 @@
fn foo<T: for<'a> [const] async Trait>() {}


@ -8,13 +8,14 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
@ -39,13 +40,14 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
UNSAFE_KW "unsafe"
@ -86,13 +88,14 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH


@ -202,7 +202,7 @@ SOURCE_FILE
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
CLOSURE_BINDER
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@ -223,7 +223,7 @@ SOURCE_FILE
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
CLOSURE_BINDER
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"


@ -0,0 +1,36 @@
SOURCE_FILE
FN
CONST_KW "const"
WHITESPACE " "
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
PARAM_LIST
L_PAREN "("
PARAM
WILDCARD_PAT
UNDERSCORE "_"
COLON ":"
WHITESPACE " "
IMPL_TRAIT_TYPE
IMPL_KW "impl"
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
L_BRACK "["
CONST_KW "const"
R_BRACK "]"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"


@ -0,0 +1 @@
const fn foo(_: impl [const] Trait) {}


@ -11,13 +11,14 @@ SOURCE_FILE
TYPE_BOUND_LIST
TYPE_BOUND
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH


@ -29,10 +29,11 @@ SOURCE_FILE
TYPE_BOUND
QUESTION "?"
FOR_TYPE
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH


@ -18,13 +18,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH


@ -36,7 +36,7 @@ SOURCE_FILE
PLUS "+"
WHITESPACE " "
TYPE_BOUND
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@ -44,18 +44,18 @@ SOURCE_FILE
LIFETIME
LIFETIME_IDENT "'de"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Deserialize"
GENERIC_ARG_LIST
L_ANGLE "<"
LIFETIME_ARG
LIFETIME
LIFETIME_IDENT "'de"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Deserialize"
GENERIC_ARG_LIST
L_ANGLE "<"
LIFETIME_ARG
LIFETIME
LIFETIME_IDENT "'de"
R_ANGLE ">"
WHITESPACE " "
PLUS "+"
WHITESPACE " "


@ -18,13 +18,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
@ -81,13 +82,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
REF_TYPE
AMP "&"
@ -135,13 +137,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PAREN_TYPE
L_PAREN "("
@ -206,13 +209,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
SLICE_TYPE
L_BRACK "["
@ -276,13 +280,14 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
@ -349,22 +354,24 @@ SOURCE_FILE
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'b"
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
FOR_TYPE
FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'b"
R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"


@ -20,6 +20,7 @@ semver.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_derive.workspace = true
temp-dir.workspace = true
tracing.workspace = true
triomphe.workspace = true
la-arena.workspace = true


@ -16,11 +16,13 @@ use la_arena::ArenaMap;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Deserialize as _;
use stdx::never;
use toolchain::Tool;
use crate::{
CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot,
TargetKind, utf8_stdout,
TargetKind, cargo_config_file::make_lockfile_copy,
cargo_workspace::MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH, utf8_stdout,
};
/// Output of the build script and proc-macro building steps for a workspace.
@ -30,6 +32,15 @@ pub struct WorkspaceBuildScripts {
error: Option<String>,
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub enum ProcMacroDylibPath {
Path(AbsPathBuf),
DylibNotFound,
NotProcMacro,
#[default]
NotBuilt,
}
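// A rough sketch of how the new enum separates states that the previous
// `Option<AbsPathBuf>` conflated (`load_proc_macro` is hypothetical):
//
//     match &output.proc_macro_dylib_path {
//         ProcMacroDylibPath::Path(path) => load_proc_macro(path),
//         ProcMacroDylibPath::DylibNotFound => { /* built, but no dylib artifact was found */ }
//         ProcMacroDylibPath::NotProcMacro => { /* the package has no proc-macro target */ }
//         ProcMacroDylibPath::NotBuilt => { /* build scripts have not been run yet */ }
//     }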
/// Output of the build script and proc-macro building step for a concrete package.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct BuildScriptOutput {
@ -43,7 +54,7 @@ pub(crate) struct BuildScriptOutput {
/// Directory where a build script might place its output.
pub(crate) out_dir: Option<AbsPathBuf>,
/// Path to the proc-macro library file if this package exposes proc-macros.
pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
pub(crate) proc_macro_dylib_path: ProcMacroDylibPath,
}
impl BuildScriptOutput {
@ -51,7 +62,10 @@ impl BuildScriptOutput {
self.cfgs.is_empty()
&& self.envs.is_empty()
&& self.out_dir.is_none()
&& self.proc_macro_dylib_path.is_none()
&& matches!(
self.proc_macro_dylib_path,
ProcMacroDylibPath::NotBuilt | ProcMacroDylibPath::NotProcMacro
)
}
}
@ -67,7 +81,7 @@ impl WorkspaceBuildScripts {
let current_dir = workspace.workspace_root();
let allowed_features = workspace.workspace_features();
let cmd = Self::build_command(
let (_guard, cmd) = Self::build_command(
config,
&allowed_features,
workspace.manifest_path(),
@ -88,7 +102,7 @@ impl WorkspaceBuildScripts {
) -> io::Result<Vec<WorkspaceBuildScripts>> {
assert_eq!(config.invocation_strategy, InvocationStrategy::Once);
let cmd = Self::build_command(
let (_guard, cmd) = Self::build_command(
config,
&Default::default(),
// This is not gonna be used anyways, so just construct a dummy here
@ -126,6 +140,8 @@ impl WorkspaceBuildScripts {
|package, cb| {
if let Some(&(package, workspace)) = by_id.get(package) {
cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
} else {
never!("Received compiler message for unknown package: {}", package);
}
},
progress,
@ -140,12 +156,9 @@ impl WorkspaceBuildScripts {
if tracing::enabled!(tracing::Level::INFO) {
for (idx, workspace) in workspaces.iter().enumerate() {
for package in workspace.packages() {
let package_build_data = &mut res[idx].outputs[package];
let package_build_data: &mut BuildScriptOutput = &mut res[idx].outputs[package];
if !package_build_data.is_empty() {
tracing::info!(
"{}: {package_build_data:?}",
workspace[package].manifest.parent(),
);
tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
}
}
}
@ -198,10 +211,33 @@ impl WorkspaceBuildScripts {
let path = dir_entry.path();
let extension = path.extension()?;
if extension == std::env::consts::DLL_EXTENSION {
let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?)
.ok()?;
return Some((name, path));
let name = path
.file_stem()?
.to_str()?
.split_once('-')?
.0
.trim_start_matches("lib")
.to_owned();
let path = match Utf8PathBuf::from_path_buf(path) {
Ok(path) => path,
Err(path) => {
tracing::warn!(
"Proc-macro dylib path contains non-UTF8 characters: {:?}",
path.display()
);
return None;
}
};
return match AbsPathBuf::try_from(path) {
Ok(path) => Some((name, path)),
Err(path) => {
tracing::error!(
"proc-macro dylib path is not absolute: {:?}",
path
);
None
}
};
}
}
None
@ -209,28 +245,24 @@ impl WorkspaceBuildScripts {
.collect();
for p in rustc.packages() {
let package = &rustc[p];
if package
.targets
.iter()
.any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }))
{
if let Some((_, path)) = proc_macro_dylibs
.iter()
.find(|(name, _)| *name.trim_start_matches("lib") == package.name)
{
bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
bs.outputs[p].proc_macro_dylib_path =
if package.targets.iter().any(|&it| {
matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })
}) {
match proc_macro_dylibs.iter().find(|(name, _)| *name == package.name) {
Some((_, path)) => ProcMacroDylibPath::Path(path.clone()),
_ => ProcMacroDylibPath::DylibNotFound,
}
} else {
ProcMacroDylibPath::NotProcMacro
}
}
}
if tracing::enabled!(tracing::Level::INFO) {
for package in rustc.packages() {
let package_build_data = &bs.outputs[package];
if !package_build_data.is_empty() {
tracing::info!(
"{}: {package_build_data:?}",
rustc[package].manifest.parent(),
);
tracing::info!("{}: {package_build_data:?}", rustc[package].manifest,);
}
}
}
@ -263,6 +295,12 @@ impl WorkspaceBuildScripts {
|package, cb| {
if let Some(&package) = by_id.get(package) {
cb(&workspace[package].name, &mut outputs[package]);
} else {
never!(
"Received compiler message for unknown package: {}\n {}",
package,
by_id.keys().join(", ")
);
}
},
progress,
@ -272,10 +310,7 @@ impl WorkspaceBuildScripts {
for package in workspace.packages() {
let package_build_data = &outputs[package];
if !package_build_data.is_empty() {
tracing::info!(
"{}: {package_build_data:?}",
workspace[package].manifest.parent(),
);
tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
}
}
}
@ -348,15 +383,23 @@ impl WorkspaceBuildScripts {
progress(format!(
"building compile-time-deps: proc-macro {name} built"
));
if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt {
data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro;
}
if !matches!(data.proc_macro_dylib_path, ProcMacroDylibPath::Path(_))
&& message
.target
.kind
.contains(&cargo_metadata::TargetKind::ProcMacro)
{
// Skip rmeta file
if let Some(filename) =
message.filenames.iter().find(|file| is_dylib(file))
{
let filename = AbsPath::assert(filename);
data.proc_macro_dylib_path = Some(filename.to_owned());
}
data.proc_macro_dylib_path =
match message.filenames.iter().find(|file| is_dylib(file)) {
Some(filename) => {
let filename = AbsPath::assert(filename);
ProcMacroDylibPath::Path(filename.to_owned())
}
None => ProcMacroDylibPath::DylibNotFound,
};
}
});
}
@ -393,14 +436,15 @@ impl WorkspaceBuildScripts {
current_dir: &AbsPath,
sysroot: &Sysroot,
toolchain: Option<&semver::Version>,
) -> io::Result<Command> {
) -> io::Result<(Option<temp_dir::TempDir>, Command)> {
match config.run_build_script_command.as_deref() {
Some([program, args @ ..]) => {
let mut cmd = toolchain::command(program, current_dir, &config.extra_env);
cmd.args(args);
Ok(cmd)
Ok((None, cmd))
}
_ => {
let mut requires_unstable_options = false;
let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
@ -416,7 +460,19 @@ impl WorkspaceBuildScripts {
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
let mut temp_dir_guard = None;
if toolchain
.is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
{
let lockfile_path =
<_ as AsRef<Utf8Path>>::as_ref(manifest_path).with_extension("lock");
if let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile_path) {
requires_unstable_options = true;
temp_dir_guard = Some(temp_dir);
cmd.arg("--lockfile-path");
cmd.arg(target_lockfile.as_str());
}
}
match &config.features {
CargoFeatures::All => {
cmd.arg("--all-features");
@ -438,6 +494,7 @@ impl WorkspaceBuildScripts {
}
if manifest_path.is_rust_manifest() {
requires_unstable_options = true;
cmd.arg("-Zscript");
}
@ -457,8 +514,7 @@ impl WorkspaceBuildScripts {
toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);
if cargo_comp_time_deps_available {
cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.arg("-Zunstable-options");
requires_unstable_options = true;
cmd.arg("--compile-time-deps");
// we can pass this unconditionally, because we won't actually build the
// binaries, and as such, this will succeed even on targets without libtest
@ -481,7 +537,11 @@ impl WorkspaceBuildScripts {
cmd.env("RA_RUSTC_WRAPPER", "1");
}
}
Ok(cmd)
if requires_unstable_options {
cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.arg("-Zunstable-options");
}
Ok((temp_dir_guard, cmd))
}
}
}


@ -1,4 +1,5 @@
//! Read `.cargo/config.toml` as a JSON object
use paths::{Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use toolchain::Tool;
@ -32,3 +33,24 @@ pub(crate) fn read(
Some(json)
}
pub(crate) fn make_lockfile_copy(
lockfile_path: &Utf8Path,
) -> Option<(temp_dir::TempDir, Utf8PathBuf)> {
let temp_dir = temp_dir::TempDir::with_prefix("rust-analyzer").ok()?;
let target_lockfile = temp_dir.path().join("Cargo.lock").try_into().ok()?;
match std::fs::copy(lockfile_path, &target_lockfile) {
Ok(_) => {
tracing::debug!("Copied lock file from `{}` to `{}`", lockfile_path, target_lockfile);
Some((temp_dir, target_lockfile))
}
// lockfile does not yet exist, so we can just create a new one in the temp dir
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Some((temp_dir, target_lockfile)),
Err(e) => {
tracing::warn!(
"Failed to copy lock file from `{lockfile_path}` to `{target_lockfile}`: {e}",
);
None
}
}
}
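// A rough usage sketch (paths illustrative); the returned `TempDir` guard
// must outlive every cargo invocation that reads the copied lockfile:
//
//     let lockfile = Utf8Path::new("/path/to/Cargo.lock");
//     if let Some((_guard, copy)) = make_lockfile_copy(lockfile) {
//         cmd.arg("--lockfile-path").arg(copy.as_str());
//         // keep `_guard` alive until `cmd` has finished running
//     }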


@ -15,16 +15,18 @@ use span::Edition;
use stdx::process::spawn_with_streaming_output;
use toolchain::Tool;
use crate::cargo_config_file::make_lockfile_copy;
use crate::{CfgOverrides, InvocationStrategy};
use crate::{ManifestPath, Sysroot};
const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version {
major: 1,
minor: 82,
patch: 0,
pre: semver::Prerelease::EMPTY,
build: semver::BuildMetadata::EMPTY,
};
pub(crate) const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version =
semver::Version {
major: 1,
minor: 82,
patch: 0,
pre: semver::Prerelease::EMPTY,
build: semver::BuildMetadata::EMPTY,
};
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output.
@ -245,7 +247,7 @@ pub enum TargetKind {
}
impl TargetKind {
fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
pub fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
for kind in kinds {
return match kind {
cargo_metadata::TargetKind::Bin => TargetKind::Bin,
@ -552,7 +554,10 @@ impl CargoWorkspace {
pub(crate) struct FetchMetadata {
command: cargo_metadata::MetadataCommand,
#[expect(dead_code)]
manifest_path: ManifestPath,
lockfile_path: Option<Utf8PathBuf>,
#[expect(dead_code)]
kind: &'static str,
no_deps: bool,
no_deps_result: anyhow::Result<cargo_metadata::Metadata>,
@ -596,25 +601,22 @@ impl FetchMetadata {
}
command.current_dir(current_dir);
let mut needs_nightly = false;
let mut other_options = vec![];
// cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
// the only relevant flags for metadata here are unstable ones, so we pass those along
// but nothing else
let mut extra_args = config.extra_args.iter();
while let Some(arg) = extra_args.next() {
if arg == "-Z" {
if let Some(arg) = extra_args.next() {
needs_nightly = true;
other_options.push("-Z".to_owned());
other_options.push(arg.to_owned());
}
if arg == "-Z"
&& let Some(arg) = extra_args.next()
{
other_options.push("-Z".to_owned());
other_options.push(arg.to_owned());
}
}
let mut lockfile_path = None;
if cargo_toml.is_rust_manifest() {
needs_nightly = true;
other_options.push("-Zscript".to_owned());
} else if config
.toolchain_version
@ -632,10 +634,6 @@ impl FetchMetadata {
command.other_options(other_options.clone());
if needs_nightly {
command.env("RUSTC_BOOTSTRAP", "1");
}
// Pre-fetch basic metadata using `--no-deps`, which:
// - avoids fetching registries like crates.io,
// - skips dependency resolution and does not modify lockfiles,
@ -655,7 +653,15 @@ impl FetchMetadata {
}
.with_context(|| format!("Failed to run `{cargo_command:?}`"));
Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options }
Self {
manifest_path: cargo_toml.clone(),
command,
lockfile_path,
kind: config.kind,
no_deps,
no_deps_result,
other_options,
}
}
pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> {
@ -672,40 +678,34 @@ impl FetchMetadata {
locked: bool,
progress: &dyn Fn(String),
) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } =
self;
_ = target_dir;
let Self {
mut command,
manifest_path: _,
lockfile_path,
kind: _,
no_deps,
no_deps_result,
mut other_options,
} = self;
if no_deps {
return no_deps_result.map(|m| (m, None));
}
let mut using_lockfile_copy = false;
// The manifest is a Rust file, so this means it's a script manifest
if let Some(lockfile) = lockfile_path {
let target_lockfile =
target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock");
match std::fs::copy(&lockfile, &target_lockfile) {
Ok(_) => {
using_lockfile_copy = true;
other_options.push("--lockfile-path".to_owned());
other_options.push(target_lockfile.to_string());
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
// There exists no lockfile yet
using_lockfile_copy = true;
other_options.push("--lockfile-path".to_owned());
other_options.push(target_lockfile.to_string());
}
Err(e) => {
tracing::warn!(
"Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
);
}
}
let mut _temp_dir_guard;
if let Some(lockfile) = lockfile_path
&& let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile)
{
_temp_dir_guard = temp_dir;
other_options.push("--lockfile-path".to_owned());
other_options.push(target_lockfile.to_string());
using_lockfile_copy = true;
}
if using_lockfile_copy {
if using_lockfile_copy || other_options.iter().any(|it| it.starts_with("-Z")) {
command.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
other_options.push("-Zunstable-options".to_owned());
command.env("RUSTC_BOOTSTRAP", "1");
}
// No need to lock it if we copied the lockfile; we won't modify the original after all.
// This way cargo cannot error out on us if the lockfile requires updating.
@ -714,13 +714,11 @@ impl FetchMetadata {
}
command.other_options(other_options);
// FIXME: Fetching metadata is a slow process, as it might require
// calling crates.io. We should be reporting progress here, but it's
// unclear whether cargo itself supports it.
progress("cargo metadata: started".to_owned());
let res = (|| -> anyhow::Result<(_, _)> {
let mut errored = false;
tracing::debug!("Running `{:?}`", command.cargo_command());
let output =
spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| {
errored = errored || line.starts_with("error") || line.starts_with("warning");

View File

@ -59,7 +59,7 @@ use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashSet;
pub use crate::{
build_dependencies::WorkspaceBuildScripts,
build_dependencies::{ProcMacroDylibPath, WorkspaceBuildScripts},
cargo_workspace::{
CargoConfig, CargoFeatures, CargoMetadataConfig, CargoWorkspace, Package, PackageData,
PackageDependency, RustLibSource, Target, TargetData, TargetKind,
@ -139,21 +139,22 @@ impl ProjectManifest {
}
fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> {
if path.file_name().unwrap_or_default() == target_file_name {
if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) {
return Some(manifest);
}
if path.file_name().unwrap_or_default() == target_file_name
&& let Ok(manifest) = ManifestPath::try_from(path.to_path_buf())
{
return Some(manifest);
}
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join(target_file_name);
if fs::metadata(&candidate).is_ok() {
if let Ok(manifest) = ManifestPath::try_from(candidate) {
return Some(manifest);
}
if fs::metadata(&candidate).is_ok()
&& let Ok(manifest) = ManifestPath::try_from(candidate)
{
return Some(manifest);
}
curr = path.parent();
}
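Aside: many hunks in this update are the same mechanical rewrite, collapsing nested `if let`s into let chains (stable on edition 2024, rustc 1.88+). In isolation, with hypothetical names:

```rust
// Before: nested conditionals, two levels of indentation.
fn first_even_nested(xs: &[i32]) -> Option<i32> {
    if !xs.is_empty() {
        if let Some(&x) = xs.iter().find(|&&x| x % 2 == 0) {
            return Some(x);
        }
    }
    None
}

// After: one let chain, one level of indentation.
fn first_even_chained(xs: &[i32]) -> Option<i32> {
    if !xs.is_empty()
        && let Some(&x) = xs.iter().find(|&&x| x % 2 == 0)
    {
        return Some(x);
    }
    None
}
```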

View File

@ -143,12 +143,11 @@ impl Sysroot {
Some(root) => {
// special case rustc, we can look that up directly in the sysroot's bin folder
// as it should never invoke another cargo binary
if let Tool::Rustc = tool {
if let Some(path) =
if let Tool::Rustc = tool
&& let Some(path) =
probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
{
return toolchain::command(path, current_dir, envs);
}
{
return toolchain::command(path, current_dir, envs);
}
let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs);
@ -291,29 +290,26 @@ impl Sysroot {
pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
self.workspace = workspace;
if self.error.is_none() {
if let Some(src_root) = &self.rust_lib_src_root {
let has_core = match &self.workspace {
RustLibSrcWorkspace::Workspace(ws) => {
ws.packages().any(|p| ws[p].name == "core")
}
RustLibSrcWorkspace::Json(project_json) => project_json
.crates()
.filter_map(|(_, krate)| krate.display_name.clone())
.any(|name| name.canonical_name().as_str() == "core"),
RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
RustLibSrcWorkspace::Empty => true,
if self.error.is_none()
&& let Some(src_root) = &self.rust_lib_src_root
{
let has_core = match &self.workspace {
RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
RustLibSrcWorkspace::Json(project_json) => project_json
.crates()
.filter_map(|(_, krate)| krate.display_name.clone())
.any(|name| name.canonical_name().as_str() == "core"),
RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
RustLibSrcWorkspace::Empty => true,
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
} else {
", try running `rustup component add rust-src` to possibly fix this"
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
} else {
", try running `rustup component add rust-src` to possibly fix this"
};
self.error = Some(format!(
"sysroot at `{src_root}` is missing a `core` library{var_note}",
));
}
self.error =
Some(format!("sysroot at `{src_root}` is missing a `core` library{var_note}",));
}
}
}

View File

@ -65,6 +65,7 @@ fn rustc_print_cfg(
let (sysroot, current_dir) = match config {
QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS);
if let Some(target) = target {
cmd.args(["--target", target]);
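This is the same internal override used for the metadata fetch above: it makes a stable cargo treat itself as nightly so that `-Z unstable-options` is accepted. Distilled (assuming `RUSTC_ARGS` is roughly `["--print", "cfg"]`):

```rust
use std::process::Command;

// The env var is an internal cargo escape hatch, as its name warns, and
// may stop working in a future cargo release.
fn print_cfg_command() -> Command {
    let mut cmd = Command::new("cargo");
    cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
        .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]);
    cmd
}
```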

View File

@ -24,7 +24,7 @@ use crate::{
CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
build_dependencies::BuildScriptOutput,
build_dependencies::{BuildScriptOutput, ProcMacroDylibPath},
cargo_config_file,
cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
@ -424,12 +424,12 @@ impl ProjectWorkspace {
sysroot.set_workspace(loaded_sysroot);
}
if !cargo.requires_rustc_private() {
if let Err(e) = &mut rustc {
// We don't need the rustc sources here,
// so just discard the error.
_ = e.take();
}
if !cargo.requires_rustc_private()
&& let Err(e) = &mut rustc
{
// We don't need the rustc sources here,
// so just discard the error.
_ = e.take();
}
Ok(ProjectWorkspace {
@ -1163,17 +1163,15 @@ fn project_json_to_crate_graph(
crate = display_name.as_ref().map(|name| name.canonical_name().as_str()),
"added root to crate graph"
);
if *is_proc_macro {
if let Some(path) = proc_macro_dylib_path.clone() {
let node = Ok((
display_name
.as_ref()
.map(|it| it.canonical_name().as_str().to_owned())
.unwrap_or_else(|| format!("crate{}", idx.0)),
path,
));
proc_macros.insert(crate_graph_crate_id, node);
}
if *is_proc_macro && let Some(path) = proc_macro_dylib_path.clone() {
let node = Ok((
display_name
.as_ref()
.map(|it| it.canonical_name().as_str().to_owned())
.unwrap_or_else(|| format!("crate{}", idx.0)),
path,
));
proc_macros.insert(crate_graph_crate_id, node);
}
(idx, crate_graph_crate_id)
},
@ -1318,16 +1316,17 @@ fn cargo_to_crate_graph(
public_deps.add_to_crate_graph(crate_graph, from);
// Add dep edge of all targets to the package's lib target
if let Some((to, name)) = lib_tgt.clone() {
if to != from && kind != TargetKind::BuildScript {
// (build script can not depend on its library target)
if let Some((to, name)) = lib_tgt.clone()
&& to != from
&& kind != TargetKind::BuildScript
{
// (a build script cannot depend on its library target)
// For root projects with dashes in their name,
// cargo metadata does not do any normalization,
// so we do it ourselves currently
let name = CrateName::normalize_dashes(&name);
add_dep(crate_graph, from, name, to);
}
// For root projects with dashes in their name,
// cargo metadata does not do any normalization,
// so we do it ourselves currently
let name = CrateName::normalize_dashes(&name);
add_dep(crate_graph, from, name, to);
}
}
}
@ -1638,9 +1637,19 @@ fn add_target_crate_root(
let proc_macro = match build_data {
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
None => Err(ProcMacroLoadingError::MissingDylibPath),
ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())),
ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt),
ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound
if has_errors =>
{
Err(ProcMacroLoadingError::FailedToBuild)
}
ProcMacroDylibPath::NotProcMacro => {
Err(ProcMacroLoadingError::ExpectedProcMacroArtifact)
}
ProcMacroDylibPath::DylibNotFound => {
Err(ProcMacroLoadingError::MissingDylibPath)
}
}
}
None => Err(ProcMacroLoadingError::NotYetBuilt),
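The `ProcMacroDylibPath` enum that this match consumes is exported from `build_dependencies` but its definition is not part of this diff; reconstructed from the arms above, it plausibly has this shape (the payload type is an assumption):

```rust
use paths::AbsPathBuf;

// Reconstruction from the match arms above, not the actual definition.
pub enum ProcMacroDylibPath {
    /// The build produced a proc-macro dylib at this path.
    Path(AbsPathBuf),
    /// Build scripts have not run yet.
    NotBuilt,
    /// The target is not a proc-macro, so no dylib is expected.
    NotProcMacro,
    /// The target is a proc-macro, but no dylib artifact was found.
    DylibNotFound,
}
```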
@ -1905,7 +1914,8 @@ fn cargo_target_dir(
meta.manifest_path(manifest);
// `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolution.
// So we can use it to get `target_directory` before copying lockfiles
let mut other_options = vec!["--no-deps".to_owned()];
meta.no_deps();
let mut other_options = vec![];
if manifest.is_rust_manifest() {
meta.env("RUSTC_BOOTSTRAP", "1");
other_options.push("-Zscript".to_owned());

View File

@ -656,22 +656,26 @@ impl flags::AnalysisStats {
let mut sw = self.stop_watch();
let mut all = 0;
let mut fail = 0;
for &body in bodies {
if matches!(body, DefWithBody::Variant(_)) {
for &body_id in bodies {
if matches!(body_id, DefWithBody::Variant(_)) {
continue;
}
let module = body_id.module(db);
if !self.should_process(db, body_id, module) {
continue;
}
all += 1;
let Err(e) = db.mir_body(body.into()) else {
let Err(e) = db.mir_body(body_id.into()) else {
continue;
};
if verbosity.is_spammy() {
let full_name = body
.module(db)
let full_name = module
.path_to_root(db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
.chain(Some(body.name(db).unwrap_or_else(Name::missing)))
.chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db, Edition::LATEST).to_string())
.join("::");
bar.println(format!("Mir body for {full_name} failed due to {e:?}"));
@ -727,26 +731,9 @@ impl flags::AnalysisStats {
let name = body_id.name(db).unwrap_or_else(Name::missing);
let module = body_id.module(db);
let display_target = module.krate().to_display_target(db);
let full_name = move || {
module
.krate()
.display_name(db)
.map(|it| it.canonical_name().as_str().to_owned())
.into_iter()
.chain(
module
.path_to_root(db)
.into_iter()
.filter_map(|it| it.name(db))
.rev()
.chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db, Edition::LATEST).to_string()),
)
.join("::")
};
if let Some(only_name) = self.only.as_deref() {
if name.display(db, Edition::LATEST).to_string() != only_name
&& full_name() != only_name
&& full_name(db, body_id, module) != only_name
{
continue;
}
@ -763,12 +750,17 @@ impl flags::AnalysisStats {
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
format!(
"processing: {} ({} {:?})",
full_name(db, body_id, module),
path,
syntax_range
)
} else {
format!("processing: {}", full_name())
format!("processing: {}", full_name(db, body_id, module))
}
} else {
format!("processing: {}", full_name())
format!("processing: {}", full_name(db, body_id, module))
}
};
if verbosity.is_spammy() {
@ -781,9 +773,11 @@ impl flags::AnalysisStats {
Ok(inference_result) => inference_result,
Err(p) => {
if let Some(s) = p.downcast_ref::<&str>() {
eprintln!("infer panicked for {}: {}", full_name(), s);
eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
} else if let Some(s) = p.downcast_ref::<String>() {
eprintln!("infer panicked for {}: {}", full_name(), s);
eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
} else {
eprintln!("infer panicked for {}", full_name(db, body_id, module));
}
panics += 1;
bar.inc(1);
@ -890,7 +884,7 @@ impl flags::AnalysisStats {
if verbosity.is_spammy() {
bar.println(format!(
"In {}: {} exprs, {} unknown, {} partial",
full_name(),
full_name(db, body_id, module),
num_exprs - previous_exprs,
num_exprs_unknown - previous_unknown,
num_exprs_partially_unknown - previous_partially_unknown
@ -993,7 +987,7 @@ impl flags::AnalysisStats {
if verbosity.is_spammy() {
bar.println(format!(
"In {}: {} pats, {} unknown, {} partial",
full_name(),
full_name(db, body_id, module),
num_pats - previous_pats,
num_pats_unknown - previous_unknown,
num_pats_partially_unknown - previous_partially_unknown
@ -1049,34 +1043,8 @@ impl flags::AnalysisStats {
bar.tick();
for &body_id in bodies {
let module = body_id.module(db);
let full_name = move || {
module
.krate()
.display_name(db)
.map(|it| it.canonical_name().as_str().to_owned())
.into_iter()
.chain(
module
.path_to_root(db)
.into_iter()
.filter_map(|it| it.name(db))
.rev()
.chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db, Edition::LATEST).to_string()),
)
.join("::")
};
if let Some(only_name) = self.only.as_deref() {
if body_id
.name(db)
.unwrap_or_else(Name::missing)
.display(db, Edition::LATEST)
.to_string()
!= only_name
&& full_name() != only_name
{
continue;
}
if !self.should_process(db, body_id, module) {
continue;
}
let msg = move || {
if verbosity.is_verbose() {
@ -1090,12 +1058,17 @@ impl flags::AnalysisStats {
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
format!(
"processing: {} ({} {:?})",
full_name(db, body_id, module),
path,
syntax_range
)
} else {
format!("processing: {}", full_name())
format!("processing: {}", full_name(db, body_id, module))
}
} else {
format!("processing: {}", full_name())
format!("processing: {}", full_name(db, body_id, module))
}
};
if verbosity.is_spammy() {
@ -1205,11 +1178,42 @@ impl flags::AnalysisStats {
eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
}
fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool {
if let Some(only_name) = self.only.as_deref() {
let name = body_id.name(db).unwrap_or_else(Name::missing);
if name.display(db, Edition::LATEST).to_string() != only_name
&& full_name(db, body_id, module) != only_name
{
return false;
}
}
true
}
fn stop_watch(&self) -> StopWatch {
StopWatch::start()
}
}
fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String {
module
.krate()
.display_name(db)
.map(|it| it.canonical_name().as_str().to_owned())
.into_iter()
.chain(
module
.path_to_root(db)
.into_iter()
.filter_map(|it| it.name(db))
.rev()
.chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db, Edition::LATEST).to_string()),
)
.join("::")
}
fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String {
let src = match sm.expr_syntax(expr_id) {
Ok(s) => s,

View File

@ -2162,6 +2162,7 @@ impl Config {
extra_test_bin_args: self.runnables_extraTestBinaryArgs(source_root).clone(),
extra_env: self.extra_env(source_root).clone(),
target_dir: self.target_dir_from_config(source_root),
set_test: true,
}
}
@ -2219,6 +2220,7 @@ impl Config {
extra_test_bin_args: self.runnables_extraTestBinaryArgs(source_root).clone(),
extra_env: self.check_extra_env(source_root),
target_dir: self.target_dir_from_config(source_root),
set_test: *self.cfg_setTest(source_root),
},
ansi_color_output: self.color_diagnostic_output(),
},

View File

@ -31,6 +31,7 @@ pub(crate) enum InvocationStrategy {
pub(crate) struct CargoOptions {
pub(crate) target_tuples: Vec<String>,
pub(crate) all_targets: bool,
pub(crate) set_test: bool,
pub(crate) no_default_features: bool,
pub(crate) all_features: bool,
pub(crate) features: Vec<String>,
@ -54,7 +55,13 @@ impl CargoOptions {
cmd.args(["--target", target.as_str()]);
}
if self.all_targets {
cmd.arg("--all-targets");
if self.set_test {
cmd.arg("--all-targets");
} else {
// No --benches unfortunately, as this implies --tests (see https://github.com/rust-lang/cargo/issues/6454),
// and users setting `cfg.setTest = false` probably prefer disabling benches to enabling tests.
cmd.args(["--lib", "--bins", "--examples"]);
}
}
if self.all_features {
cmd.arg("--all-features");
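Distilled, the new branch maps the two flags onto cargo's target selection like this (a paraphrase of the logic above, not code from the diff):

```rust
// When set_test is false we deliberately drop `--tests`, and, since cargo's
// `--benches` implies `--tests` (rust-lang/cargo#6454), `--benches` as well.
fn target_selection(all_targets: bool, set_test: bool) -> &'static [&'static str] {
    match (all_targets, set_test) {
        (false, _) => &[],
        (true, true) => &["--all-targets"],
        (true, false) => &["--lib", "--bins", "--examples"],
    }
}
```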
@ -104,7 +111,18 @@ impl fmt::Display for FlycheckConfig {
match self {
FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"),
FlycheckConfig::CustomCommand { command, args, .. } => {
write!(f, "{command} {}", args.join(" "))
// Don't show `my_custom_check --foo $saved_file` literally to the user, as it
// looks like we've forgotten to substitute $saved_file.
//
// Instead, show `my_custom_check --foo ...`. The
// actual path is often too long to be worth showing
// in the IDE (e.g. in the VS Code status bar).
let display_args = args
.iter()
.map(|arg| if arg == SAVED_FILE_PLACEHOLDER { "..." } else { arg })
.collect::<Vec<_>>();
write!(f, "{command} {}", display_args.join(" "))
}
}
}

View File

@ -101,7 +101,7 @@ WhereClause =
'where' predicates:(WherePred (',' WherePred)* ','?)
WherePred =
('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
ForBinder? (Lifetime | Type) ':' TypeBoundList?
//*************************//
@ -534,10 +534,10 @@ FieldExpr =
Attr* Expr '.' NameRef
ClosureExpr =
Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType?
Attr* ForBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType?
body:Expr
ClosureBinder =
ForBinder =
'for' GenericParamList
IfExpr =
@ -658,7 +658,7 @@ FnPtrType =
'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
ForType =
'for' GenericParamList Type
ForBinder Type
ImplTraitType =
'impl' TypeBoundList
@ -671,7 +671,7 @@ TypeBoundList =
TypeBound =
Lifetime
| ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type
| ForBinder? ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type
| 'use' UseBoundGenericArgs
UseBoundGenericArgs =
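For orientation, here is the surface syntax behind each grammar position that now shares the single `ForBinder` node (a hand-written illustration; the closure form additionally requires the nightly `closure_lifetime_binder` feature):

```rust
// WherePred: binder on a whole where-clause predicate.
fn call_pred<F>(f: F)
where
    for<'a> F: Fn(&'a str),
{
    f("hi");
}

// TypeBound: binder directly on a single bound.
fn call_bound<G: for<'a> Fn(&'a str)>(g: G) {
    g("hi");
}

// ForType: binder on a higher-ranked fn-pointer type.
type Callback = for<'a> fn(&'a str) -> usize;

// ClosureExpr: `let f = for<'a> |x: &'a str| x.len();` (nightly-only).
```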

View File

@ -393,8 +393,7 @@ where
let pred = predicates.next().unwrap();
let mut bounds = pred.type_bound_list().unwrap().bounds();
assert!(pred.for_token().is_none());
assert!(pred.generic_param_list().is_none());
assert!(pred.for_binder().is_none());
assert_eq!("T", pred.ty().unwrap().syntax().text().to_string());
assert_bound("Clone", bounds.next());
assert_bound("Copy", bounds.next());
@ -432,8 +431,10 @@ where
let pred = predicates.next().unwrap();
let mut bounds = pred.type_bound_list().unwrap().bounds();
assert!(pred.for_token().is_some());
assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string());
assert_eq!(
"<'a>",
pred.for_binder().unwrap().generic_param_list().unwrap().syntax().text().to_string()
);
assert_eq!("F", pred.ty().unwrap().syntax().text().to_string());
assert_bound("Fn(&'a str)", bounds.next());
}

View File

@ -6,9 +6,12 @@ use std::{fmt, iter, ops};
use crate::{
AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
ast::{self, AstNode, make},
syntax_editor::{SyntaxEditor, SyntaxMappingBuilder},
ted,
};
use super::syntax_factory::SyntaxFactory;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct IndentLevel(pub u8);
@ -95,6 +98,24 @@ impl IndentLevel {
}
}
pub(super) fn clone_increase_indent(self, node: &SyntaxNode) -> SyntaxNode {
let node = node.clone_subtree();
let mut editor = SyntaxEditor::new(node.clone());
let tokens = node
.preorder_with_tokens()
.filter_map(|event| match event {
rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
_ => None,
})
.filter_map(ast::Whitespace::cast)
.filter(|ws| ws.text().contains('\n'));
for ws in tokens {
let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax()));
editor.replace(ws.syntax(), &new_ws);
}
editor.finish().new_root().clone()
}
pub(super) fn decrease_indent(self, node: &SyntaxNode) {
let tokens = node.preorder_with_tokens().filter_map(|event| match event {
rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
@ -111,36 +132,54 @@ impl IndentLevel {
}
}
}
pub(super) fn clone_decrease_indent(self, node: &SyntaxNode) -> SyntaxNode {
let node = node.clone_subtree();
let mut editor = SyntaxEditor::new(node.clone());
let tokens = node
.preorder_with_tokens()
.filter_map(|event| match event {
rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
_ => None,
})
.filter_map(ast::Whitespace::cast)
.filter(|ws| ws.text().contains('\n'));
for ws in tokens {
let new_ws =
make::tokens::whitespace(&ws.syntax().text().replace(&format!("\n{self}"), "\n"));
editor.replace(ws.syntax(), &new_ws);
}
editor.finish().new_root().clone()
}
}
fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
iter::successors(Some(token), |token| token.prev_token())
}
/// Soft-deprecated in favor of mutable tree editing API `edit_in_place::Ident`.
pub trait AstNodeEdit: AstNode + Clone + Sized {
fn indent_level(&self) -> IndentLevel {
IndentLevel::from_node(self.syntax())
}
#[must_use]
fn indent(&self, level: IndentLevel) -> Self {
fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
let res = node.clone_subtree().clone_for_update();
level.increase_indent(&res);
res.clone_subtree()
Self::cast(level.clone_increase_indent(self.syntax())).unwrap()
}
#[must_use]
fn indent_with_mapping(&self, level: IndentLevel, make: &SyntaxFactory) -> Self {
let new_node = self.indent(level);
if let Some(mut mapping) = make.mappings() {
let mut builder = SyntaxMappingBuilder::new(new_node.syntax().clone());
for (old, new) in self.syntax().children().zip(new_node.syntax().children()) {
builder.map_node(old, new);
}
builder.finish(&mut mapping);
}
Self::cast(indent_inner(self.syntax(), level)).unwrap()
new_node
}
#[must_use]
fn dedent(&self, level: IndentLevel) -> Self {
fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
let res = node.clone_subtree().clone_for_update();
level.decrease_indent(&res);
res.clone_subtree()
}
Self::cast(dedent_inner(self.syntax(), level)).unwrap()
Self::cast(level.clone_decrease_indent(self.syntax())).unwrap()
}
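A minimal usage sketch of the rewritten indent API (module path assumed to be `syntax::ast::edit`; the result is a detached copy, which is what allows the `SyntaxEditor`-based implementation instead of in-place mutation):

```rust
use syntax::ast::edit::{AstNodeEdit, IndentLevel};

// Indent any AST node by one level. The original tree is untouched because
// `clone_increase_indent` rewrites whitespace in a cloned subtree.
fn indent_by_one<N: AstNodeEdit>(node: &N) -> N {
    node.indent(IndentLevel(1))
}
```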
#[must_use]
fn reset_indent(&self) -> Self {

View File

@ -644,7 +644,7 @@ impl Removable for ast::Use {
impl ast::Impl {
pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
if self.assoc_item_list().is_none() {
let assoc_item_list = make::assoc_item_list().clone_for_update();
let assoc_item_list = make::assoc_item_list(None).clone_for_update();
ted::append_child(self.syntax(), assoc_item_list.syntax());
}
self.assoc_item_list().unwrap()

View File

@ -377,22 +377,13 @@ impl CastExpr {
#[inline]
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
}
pub struct ClosureBinder {
pub(crate) syntax: SyntaxNode,
}
impl ClosureBinder {
#[inline]
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
#[inline]
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
pub struct ClosureExpr {
pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for ClosureExpr {}
impl ClosureExpr {
#[inline]
pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) }
pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
#[inline]
@ -615,6 +606,15 @@ impl FnPtrType {
#[inline]
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
}
pub struct ForBinder {
pub(crate) syntax: SyntaxNode,
}
impl ForBinder {
#[inline]
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
#[inline]
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
pub struct ForExpr {
pub(crate) syntax: SyntaxNode,
}
@ -632,11 +632,9 @@ pub struct ForType {
}
impl ForType {
#[inline]
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
#[inline]
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
pub struct FormatArgsArg {
pub(crate) syntax: SyntaxNode,
@ -1765,6 +1763,8 @@ pub struct TypeBound {
pub(crate) syntax: SyntaxNode,
}
impl TypeBound {
#[inline]
pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
#[inline]
@ -1938,13 +1938,11 @@ pub struct WherePred {
impl ast::HasTypeBounds for WherePred {}
impl WherePred {
#[inline]
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
#[inline]
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
pub struct WhileExpr {
pub(crate) syntax: SyntaxNode,
@ -3239,42 +3237,6 @@ impl fmt::Debug for CastExpr {
f.debug_struct("CastExpr").field("syntax", &self.syntax).finish()
}
}
impl AstNode for ClosureBinder {
#[inline]
fn kind() -> SyntaxKind
where
Self: Sized,
{
CLOSURE_BINDER
}
#[inline]
fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER }
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
#[inline]
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl hash::Hash for ClosureBinder {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
}
impl Eq for ClosureBinder {}
impl PartialEq for ClosureBinder {
fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
}
impl Clone for ClosureBinder {
fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
}
impl fmt::Debug for ClosureBinder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ClosureBinder").field("syntax", &self.syntax).finish()
}
}
impl AstNode for ClosureExpr {
#[inline]
fn kind() -> SyntaxKind
@ -3815,6 +3777,42 @@ impl fmt::Debug for FnPtrType {
f.debug_struct("FnPtrType").field("syntax", &self.syntax).finish()
}
}
impl AstNode for ForBinder {
#[inline]
fn kind() -> SyntaxKind
where
Self: Sized,
{
FOR_BINDER
}
#[inline]
fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_BINDER }
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
#[inline]
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl hash::Hash for ForBinder {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
}
impl Eq for ForBinder {}
impl PartialEq for ForBinder {
fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
}
impl Clone for ForBinder {
fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
}
impl fmt::Debug for ForBinder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ForBinder").field("syntax", &self.syntax).finish()
}
}
impl AstNode for ForExpr {
#[inline]
fn kind() -> SyntaxKind
@ -10146,11 +10144,6 @@ impl std::fmt::Display for CastExpr {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ClosureBinder {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ClosureExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
@ -10226,6 +10219,11 @@ impl std::fmt::Display for FnPtrType {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ForBinder {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ForExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)

View File

@ -229,8 +229,18 @@ pub fn ty_fn_ptr<I: Iterator<Item = Param>>(
}
}
pub fn assoc_item_list() -> ast::AssocItemList {
ast_from_text("impl C for D {}")
pub fn assoc_item_list(
body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
) -> ast::AssocItemList {
let is_break_braces = body.is_some();
let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() };
let body_indent = if is_break_braces { " ".to_owned() } else { String::new() };
let body = match body {
Some(bd) => bd.iter().map(|elem| elem.to_string()).join("\n\n "),
None => String::new(),
};
ast_from_text(&format!("impl C for D {{{body_newline}{body_indent}{body}{body_newline}}}"))
}
fn merge_gen_params(
@ -273,7 +283,7 @@ pub fn impl_(
generic_args: Option<ast::GenericArgList>,
path_type: ast::Type,
where_clause: Option<ast::WhereClause>,
body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
body: Option<ast::AssocItemList>,
) -> ast::Impl {
let gen_args = generic_args.map_or_else(String::new, |it| it.to_string());
@ -281,20 +291,13 @@ pub fn impl_(
let body_newline =
if where_clause.is_some() && body.is_none() { "\n".to_owned() } else { String::new() };
let where_clause = match where_clause {
Some(pr) => format!("\n{pr}\n"),
None => " ".to_owned(),
};
let body = match body {
Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
None => String::new(),
};
ast_from_text(&format!(
"impl{gen_params} {path_type}{gen_args}{where_clause}{{{body_newline}{body}}}"
))
let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string());
ast_from_text(&format!("impl{gen_params} {path_type}{gen_args}{where_clause}{body}"))
}
pub fn impl_trait(
@ -308,7 +311,7 @@ pub fn impl_trait(
ty: ast::Type,
trait_where_clause: Option<ast::WhereClause>,
ty_where_clause: Option<ast::WhereClause>,
body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
body: Option<ast::AssocItemList>,
) -> ast::Impl {
let is_unsafe = if is_unsafe { "unsafe " } else { "" };
@ -330,13 +333,10 @@ pub fn impl_trait(
let where_clause = merge_where_clause(ty_where_clause, trait_where_clause)
.map_or_else(|| " ".to_owned(), |wc| format!("\n{wc}\n"));
let body = match body {
Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
None => String::new(),
};
let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string());
ast_from_text(&format!(
"{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}"
"{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{body}"
))
}
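A sketch of how the two changed constructors compose now that `impl_` takes a ready-made `ast::AssocItemList` (the leading generic-params argument of `impl_` is cut off by this hunk, so its position here is an assumption):

```rust
use either::Either;
use syntax::ast::{self, make};

// Build the item list first, then hand the whole AssocItemList to impl_.
fn impl_with_body(path_type: ast::Type, items: Vec<ast::AssocItem>) -> ast::Impl {
    let body = make::assoc_item_list(Some(items.into_iter().map(Either::Right).collect()));
    make::impl_(None, None, path_type, None, Some(body))
}
```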

View File

@ -805,9 +805,7 @@ impl ast::SelfParam {
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum TypeBoundKind {
/// Trait
PathType(ast::PathType),
/// for<'a> ...
ForType(ast::ForType),
PathType(Option<ast::ForBinder>, ast::PathType),
/// use
Use(ast::UseBoundGenericArgs),
/// 'a
@ -817,9 +815,7 @@ pub enum TypeBoundKind {
impl ast::TypeBound {
pub fn kind(&self) -> TypeBoundKind {
if let Some(path_type) = support::children(self.syntax()).next() {
TypeBoundKind::PathType(path_type)
} else if let Some(for_type) = support::children(self.syntax()).next() {
TypeBoundKind::ForType(for_type)
TypeBoundKind::PathType(self.for_binder(), path_type)
} else if let Some(args) = self.use_bound_generic_args() {
TypeBoundKind::Use(args)
} else if let Some(lifetime) = self.lifetime() {

View File

@ -38,7 +38,7 @@ impl SyntaxFactory {
self.mappings.as_ref().map(|mappings| mappings.take()).unwrap_or_default()
}
fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
pub(crate) fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
self.mappings.as_ref().map(|it| it.borrow_mut())
}
}

View File

@ -5,7 +5,7 @@
//! [`SyntaxEditor`]: https://github.com/dotnet/roslyn/blob/43b0b05cc4f492fd5de00f6f6717409091df8daa/src/Workspaces/Core/Portable/Editing/SyntaxEditor.cs
use std::{
fmt,
fmt, iter,
num::NonZeroU32,
ops::RangeInclusive,
sync::atomic::{AtomicU32, Ordering},
@ -41,6 +41,15 @@ impl SyntaxEditor {
self.annotations.push((element.syntax_element(), annotation))
}
pub fn add_annotation_all(
&mut self,
elements: Vec<impl Element>,
annotation: SyntaxAnnotation,
) {
self.annotations
.extend(elements.into_iter().map(|e| e.syntax_element()).zip(iter::repeat(annotation)));
}
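A usage sketch for the new bulk method (assuming `SyntaxAnnotation::default()` mints a fresh annotation and the type is `Copy`, as elsewhere in this module):

```rust
use syntax::SyntaxElement;
use syntax::syntax_editor::{SyntaxAnnotation, SyntaxEditor};

// Attach one annotation to every element so the rewritten nodes can be
// located again in the tree produced by `editor.finish()`.
fn annotate_all(editor: &mut SyntaxEditor, elements: Vec<SyntaxElement>) -> SyntaxAnnotation {
    let annotation = SyntaxAnnotation::default();
    editor.add_annotation_all(elements, annotation);
    annotation
}
```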
pub fn merge(&mut self, mut other: SyntaxEditor) {
debug_assert!(
self.root == other.root || other.root.ancestors().any(|node| node == self.root),

View File

@ -92,6 +92,42 @@ fn get_or_insert_comma_after(editor: &mut SyntaxEditor, syntax: &SyntaxNode) ->
}
}
impl ast::AssocItemList {
/// Adds new associated items after all of the existing associated items.
///
/// Attention! This function does align the first line of `item` with respect to `self`,
/// but it does _not_ change indentation of other lines (if any).
pub fn add_items(&self, editor: &mut SyntaxEditor, items: Vec<ast::AssocItem>) {
let (indent, position, whitespace) = match self.assoc_items().last() {
Some(last_item) => (
IndentLevel::from_node(last_item.syntax()),
Position::after(last_item.syntax()),
"\n\n",
),
None => match self.l_curly_token() {
Some(l_curly) => {
normalize_ws_between_braces(editor, self.syntax());
(IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
}
None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
},
};
let elements: Vec<SyntaxElement> = items
.into_iter()
.enumerate()
.flat_map(|(i, item)| {
let whitespace = if i != 0 { "\n\n" } else { whitespace };
vec![
make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
item.syntax().clone().into(),
]
})
.collect();
editor.insert_all(position, elements);
}
}
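And `add_items` in use together with `SyntaxEditor` (grounded in the snippet above; `finish().new_root()` mirrors the indent helpers earlier in this diff):

```rust
use syntax::{AstNode, SyntaxNode, ast, syntax_editor::SyntaxEditor};

// Append items after the existing ones; add_items takes care of the
// indentation and the blank lines between items.
fn append_assoc_items(impl_: &ast::Impl, items: Vec<ast::AssocItem>) -> Option<SyntaxNode> {
    let list = impl_.assoc_item_list()?;
    let mut editor = SyntaxEditor::new(impl_.syntax().clone());
    list.add_items(&mut editor, items);
    Some(editor.finish().new_root().clone())
}
```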
impl ast::VariantList {
pub fn add_variant(&self, editor: &mut SyntaxEditor, variant: &ast::Variant) {
let make = SyntaxFactory::without_mappings();

View File

@ -252,18 +252,8 @@ Release steps:
4. Commit & push the changelog.
5. Run `cargo xtask publish-release-notes <CHANGELOG>` -- this will convert the changelog entry in AsciiDoc to Markdown and update the body of GitHub Releases entry.
6. Tweet.
7. Make a new branch and run `cargo xtask rustc-pull`, open a PR, and merge it.
This will pull any changes from `rust-lang/rust` into `rust-analyzer`.
8. Switch to `master`, pull, then run `cargo xtask rustc-push --rust-path ../rust-rust-analyzer --rust-fork matklad/rust`.
Replace `matklad/rust` with your own fork of `rust-lang/rust`.
You can use the token to authenticate when you get prompted for a password, since `josh` will push over HTTPS, not SSH.
This will push the `rust-analyzer` changes to your fork.
You can then open a PR against `rust-lang/rust`.
Note: besides the `rust-rust-analyzer` clone, the Josh cache (stored under `~/.cache/rust-analyzer-josh`) will contain a bare clone of `rust-lang/rust`.
This currently takes about 3.5 GB.
This [HackMD](https://hackmd.io/7pOuxnkdQDaL1Y1FQr65xg) has details about how `josh` syncs work.
7. Perform a subtree [pull](#performing-a-pull).
8. Perform a subtree [push](#performing-a-push).
If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.
If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over.
@ -288,3 +278,43 @@ There are two sets of people with extra permissions:
If you don't feel like reviewing for whatever reason, someone else will pick the review up (but please speak up if you don't feel like it)!
* The [rust-lang](https://github.com/rust-lang) team [t-rust-analyzer-contributors](https://github.com/rust-lang/team/blob/master/teams/rust-analyzer-contributors.toml).
This team has general triaging permissions, allowing them to label, close, and re-open issues.
## Synchronizing subtree changes
`rust-analyzer` is a [josh](https://josh-project.github.io/josh/intro.html) subtree of the [rust-lang/rust](https://github.com/rust-lang/rust)
repository. We use the [rustc-josh-sync](https://github.com/rust-lang/josh-sync) tool to perform synchronization between these two
repositories. You can find documentation of the tool [here](https://github.com/rust-lang/josh-sync).
You can install the synchronization tool with the following command:
```
cargo install --locked --git https://github.com/rust-lang/josh-sync
```
Both pulls (synchronizing changes from rust-lang/rust into rust-analyzer) and pushes (synchronizing
changes from rust-analyzer into rust-lang/rust) are performed from this repository.
changes from rust-analyzer to rust-lang/rust) are performed from this repository.
Usually we first perform a pull, wait for it to be merged, and then perform a push.
### Performing a pull
1) Check out a new branch that will be used to create a PR against rust-analyzer
2) Run the pull command
```
rustc-josh-sync pull
```
3) Push the branch to your fork of `rust-analyzer` and create a PR
- If you have the `gh` CLI installed, `rustc-josh-sync` can create the PR for you.
### Performing a push
Wait for the previous pull to be merged.
1) Switch to `master` and pull
2) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account
```
rustc-josh-sync push <branch-name> <gh-username>
```
- The push will ask you to download a checkout of the `rust-lang/rust` repository.
- If you get prompted for a password, see [this](https://github.com/rust-lang/josh-sync?tab=readme-ov-file#git-peculiarities).
3) Create a PR from `<branch-name>` into `rust-lang/rust`
> Besides the `rust` checkout, the Josh cache (stored under `~/.cache/rustc-josh`) will contain a bare clone of `rust-lang/rust`. This currently takes several GBs.

View File

@ -3336,15 +3336,16 @@
}
},
"node_modules/form-data": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"dev": true,
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {

View File

@ -8,10 +8,9 @@ import type { Disposable } from "vscode";
export type RunnableEnvCfgItem = {
mask?: string;
env: Record<string, string>;
env: { [key: string]: { toString(): string } | null };
platform?: string | string[];
};
export type RunnableEnvCfg = Record<string, string> | RunnableEnvCfgItem[];
type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSelector };
@ -261,18 +260,13 @@ export class Config {
return this.get<boolean | undefined>("testExplorer");
}
runnablesExtraEnv(label: string): Record<string, string> | undefined {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const item = this.get<any>("runnables.extraEnv") ?? this.get<any>("runnableEnv");
if (!item) return undefined;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const fixRecord = (r: Record<string, any>) => {
for (const key in r) {
if (typeof r[key] !== "string") {
r[key] = String(r[key]);
}
}
};
runnablesExtraEnv(label: string): Env {
const serverEnv = this.serverExtraEnv;
let extraEnv =
this.get<
RunnableEnvCfgItem[] | { [key: string]: { toString(): string } | null } | null
>("runnables.extraEnv") ?? {};
if (!extraEnv) return serverEnv;
const platform = process.platform;
const checkPlatform = (it: RunnableEnvCfgItem) => {
@ -283,19 +277,25 @@ export class Config {
return true;
};
if (item instanceof Array) {
if (extraEnv instanceof Array) {
const env = {};
for (const it of item) {
for (const it of extraEnv) {
const masked = !it.mask || new RegExp(it.mask).test(label);
if (masked && checkPlatform(it)) {
Object.assign(env, it.env);
}
}
fixRecord(env);
return env;
extraEnv = env;
}
fixRecord(item);
return item;
const runnableExtraEnv = substituteVariablesInEnv(
Object.fromEntries(
Object.entries(extraEnv).map(([k, v]) => [
k,
typeof v === "string" ? v : v?.toString(),
]),
),
);
return { ...runnableExtraEnv, ...serverEnv };
}
get restartServerOnConfigChange() {

View File

@ -6,7 +6,14 @@ import type * as ra from "./lsp_ext";
import { Cargo } from "./toolchain";
import type { Ctx } from "./ctx";
import { createTaskFromRunnable, prepareEnv } from "./run";
import { execute, isCargoRunnableArgs, unwrapUndefinable, log, normalizeDriveLetter } from "./util";
import {
execute,
isCargoRunnableArgs,
unwrapUndefinable,
log,
normalizeDriveLetter,
Env,
} from "./util";
import type { Config } from "./config";
// Here we want to keep track on everything that's currently running
@ -206,10 +213,7 @@ type SourceFileMap = {
destination: string;
};
async function discoverSourceFileMap(
env: Record<string, string>,
cwd: string,
): Promise<SourceFileMap | undefined> {
async function discoverSourceFileMap(env: Env, cwd: string): Promise<SourceFileMap | undefined> {
const sysroot = env["RUSTC_TOOLCHAIN"];
if (sysroot) {
// let's try to use the default toolchain
@ -232,7 +236,7 @@ type PropertyFetcher<Config, Input, Key extends keyof Config> = (
type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfig<Type>> = {
executableProperty: keyof DebugConfig;
environmentProperty: PropertyFetcher<DebugConfig, Record<string, string>, keyof DebugConfig>;
environmentProperty: PropertyFetcher<DebugConfig, Env, keyof DebugConfig>;
runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>;
sourceFileMapProperty?: keyof DebugConfig;
type: Type;
@ -276,7 +280,7 @@ const knownEngines: {
"environment",
Object.entries(env).map((entry) => ({
name: entry[0],
value: entry[1],
value: entry[1] ?? "",
})),
],
runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [
@ -304,10 +308,7 @@ const knownEngines: {
},
};
async function getDebugExecutable(
runnableArgs: ra.CargoRunnableArgs,
env: Record<string, string>,
): Promise<string> {
async function getDebugExecutable(runnableArgs: ra.CargoRunnableArgs, env: Env): Promise<string> {
const cargo = new Cargo(runnableArgs.workspaceRoot || ".", env);
const executable = await cargo.executableFromArgs(runnableArgs);
@ -328,7 +329,7 @@ function getDebugConfig(
runnable: ra.Runnable,
runnableArgs: ra.CargoRunnableArgs,
executable: string,
env: Record<string, string>,
env: Env,
sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
const {
@ -380,14 +381,14 @@ type CodeLldbDebugConfig = {
args: string[];
sourceMap: Record<string, string> | undefined;
sourceLanguages: ["rust"];
env: Record<string, string>;
env: Env;
} & BaseDebugConfig<"lldb">;
type NativeDebugConfig = {
target: string;
// See https://github.com/WebFreak001/code-debug/issues/359
arguments: string;
env: Record<string, string>;
env: Env;
valuesFormatting: "prettyPrinters";
} & BaseDebugConfig<"gdb">;

View File

@ -7,7 +7,7 @@ import type { CtxInit } from "./ctx";
import { makeDebugConfig } from "./debug";
import type { Config } from "./config";
import type { LanguageClient } from "vscode-languageclient/node";
import { log, unwrapUndefinable, type RustEditor } from "./util";
import { Env, log, unwrapUndefinable, type RustEditor } from "./util";
const quickPickButtons = [
{ iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." },
@ -122,11 +122,8 @@ export class RunnableQuickPick implements vscode.QuickPickItem {
}
}
export function prepareBaseEnv(
inheritEnv: boolean,
base?: Record<string, string>,
): Record<string, string> {
const env: Record<string, string> = { RUST_BACKTRACE: "short" };
export function prepareBaseEnv(inheritEnv: boolean, base?: Env): Env {
const env: Env = { RUST_BACKTRACE: "short" };
if (inheritEnv) {
Object.assign(env, process.env);
}
@ -136,11 +133,7 @@ export function prepareBaseEnv(
return env;
}
export function prepareEnv(
inheritEnv: boolean,
runnableEnv?: Record<string, string>,
runnableEnvCfg?: Record<string, string>,
): Record<string, string> {
export function prepareEnv(inheritEnv: boolean, runnableEnv?: Env, runnableEnvCfg?: Env): Env {
const env = prepareBaseEnv(inheritEnv, runnableEnv);
if (runnableEnvCfg) {

View File

@ -1,6 +1,7 @@
import * as vscode from "vscode";
import type { Config } from "./config";
import * as toolchain from "./toolchain";
import { Env } from "./util";
// This ends up as the `type` key in tasks.json. RLS also uses `cargo` and
// our configuration should be compatible with it so use the same key.
@ -117,8 +118,8 @@ export async function buildRustTask(
export async function targetToExecution(
definition: TaskDefinition,
options?: {
env?: { [key: string]: string };
cwd?: string;
env?: Env;
},
cargo?: string,
): Promise<vscode.ProcessExecution | vscode.ShellExecution> {
@ -131,7 +132,12 @@ export async function targetToExecution(
command = definition.command;
args = definition.args || [];
}
return new vscode.ProcessExecution(command, args, options);
return new vscode.ProcessExecution(command, args, {
cwd: options?.cwd,
env: Object.fromEntries(
Object.entries(options?.env ?? {}).map(([key, value]) => [key, value ?? ""]),
),
});
}
export function activateTaskProvider(config: Config): vscode.Disposable {

View File

@ -3,7 +3,7 @@ import * as os from "os";
import * as path from "path";
import * as readline from "readline";
import * as vscode from "vscode";
import { log, memoizeAsync, unwrapUndefinable } from "./util";
import { Env, log, memoizeAsync, unwrapUndefinable } from "./util";
import type { CargoRunnableArgs } from "./lsp_ext";
interface CompilationArtifact {
@ -37,7 +37,7 @@ interface CompilerMessage {
export class Cargo {
constructor(
readonly rootFolder: string,
readonly env: Record<string, string>,
readonly env: Env,
) {}
// Made public for testing purposes
@ -156,7 +156,7 @@ export class Cargo {
/** Mirrors `toolchain::cargo()` implementation */
// FIXME: The server should provide this
export function cargoPath(env?: Record<string, string>): Promise<string> {
export function cargoPath(env?: Env): Promise<string> {
if (env?.["RUSTC_TOOLCHAIN"]) {
return Promise.resolve("cargo");
}

2
josh-sync.toml Normal file
View File

@ -0,0 +1,2 @@
repo = "rust-analyzer"
filter = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer"

View File

@ -1 +1 @@
e05ab47e6c418fb2b9faa2eae9a7e70c65c98eaa
733dab558992d902d6d17576de1da768094e2cf3

View File

@ -17,6 +17,7 @@ exclude_titles = [ # exclude syncs from subtree in rust-lang/rust
"sync from downstream",
"Sync from rust",
"sync from rust",
"Rustc pull update",
]
labels = ["has-merge-commits", "S-waiting-on-author"]
@ -27,3 +28,6 @@ labels = ["has-merge-commits", "S-waiting-on-author"]
# Prevents mentions in commits to avoid users being spammed
[no-mentions]
# Automatically close and reopen PRs made by bots to run CI on them
[bot-pull-requests]

View File

@ -8,7 +8,6 @@ rust-version.workspace = true
[dependencies]
anyhow.workspace = true
directories = "6.0"
flate2 = "1.1.2"
write-json = "0.1.4"
xshell.workspace = true

View File

@ -59,20 +59,6 @@ xflags::xflags! {
optional --dry-run
}
cmd rustc-pull {
/// rustc commit to pull.
optional --commit refspec: String
}
cmd rustc-push {
/// rust local path, e.g. `../rust-rust-analyzer`.
required --rust-path rust_path: String
/// rust fork name, e.g. `matklad/rust`.
required --rust-fork rust_fork: String
/// branch name.
optional --branch branch: String
}
cmd dist {
/// Use mimalloc allocator for server
optional --mimalloc
@ -121,8 +107,6 @@ pub enum XtaskCmd {
Install(Install),
FuzzTests(FuzzTests),
Release(Release),
RustcPull(RustcPull),
RustcPush(RustcPush),
Dist(Dist),
PublishReleaseNotes(PublishReleaseNotes),
Metrics(Metrics),
@ -151,18 +135,6 @@ pub struct Release {
pub dry_run: bool,
}
#[derive(Debug)]
pub struct RustcPull {
pub commit: Option<String>,
}
#[derive(Debug)]
pub struct RustcPush {
pub rust_path: String,
pub rust_fork: String,
pub branch: Option<String>,
}
#[derive(Debug)]
pub struct Dist {
pub mimalloc: bool,

View File

@ -42,8 +42,6 @@ fn main() -> anyhow::Result<()> {
flags::XtaskCmd::Install(cmd) => cmd.run(sh),
flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh),
flags::XtaskCmd::Release(cmd) => cmd.run(sh),
flags::XtaskCmd::RustcPull(cmd) => cmd.run(sh),
flags::XtaskCmd::RustcPush(cmd) => cmd.run(sh),
flags::XtaskCmd::Dist(cmd) => cmd.run(sh),
flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh),
flags::XtaskCmd::Metrics(cmd) => cmd.run(sh),

View File

@ -1,12 +1,5 @@
mod changelog;
use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;
use anyhow::{Context as _, bail};
use directories::ProjectDirs;
use stdx::JodChild;
use xshell::{Shell, cmd};
use crate::{date_iso, flags, is_release_tag, project_root};
@ -59,171 +52,3 @@ impl flags::Release {
Ok(())
}
}
// git sync implementation adapted from https://github.com/rust-lang/miri/blob/62039ac/miri-script/src/commands.rs
impl flags::RustcPull {
pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
sh.change_dir(project_root());
let commit = self.commit.map(Result::Ok).unwrap_or_else(|| {
let rust_repo_head =
cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?;
rust_repo_head
.split_whitespace()
.next()
.map(|front| front.trim().to_owned())
.ok_or_else(|| anyhow::format_err!("Could not obtain Rust repo HEAD from remote."))
})?;
// Make sure the repo is clean.
if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() {
bail!("working directory must be clean before running `cargo xtask pull`");
}
// This should not add any new root commits. So count those before and after merging.
let num_roots = || -> anyhow::Result<u32> {
Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count")
.read()
.context("failed to determine the number of root commits")?
.parse::<u32>()?)
};
let num_roots_before = num_roots()?;
// Make sure josh is running.
let josh = start_josh()?;
// Update rust-version file. As a separate commit, since making it part of
// the merge has confused the heck out of josh in the past.
// We pass `--no-verify` to avoid running any git hooks that might exist,
// in case they dirty the repository.
sh.write_file("rust-version", format!("{commit}\n"))?;
const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rust-lang/rust";
cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}")
.run()
.context("FAILED to commit rust-version file, something went wrong")?;
// Fetch given rustc commit.
cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git")
.run()
.inspect_err(|_| {
// Try to undo the previous `git commit`, to leave the repo in the state we found it in.
cmd!(sh, "git reset --hard HEAD^")
.run()
.expect("FAILED to clean up again after failed `git fetch`, sorry for that");
})
.context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?;
// Merge the fetched commit.
const MERGE_COMMIT_MESSAGE: &str = "Merge from rust-lang/rust";
cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}")
.run()
.context("FAILED to merge new commits, something went wrong")?;
// Check that the number of roots did not increase.
if num_roots()? != num_roots_before {
bail!("Josh created a new root commit. This is probably not the history you want.");
}
drop(josh);
Ok(())
}
}
impl flags::RustcPush {
pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
let branch = self.branch.as_deref().unwrap_or("sync-from-ra");
let rust_path = self.rust_path;
let rust_fork = self.rust_fork;
sh.change_dir(project_root());
let base = sh.read_file("rust-version")?.trim().to_owned();
// Make sure the repo is clean.
if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() {
bail!("working directory must be clean before running `cargo xtask push`");
}
// Make sure josh is running.
let josh = start_josh()?;
// Find a repo we can do our preparation in.
sh.change_dir(rust_path);
// Prepare the branch. Pushing works much better if we use as base exactly
// the commit that we pulled from last time, so we use the `rust-version`
// file to find out which commit that would be.
println!("Preparing {rust_fork} (base: {base})...");
if cmd!(sh, "git fetch https://github.com/{rust_fork} {branch}")
.ignore_stderr()
.read()
.is_ok()
{
bail!(
"The branch `{branch}` seems to already exist in `https://github.com/{rust_fork}`. Please delete it and try again."
);
}
cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?;
cmd!(sh, "git push https://github.com/{rust_fork} {base}:refs/heads/{branch}")
.ignore_stdout()
.ignore_stderr() // silence the "create GitHub PR" message
.run()?;
println!();
// Do the actual push.
sh.change_dir(project_root());
println!("Pushing rust-analyzer changes...");
cmd!(
sh,
"git push http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git HEAD:{branch}"
)
.run()?;
println!();
// Do a round-trip check to make sure the push worked as expected.
cmd!(
sh,
"git fetch http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git {branch}"
)
.ignore_stderr()
.read()?;
let head = cmd!(sh, "git rev-parse HEAD").read()?;
let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?;
if head != fetch_head {
bail!(
"Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\
Expected {head}, got {fetch_head}."
);
}
println!(
"Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:"
);
// https://github.com/github-linguist/linguist/compare/master...octocat:linguist:master
let fork_path = rust_fork.replace('/', ":");
println!(
" https://github.com/rust-lang/rust/compare/{fork_path}:{branch}?quick_pull=1&title=Subtree+update+of+rust-analyzer&body=r?+@ghost"
);
drop(josh);
Ok(())
}
}
/// Used for rustc syncs.
const JOSH_FILTER: &str = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer";
const JOSH_PORT: &str = "42042";
fn start_josh() -> anyhow::Result<impl Drop> {
// Determine cache directory.
let local_dir = {
let user_dirs = ProjectDirs::from("org", "rust-lang", "rust-analyzer-josh").unwrap();
user_dirs.cache_dir().to_owned()
};
// Start josh, silencing its output.
let mut cmd = Command::new("josh-proxy");
cmd.arg("--local").arg(local_dir);
cmd.arg("--remote").arg("https://github.com");
cmd.arg("--port").arg(JOSH_PORT);
cmd.arg("--no-background");
cmd.stdout(Stdio::null());
cmd.stderr(Stdio::null());
let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?;
// Give it some time so hopefully the port is open. (100ms was not enough.)
thread::sleep(Duration::from_millis(200));
Ok(JodChild(josh))
}