Auto merge of #137523 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
bors 2025-02-24 13:45:40 +00:00
commit d6d6d19841
69 changed files with 724 additions and 504 deletions


@ -34,14 +34,14 @@ jobs:
- os: windows-latest
target: aarch64-pc-windows-msvc
code-target: win32-arm64
- os: ubuntu-20.04
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
code-target: linux-x64
container: rockylinux:8
- os: ubuntu-20.04
- os: ubuntu-latest
target: aarch64-unknown-linux-gnu
code-target: linux-arm64
- os: ubuntu-20.04
- os: ubuntu-latest
target: arm-unknown-linux-gnueabihf
code-target: linux-armhf
- os: macos-13


@ -4,7 +4,7 @@ Thank you for your interest in contributing to rust-analyzer! There are many way
and we appreciate all of them.
To get a quick overview of the crates and structure of the project take a look at the
[./docs/dev](./docs/dev) folder.
[Contributing](https://rust-analyzer.github.io/book/contributing) section of the manual.
If you have any questions please ask them in the [rust-analyzer zulip stream](
https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer) or if unsure where

Cargo.lock (generated, 53 lines changed)

@ -559,9 +559,9 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"ra-ap-rustc_abi",
"ra-ap-rustc_hashes",
"ra-ap-rustc_parse_format",
"rustc-hash 2.0.0",
"rustc_apfloat",
@ -591,7 +591,6 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"parser",
"rustc-hash 2.0.0",
@ -626,11 +625,11 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"nohash-hasher",
"oorandom",
"project-model",
"ra-ap-rustc_abi",
"ra-ap-rustc_hashes",
"ra-ap-rustc_index",
"ra-ap-rustc_pattern_analysis",
"rustc-hash 2.0.0",
@ -744,7 +743,6 @@ dependencies = [
"hir",
"indexmap",
"itertools",
"limit",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
@ -943,10 +941,6 @@ dependencies = [
"redox_syscall",
]
[[package]]
name = "limit"
version = "0.0.0"
[[package]]
name = "line-index"
version = "0.1.2"
@ -1279,7 +1273,6 @@ dependencies = [
"drop_bomb",
"edition",
"expect-test",
"limit",
"ra-ap-rustc_lexer",
"stdx",
"tracing",
@ -1514,20 +1507,30 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.95.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b40c4e339b71a8f075a829b1acaf32f870a11b466d9b8623d50b0ce33e65af95"
checksum = "3829c3355d1681ffeaf1450ec71edcdace6820fe2e86469d8fc1ad45e2c96460"
dependencies = [
"bitflags 2.7.0",
"ra-ap-rustc_hashes",
"ra-ap-rustc_index",
"tracing",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.95.0"
name = "ra-ap-rustc_hashes"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "872072e2ba11d11147ebe9fde1608fe7f7d9b5c51dac524af28ee07c6dade468"
checksum = "1bd4d6d4c434bec08e02370a4f64a4985312097215a62e82d0f757f3a98e502e"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bad6fc4bd7522e31096e2de5b0351144fe0684b608791ee26c842bf2da1b19ae"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@ -1535,9 +1538,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.95.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffcd77debcaf2ad690a57c2d041c11eb33fe66869754b2c5f35c52954b46af0c"
checksum = "cfb234e1f84b92be45276c3025bee18789e9bc95bec8789bec961e78edb01c52"
dependencies = [
"proc-macro2",
"quote",
@ -1546,9 +1549,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.95.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49265cdf8823f8d246e476c79c60bd6e5b551c81ae76e1c8d6a5e0dc73df0bca"
checksum = "7a3a40bd11dc43d1cb110e730b80620cf8102f4cca8920a02b65954da0ed931f"
dependencies = [
"memchr",
"unicode-properties",
@ -1557,9 +1560,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.95.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3da239fdc971176de0db45cb631d71475b52033a3d0027d91964da7be89eee6"
checksum = "5feb877478994cb4c0c0c7a5116a352eefc0634aefc8636feb00a893fa5b7135"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@ -1567,9 +1570,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.95.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56057d08fdfa0d95494e461bbdd5d4b3fdb349cca6be05ad7759bc964be1b8d4"
checksum = "a76774d35934d464c4115908cde16f76a4f7e540fe1eea6b79336c556e37bdd3"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",
@ -1744,6 +1747,12 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
[[package]]
name = "rustc-stable-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1"
[[package]]
name = "rustc_apfloat"
version = "0.2.1+llvm-462a31f5a5ab"


@ -64,7 +64,6 @@ ide-db = { path = "./crates/ide-db", version = "0.0.0" }
ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
intern = { path = "./crates/intern", version = "0.0.0" }
limit = { path = "./crates/limit", version = "0.0.0" }
load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
mbe = { path = "./crates/mbe", version = "0.0.0" }
parser = { path = "./crates/parser", version = "0.0.0" }
@ -87,11 +86,12 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.95", default-features = false }
ra-ap-rustc_parse_format = { version = "0.95", default-features = false }
ra-ap-rustc_index = { version = "0.95", default-features = false }
ra-ap-rustc_abi = { version = "0.95", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.95", default-features = false }
ra-ap-rustc_hashes = { version = "0.97", default-features = false }
ra-ap-rustc_lexer = { version = "0.97", default-features = false }
ra-ap-rustc_parse_format = { version = "0.97", default-features = false }
ra-ap-rustc_index = { version = "0.97", default-features = false }
ra-ap-rustc_abi = { version = "0.97", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.97", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.


@ -14,8 +14,8 @@ https://rust-analyzer.github.io/book/installation.html
## Documentation
If you want to **contribute** to rust-analyzer check out the [CONTRIBUTING.md](./CONTRIBUTING.md) or
if you are just curious about how things work under the hood, check the [./docs/dev](./docs/dev)
folder.
if you are just curious about how things work under the hood, see the
[Contributing](https://rust-analyzer.github.io/book/contributing) section of the manual.
If you want to **use** rust-analyzer's language server with your editor of
choice, check [the manual](https://rust-analyzer.github.io/book/).


@ -296,6 +296,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
/// The working directory to run proc-macros in. This is the workspace root of the cargo workspace
/// for workspace members, the crate manifest dir otherwise.
pub proc_macro_cwd: Option<AbsPathBuf>,
}
#[derive(Default, Clone, PartialEq, Eq)]
@ -360,8 +363,9 @@ impl CrateGraph {
cfg_options: Arc<CfgOptions>,
potential_cfg_options: Option<Arc<CfgOptions>>,
mut env: Env,
is_proc_macro: bool,
origin: CrateOrigin,
is_proc_macro: bool,
proc_macro_cwd: Option<AbsPathBuf>,
) -> CrateId {
env.entries.shrink_to_fit();
let data = CrateData {
@ -375,6 +379,7 @@ impl CrateGraph {
dependencies: Vec::new(),
origin,
is_proc_macro,
proc_macro_cwd,
};
self.arena.alloc(data)
}
@ -698,8 +703,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -709,8 +715,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
let crate3 = graph.add_crate_root(
FileId::from_raw(3u32),
@ -720,8 +727,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
@ -745,8 +753,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -756,8 +765,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
@ -778,8 +788,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -789,8 +800,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
let crate3 = graph.add_crate_root(
FileId::from_raw(3u32),
@ -800,8 +812,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
@ -822,8 +835,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -833,8 +847,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
assert!(graph
.add_dep(
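Note on the hunks above: `add_crate_root` now takes `origin` before `is_proc_macro`, plus a new trailing `proc_macro_cwd` argument, populated per the rule in the new `CrateData` doc comment (workspace root for workspace members, manifest dir otherwise). A self-contained sketch of that selection rule; the `Package` type and `proc_macro_cwd` helper here are illustrative stand-ins, not the real API:

```rust
use std::path::{Path, PathBuf};

// Sketch of how the new `proc_macro_cwd` value gets chosen (mirrors the
// project-model hunk further down in this commit).
struct Package {
    is_member: bool,
    manifest_dir: PathBuf,
}

fn proc_macro_cwd(pkg: &Package, workspace_root: &Path) -> Option<PathBuf> {
    Some(if pkg.is_member {
        // Workspace members run proc-macros from the cargo workspace root.
        workspace_root.to_path_buf()
    } else {
        // Everything else runs them from its own manifest directory.
        pkg.manifest_dir.clone()
    })
}

fn main() {
    let member = Package { is_member: true, manifest_dir: PathBuf::from("/ws/crates/foo") };
    let external = Package { is_member: false, manifest_dir: PathBuf::from("/registry/bar-1.0") };
    assert_eq!(proc_macro_cwd(&member, Path::new("/ws")), Some(PathBuf::from("/ws")));
    assert_eq!(proc_macro_cwd(&external, Path::new("/ws")), Some(PathBuf::from("/registry/bar-1.0")));
}
```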


@ -10,7 +10,7 @@ use rustc_hash::FxHashMap;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
use vfs::{AbsPathBuf, FileId};
use vfs::FileId;
pub use crate::{
change::FileChange,
@ -85,8 +85,6 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// Crate related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
/// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
pub proc_macro_cwd: Option<AbsPathBuf>,
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
pub data_layout: TargetLayoutLoadResult,
/// Toolchain version used to compile the crate.


@ -31,6 +31,7 @@ triomphe.workspace = true
rustc_apfloat = "0.2.0"
text-size.workspace = true
ra-ap-rustc_hashes.workspace = true
ra-ap-rustc_parse_format.workspace = true
ra-ap-rustc_abi.workspace = true
@ -43,7 +44,6 @@ hir-expand.workspace = true
mbe.workspace = true
cfg.workspace = true
tt.workspace = true
limit.workspace = true
span.workspace = true


@ -173,7 +173,13 @@ fn parse_repr_tt(tt: &TopSubtree) -> Option<ReprOptions> {
}
}
Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: Hash64::ZERO })
Some(ReprOptions {
int,
align: max_align,
pack: min_pack,
flags,
field_shuffle_seed: Hash64::ZERO,
})
}
impl StructData {


@ -9,7 +9,6 @@ use hir_expand::{
attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
use limit::Limit;
use span::{Edition, SyntaxContextId};
use syntax::{ast, Parse};
use triomphe::Arc;
@ -28,18 +27,18 @@ pub struct Expander {
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
recursion_depth: u32,
recursion_limit: Limit,
recursion_limit: usize,
}
impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
let recursion_limit = module.def_map(db).recursion_limit() as usize;
let recursion_limit = Limit::new(if cfg!(test) {
let recursion_limit = if cfg!(test) {
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
std::cmp::min(32, recursion_limit)
} else {
recursion_limit
});
};
Expander {
current_file_id,
module,
@ -194,7 +193,7 @@ impl Expander {
let Some(call_id) = value else {
return ExpandResult { value: None, err };
};
if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
if self.recursion_depth as usize > self.recursion_limit {
self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me);
return ExpandResult::only_err(ExpandError::new(
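The refactor above keeps the pre-existing `u32::MAX` sentinel while swapping `Limit` for a plain `usize`. A minimal, self-contained sketch of that saturating check; the `enter` helper is hypothetical, only the depth/limit logic mirrors the diff:

```rust
struct Expander {
    recursion_depth: u32,
    recursion_limit: usize,
}

impl Expander {
    // Hypothetical wrapper around the check from the hunk above.
    fn enter(&mut self) -> Result<(), &'static str> {
        if self.recursion_depth as usize > self.recursion_limit {
            // Saturate to u32::MAX so every later call fails fast, matching
            // the sentinel documented on the real struct.
            self.recursion_depth = u32::MAX;
            return Err("recursion limit reached");
        }
        self.recursion_depth += 1;
        Ok(())
    }
}

fn main() {
    let mut e = Expander { recursion_depth: 0, recursion_limit: 2 };
    assert!(e.enter().is_ok()); // depth 0 -> 1
    assert!(e.enter().is_ok()); // depth 1 -> 2
    assert!(e.enter().is_ok()); // depth 2 -> 3 (check is `depth > limit`)
    assert!(e.enter().is_err()); // depth 3 > 2: poisoned to u32::MAX
    assert!(e.enter().is_err()); // stays failed afterwards
}
```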


@ -19,7 +19,6 @@ use hir_expand::{
use intern::{sym, Interned};
use itertools::{izip, Itertools};
use la_arena::Idx;
use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId};
use syntax::ast;
@ -55,8 +54,8 @@ use crate::{
UnresolvedMacro, UseId, UseLoc,
};
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
const GLOB_RECURSION_LIMIT: usize = 100;
const FIXED_POINT_LIMIT: usize = 8192;
pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
let crate_graph = db.crate_graph();
@ -393,7 +392,7 @@ impl DefCollector<'_> {
}
i += 1;
if FIXED_POINT_LIMIT.check(i).is_err() {
if i > FIXED_POINT_LIMIT {
tracing::error!("name resolution is stuck");
break 'resolve_attr;
}
@ -993,7 +992,7 @@ impl DefCollector<'_> {
import: Option<ImportOrExternCrate>,
depth: usize,
) {
if GLOB_RECURSION_LIMIT.check(depth).is_err() {
if depth > GLOB_RECURSION_LIMIT {
// prevent stack overflows (but this shouldn't be possible)
panic!("infinite recursion in glob imports!");
}
@ -1470,8 +1469,7 @@ impl DefCollector<'_> {
depth: usize,
container: ItemContainerId,
) {
let recursion_limit = Limit::new(self.def_map.recursion_limit() as usize);
if recursion_limit.check(depth).is_err() {
if depth > self.def_map.recursion_limit() as usize {
cov_mark::hit!(macro_expansion_overflow);
tracing::warn!("macro expansion is too deep");
return;
@ -1499,7 +1497,6 @@ impl DefCollector<'_> {
fn finish(mut self) -> DefMap {
// Emit diagnostics for all remaining unexpanded macros.
let _p = tracing::info_span!("DefCollector::finish").entered();
for directive in &self.unresolved_macros {


@ -2,12 +2,11 @@
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use hir_expand::{name::Name, HirFileIdExt};
use limit::Limit;
use span::EditionedFileId;
use crate::{db::DefDatabase, HirFileId};
static MOD_DEPTH_LIMIT: Limit = Limit::new(32);
const MOD_DEPTH_LIMIT: usize = 32;
#[derive(Clone, Debug)]
pub(super) struct ModDir {
@ -50,7 +49,7 @@ impl ModDir {
fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
let depth = self.depth + 1;
if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
if depth as usize > MOD_DEPTH_LIMIT {
tracing::error!("MOD_DEPTH_LIMIT exceeded");
cov_mark::hit!(circular_mods);
return None;


@ -31,7 +31,6 @@ cfg.workspace = true
syntax.workspace = true
tt.workspace = true
mbe.workspace = true
limit.workspace = true
span.workspace = true
parser.workspace = true
syntax-bridge.workspace = true


@ -833,7 +833,7 @@ fn env_expand(
if key.as_str() == "OUT_DIR" {
err = Some(ExpandError::other(
span,
r#"`OUT_DIR` not set, enable "build scripts" to fix"#,
r#"`OUT_DIR` not set, build scripts may have failed to run"#,
));
}


@ -2,7 +2,6 @@
use base_db::{ra_salsa, CrateId, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
@ -35,7 +34,7 @@ type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
static TOKEN_LIMIT: Limit = Limit::new(2_097_152);
const TOKEN_LIMIT: usize = 2_097_152;
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
@ -740,20 +739,19 @@ pub(crate) fn token_tree_to_syntax_node(
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
let tt = tt.top_subtree();
let count = tt.count();
if TOKEN_LIMIT.check(count).is_err() {
if count <= TOKEN_LIMIT {
Ok(())
} else {
Err(ExpandResult {
value: (),
err: Some(ExpandError::other(
tt.delimiter.open,
format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
count,
TOKEN_LIMIT.inner(),
count, TOKEN_LIMIT,
),
)),
})
} else {
Ok(())
}
}


@ -238,6 +238,9 @@ impl CustomProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
let current_dir =
krate_graph[calling_crate].proc_macro_cwd.as_deref().map(ToString::to_string);
match proc_macro.expander.expand(
tt,
attr_arg,
@ -245,10 +248,7 @@ impl CustomProcMacroExpander {
def_site,
call_site,
mixed_site,
db.crate_workspace_data()[&calling_crate]
.proc_macro_cwd
.as_ref()
.map(ToString::to_string),
current_dir,
) {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {


@ -36,6 +36,7 @@ indexmap.workspace = true
rustc_apfloat = "0.2.0"
ra-ap-rustc_abi.workspace = true
ra-ap-rustc_hashes.workspace = true
ra-ap-rustc_index.workspace = true
ra-ap-rustc_pattern_analysis.workspace = true
@ -47,7 +48,6 @@ hir-def.workspace = true
hir-expand.workspace = true
base-db.workspace = true
syntax.workspace = true
limit.workspace = true
span.workspace = true
[dev-dependencies]


@ -9,7 +9,6 @@ use chalk_ir::cast::Cast;
use hir_def::lang_item::LangItem;
use hir_expand::name::Name;
use intern::sym;
use limit::Limit;
use triomphe::Arc;
use crate::{
@ -17,7 +16,7 @@ use crate::{
TraitEnvironment, Ty, TyBuilder, TyKind,
};
static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(20);
const AUTODEREF_RECURSION_LIMIT: usize = 20;
#[derive(Debug)]
pub(crate) enum AutoderefKind {
@ -140,7 +139,7 @@ impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
return Some((self.ty.clone(), 0));
}
if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
if self.steps.len() > AUTODEREF_RECURSION_LIMIT {
return None;
}


@ -768,21 +768,23 @@ pub(crate) fn adt_datum_query(
phantom_data,
};
let variant_id_to_fields = |id: VariantId| {
// this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
let _variant_id_to_fields = |id: VariantId| {
let variant_data = &id.variant_data(db.upcast());
let fields = if variant_data.fields().is_empty() || bound_vars_subst.is_empty(Interner) {
let fields = if variant_data.fields().is_empty() {
vec![]
} else {
// HACK: provide full struct type info slows down rust-analyzer by quite a bit unfortunately,
// so we trick chalk into thinking that our struct impl Unsize
if let Some(ty) = bound_vars_subst.at(Interner, 0).ty(Interner) {
vec![ty.clone()]
} else {
vec![]
}
let field_types = db.field_types(id);
variant_data
.fields()
.iter()
.map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst))
.filter(|it| !it.contains_unknown())
.collect()
};
rust_ir::AdtVariantDatum { fields }
};
let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] };
let (kind, variants) = match adt_id {
hir_def::AdtId::StructId(id) => {


@ -14,8 +14,8 @@ use hir_def::{
};
use la_arena::{Idx, RawIdx};
use rustc_abi::AddressSpace;
use rustc_index::{IndexSlice, IndexVec};
use rustc_hashes::Hash64;
use rustc_index::{IndexSlice, IndexVec};
use triomphe::Arc;


@ -10,7 +10,7 @@ use hir_def::{
generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget},
path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments},
resolver::{ResolveValueResult, TypeNs, ValueNs},
type_ref::{TypeBound, TypeRef},
type_ref::{TypeBound, TypeRef, TypesMap},
GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId,
};
use smallvec::SmallVec;
@ -838,15 +838,21 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
(_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
// Find the generic index for the target of our `bound`
let target_param_idx =
self.ctx.resolver.where_predicates_in_scope().find_map(|(p, _)| {
match p {
self.ctx.resolver.where_predicates_in_scope().find_map(
|(p, (_, types_map))| match p {
WherePredicate::TypeBound {
target: WherePredicateTypeTarget::TypeOrConstParam(idx),
bound: b,
} if b == bound => Some(idx),
} if std::ptr::eq::<TypesMap>(
self.ctx.types_map,
types_map,
) && bound == b =>
{
Some(idx)
}
_ => None,
}
});
},
);
let ty = if let Some(target_param_idx) = target_param_idx {
let mut counter = 0;
let generics = self.ctx.generics().expect("generics in scope");
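The extra `std::ptr::eq` guard above exists because two syntactically equal bounds can live in different `TypesMap`s. A self-contained sketch of the identity check, with a stand-in `TypesMap` rather than the real type:

```rust
struct TypesMap(Vec<u32>);

fn main() {
    let a = TypesMap(vec![1, 2]);
    let b = TypesMap(vec![1, 2]);
    assert_eq!(a.0, b.0); // the contents compare equal...
    assert!(!std::ptr::eq(&a, &b)); // ...but they are different maps: skip
    assert!(std::ptr::eq(&a, &a)); // only the same allocation matches
}
```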


@ -3,7 +3,7 @@ use span::{Edition, EditionedFileId};
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
use crate::{db::HirDatabase, mir::MirLowerError, test_db::TestDB, Interner, Substitution};
use super::{interpret_mir, MirEvalError};
@ -84,6 +84,16 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic:
assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
}
fn check_error_with(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expect_err: impl FnOnce(MirEvalError) -> bool,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();
assert!(expect_err(e));
}
#[test]
fn function_with_extern_c_abi() {
check_pass(
@ -945,3 +955,27 @@ fn main() {
"#,
);
}
#[test]
fn regression_19177() {
check_error_with(
r#"
//- minicore: copy
trait Foo {}
trait Bar {}
trait Baz {}
trait Qux {
type Assoc;
}
fn main<'a, T: Foo + Bar + Baz>(
x: &T,
y: (),
z: &'a dyn Qux<Assoc = T>,
w: impl Foo + Bar,
) {
}
"#,
|e| matches!(e, MirEvalError::MirLowerError(_, MirLowerError::GenericArgNotProvided(..))),
);
}


@ -535,7 +535,7 @@ fn test() {
#[test]
fn coerce_unsize_generic() {
check_no_mismatches(
check(
r#"
//- minicore: coerce_unsized
struct Foo<T> { t: T };
@ -543,7 +543,9 @@ struct Bar<T>(Foo<T>);
fn test() {
let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
//^^^^^^^^^^^^^^^^^^^^^ expected &'? Foo<[usize]>, got &'? Foo<[i32; 3]>
let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
//^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &'? Bar<[usize]>, got &'? Bar<[i32; 3]>
}
"#,
);
@ -955,24 +957,3 @@ fn f() {
"#,
);
}
#[test]
fn coerce_nested_unsized_struct() {
check_types(
r#"
//- minicore: fn, coerce_unsized, dispatch_from_dyn, sized
use core::marker::Unsize;
struct Foo<T: ?Sized>(T);
fn need(_: &Foo<dyn Fn(i32) -> i32>) {
}
fn test() {
let callback = |x| x;
//^ i32
need(&Foo(callback));
}
"#,
)
}


@ -4694,21 +4694,21 @@ fn f<T: Send, U>() {
Struct::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^Yes
Struct::<U>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^{unknown}
//^^^^^^^^^^^^^^^^^^^^Yes
Struct::<*const T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
Enum::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^Yes
Enum::<U>::IS_SEND;
//^^^^^^^^^^^^^^^^^^{unknown}
//^^^^^^^^^^^^^^^^^^Yes
Enum::<*const T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
//^^^^^^^^^^^^^^^^^^^^^^^^^Yes
Union::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^Yes
Union::<U>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^{unknown}
//^^^^^^^^^^^^^^^^^^^Yes
Union::<*const T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
//^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
PhantomData::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^Yes
PhantomData::<U>::IS_SEND;


@ -20,6 +20,7 @@ pub struct AssistConfig {
pub assist_emit_must_use: bool,
pub term_search_fuel: u64,
pub term_search_borrowck: bool,
pub code_action_grouping: bool,
}
impl AssistConfig {


@ -48,6 +48,10 @@ use crate::{
// }
// ```
pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
if !ctx.config.code_action_grouping {
return None;
}
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
let strukt_name = strukt.name()?;
let current_module = ctx.sema.scope(strukt.syntax())?.module();
@ -213,7 +217,9 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use crate::tests::{
check_assist, check_assist_not_applicable, check_assist_not_applicable_no_grouping,
};
use super::*;
@ -717,4 +723,21 @@ impl Person {
"#,
);
}
#[test]
fn delegate_method_skipped_when_no_grouping() {
check_assist_not_applicable_no_grouping(
generate_delegate_methods,
r#"
struct Age(u8);
impl Age {
fn age(&self) -> u8 {
self.0
}
}
struct Person {
ag$0e: Age,
}"#,
);
}
}
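For reference, the shape of the gate added at the top of this assist (and of `generate_delegate_trait` below), pared down to a compilable sketch; all types here are simplified stand-ins for the real ones:

```rust
struct AssistConfig { code_action_grouping: bool }
struct AssistContext { config: AssistConfig }

fn generate_delegate_methods(ctx: &AssistContext) -> Option<()> {
    if !ctx.config.code_action_grouping {
        return None; // bail early: client disabled code-action grouping
    }
    // ... the real assist builds the delegate methods here ...
    Some(())
}

fn main() {
    let ctx = AssistContext { config: AssistConfig { code_action_grouping: false } };
    assert!(generate_delegate_methods(&ctx).is_none());
}
```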


@ -88,6 +88,10 @@ use syntax::{
// }
// ```
pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
if !ctx.config.code_action_grouping {
return None;
}
let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?;
let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() {
@ -788,7 +792,9 @@ fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Pat
mod test {
use super::*;
use crate::tests::{check_assist, check_assist_not_applicable};
use crate::tests::{
check_assist, check_assist_not_applicable, check_assist_not_applicable_no_grouping,
};
#[test]
fn test_tuple_struct_basic() {
@ -1836,4 +1842,33 @@ impl<D, T: C<A>> C<D> for B<T> {
"#,
)
}
#[test]
fn delegate_trait_skipped_when_no_grouping() {
check_assist_not_applicable_no_grouping(
generate_delegate_trait,
r#"
trait SomeTrait {
type T;
fn fn_(arg: u32) -> u32;
fn method_(&mut self) -> bool;
}
struct A;
impl SomeTrait for A {
type T = u32;
fn fn_(arg: u32) -> u32 {
42
}
fn method_(&mut self) -> bool {
false
}
}
struct B {
a$0 : A,
}
"#,
);
}
}


@ -34,6 +34,26 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: true,
};
pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
snippet_cap: SnippetCap::new(true),
allowed: None,
insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: hir::PrefixKind::Plain,
enforce_granularity: true,
group: true,
skip_glob_imports: true,
},
prefer_no_std: false,
prefer_prelude: true,
prefer_absolute: false,
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: false,
};
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@ -52,6 +72,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: true,
};
pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@ -70,6 +91,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: true,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
@ -173,6 +195,20 @@ pub(crate) fn check_assist_not_applicable_for_import_one(
);
}
#[track_caller]
pub(crate) fn check_assist_not_applicable_no_grouping(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) {
check_with_config(
TEST_CONFIG_NO_GROUPING,
assist,
ra_fixture,
ExpectedResult::NotApplicable,
None,
);
}
/// Check assist in unresolved state. Useful to check assists for lazy computation.
#[track_caller]
pub(crate) fn check_assist_unresolved(


@ -30,7 +30,6 @@ bitflags.workspace = true
# local deps
base-db.workspace = true
limit.workspace = true
parser.workspace = true
profile.workspace = true
stdx.workspace = true


@ -357,7 +357,7 @@ fn path_applicable_imports(
let mod_path = mod_path(item)?;
Some(LocatedImport::new(mod_path, item, item))
})
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect()
}
// we have some unresolved qualifier that we search an import for
@ -383,7 +383,7 @@ fn path_applicable_imports(
qualifier_rest,
)
})
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect(),
}
}


@ -6,7 +6,6 @@ use std::ops::ControlFlow;
use either::Either;
use hir::{import_map, Crate, ItemInNs, Module, Semantics};
use limit::Limit;
use crate::{
imports::import_assets::NameToImport,
@ -15,7 +14,7 @@ use crate::{
};
/// A value to use, when uncertain which limit to pick.
pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
pub const DEFAULT_QUERY_SEARCH_LIMIT: usize = 100;
pub use import_map::AssocSearchMode;


@ -133,7 +133,7 @@ macro_rules! env { () => {} }
macro_rules! concat { () => {} }
include!(concat!(env!("OUT_DIR"), "/out.rs"));
//^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
//^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
"#,
);
}
@ -186,7 +186,7 @@ fn main() {
//^^^^^^^ error: expected string literal
env!("OUT_DIR");
//^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
//^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
compile_error!("compile_error works");
//^^^^^^^^^^^^^ error: compile_error works


@ -252,14 +252,14 @@ impl Analysis {
Arc::new(cfg_options),
None,
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
change.change_file(file_id, Some(text));
let ws_data = crate_graph
.iter()
.zip(iter::repeat(Arc::new(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: Err("fixture has no layout".into()),
toolchain: None,
})))


@ -68,6 +68,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
dependencies,
origin,
is_proc_macro,
proc_macro_cwd,
} = &crate_graph[crate_id];
format_to!(
buf,
@ -85,6 +86,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
format_to!(buf, " Env: {:?}\n", env);
format_to!(buf, " Origin: {:?}\n", origin);
format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro);
format_to!(buf, " Proc macro cwd: {:?}\n", proc_macro_cwd);
let deps = dependencies
.iter()
.map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw()))


@ -1,16 +0,0 @@
[package]
name = "limit"
version = "0.0.0"
repository.workspace = true
description = "A struct to enforce limits for rust-analyzer."
authors.workspace = true
edition.workspace = true
license.workspace = true
rust-version.workspace = true
[features]
tracking = []
[lints]
workspace = true


@ -1,67 +0,0 @@
//! limit defines a struct to enforce limits.
#[cfg(feature = "tracking")]
use std::sync::atomic::AtomicUsize;
/// Represents a struct used to enforce a numerical limit.
#[derive(Debug)]
pub struct Limit {
upper_bound: usize,
#[cfg(feature = "tracking")]
max: AtomicUsize,
}
impl Limit {
/// Creates a new limit.
#[inline]
pub const fn new(upper_bound: usize) -> Self {
Self {
upper_bound,
#[cfg(feature = "tracking")]
max: AtomicUsize::new(0),
}
}
/// Creates a new limit.
#[inline]
#[cfg(feature = "tracking")]
pub const fn new_tracking(upper_bound: usize) -> Self {
Self {
upper_bound,
#[cfg(feature = "tracking")]
max: AtomicUsize::new(1),
}
}
/// Gets the underlying numeric limit.
#[inline]
pub const fn inner(&self) -> usize {
self.upper_bound
}
/// Checks whether the given value is below the limit.
/// Returns `Ok` when `other` is below `self`, and `Err` otherwise.
#[inline]
pub fn check(&self, other: usize) -> Result<(), ()> {
if other > self.upper_bound {
Err(())
} else {
#[cfg(feature = "tracking")]
loop {
use std::sync::atomic::Ordering;
let old_max = self.max.load(Ordering::Relaxed);
if other <= old_max || old_max == 0 {
break;
}
_ = self.max.compare_exchange_weak(
old_max,
other,
Ordering::Relaxed,
Ordering::Relaxed,
);
}
Ok(())
}
}
}
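With the crate gone, every former `Limit` becomes a plain `usize` constant compared directly, as the earlier hunks show. A minimal sketch of the migrated pattern; the `Result<(), ()>` signature is simplified, the real `check_tt_count` wraps its error in an `ExpandResult`:

```rust
// After the migration: a bare constant instead of `Limit::new(...)`.
const TOKEN_LIMIT: usize = 2_097_152;

fn check_tt_count(count: usize) -> Result<(), ()> {
    if count <= TOKEN_LIMIT {
        Ok(())
    } else {
        Err(()) // was: TOKEN_LIMIT.check(count).is_err()
    }
}

fn main() {
    assert!(check_tt_count(100).is_ok());
    assert!(check_tt_count(3_000_000).is_err());
}
```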


@ -456,7 +456,6 @@ fn load_crate_graph(
let ws_data = crate_graph
.iter()
.zip(iter::repeat(From::from(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: target_layout.clone(),
toolchain: toolchain.clone(),
})))


@ -15,7 +15,6 @@ doctest = false
[dependencies]
drop_bomb = "0.1.5"
ra-ap-rustc_lexer.workspace = true
limit.workspace = true
tracing = { workspace = true, optional = true }
edition.workspace = true


@ -3,7 +3,6 @@
use std::cell::Cell;
use drop_bomb::DropBomb;
use limit::Limit;
use crate::{
event::Event,
@ -30,7 +29,7 @@ pub(crate) struct Parser<'t> {
edition: Edition,
}
static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
const PARSER_STEP_LIMIT: usize = 15_000_000;
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@ -54,7 +53,7 @@ impl<'t> Parser<'t> {
assert!(n <= 3);
let steps = self.steps.get();
assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
assert!((steps as usize) < PARSER_STEP_LIMIT, "the parser seems stuck");
self.steps.set(steps + 1);
self.inp.kind(self.pos + n)


@ -721,16 +721,16 @@ mod err {
#[test]
fn bad_asm_expr() { run_and_expect_errors("test_data/parser/inline/err/bad_asm_expr.rs"); }
#[test]
fn comma_after_default_values_syntax() {
run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs");
}
#[test]
fn comma_after_functional_update_syntax() {
run_and_expect_errors(
"test_data/parser/inline/err/comma_after_functional_update_syntax.rs",
);
}
#[test]
fn comma_after_default_values_syntax() {
run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs");
}
#[test]
fn crate_visibility_empty_recover() {
run_and_expect_errors("test_data/parser/inline/err/crate_visibility_empty_recover.rs");
}


@ -164,6 +164,7 @@ impl ProjectJson {
is_proc_macro: crate_data.is_proc_macro,
repository: crate_data.repository,
build,
proc_macro_cwd: crate_data.proc_macro_cwd.map(absolutize_on_base),
}
})
.collect(),
@ -240,6 +241,8 @@ pub struct Crate {
pub(crate) include: Vec<AbsPathBuf>,
pub(crate) exclude: Vec<AbsPathBuf>,
pub(crate) is_proc_macro: bool,
/// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
pub(crate) proc_macro_cwd: Option<AbsPathBuf>,
pub(crate) repository: Option<String>,
pub build: Option<Build>,
}
@ -362,6 +365,8 @@ struct CrateData {
repository: Option<String>,
#[serde(default)]
build: Option<BuildData>,
#[serde(default)]
proc_macro_cwd: Option<Utf8PathBuf>,
}
mod cfg_ {
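Since the new field is `#[serde(default)]`, existing `rust-project.json` files keep deserializing unchanged. A hypothetical crate entry that sets it might look as follows, assuming the field keeps its snake_case Rust name on the wire; per the hunk above, a relative path would be absolutized against the project root on load:

```json
{
    "root_module": "bar/src/lib.rs",
    "edition": "2021",
    "deps": [],
    "is_proc_macro": true,
    "proc_macro_cwd": "/abs/path/to/bar"
}
```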


@ -312,8 +312,8 @@ impl Sysroot {
RustLibSrcWorkspace::Empty => true,
};
if !has_core {
let var_note = if env::var_os("rust_lib_src_PATH").is_some() {
" (env var `rust_lib_src_PATH` is set and may be incorrect, try unsetting it)"
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
} else {
", try running `rustup component add rust-src` to possibly fix this"
};
@ -422,18 +422,16 @@ fn discover_sysroot_dir(
}
fn discover_rust_lib_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
if let Ok(path) = env::var("rust_lib_src_PATH") {
if let Ok(path) = env::var("RUST_SRC_PATH") {
if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
let core = path.join("core");
if fs::metadata(&core).is_ok() {
tracing::debug!("Discovered sysroot by rust_lib_src_PATH: {path}");
tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}");
return Some(path);
}
tracing::debug!(
"rust_lib_src_PATH is set, but is invalid (no core: {core:?}), ignoring"
);
tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring");
} else {
tracing::debug!("rust_lib_src_PATH is set, but is invalid, ignoring");
tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
}
}


@ -958,6 +958,7 @@ fn project_json_to_crate_graph(
is_proc_macro,
repository,
is_workspace_member,
proc_macro_cwd,
..
},
file_id,
@ -1005,7 +1006,6 @@ fn project_json_to_crate_graph(
Arc::new(cfg_options),
None,
env,
*is_proc_macro,
if let Some(name) = display_name.clone() {
CrateOrigin::Local {
repo: repository.clone(),
@ -1014,6 +1014,8 @@ fn project_json_to_crate_graph(
} else {
CrateOrigin::Local { repo: None, name: None }
},
*is_proc_macro,
proc_macro_cwd.clone(),
);
debug!(
?crate_graph_crate_id,
@ -1283,11 +1285,12 @@ fn detached_file_to_crate_graph(
cfg_options.clone(),
None,
Env::default(),
false,
CrateOrigin::Local {
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
false,
None,
);
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
@ -1448,8 +1451,13 @@ fn add_target_crate_root(
Arc::new(cfg_options),
potential_cfg_options.map(Arc::new),
env,
matches!(kind, TargetKind::Lib { is_proc_macro: true }),
origin,
matches!(kind, TargetKind::Lib { is_proc_macro: true }),
Some(if pkg.is_member {
cargo.workspace_root().to_path_buf()
} else {
pkg.manifest.parent().to_path_buf()
}),
);
if let TargetKind::Lib { is_proc_macro: true } = kind {
let proc_macro = match build_data {
@ -1587,8 +1595,9 @@ fn sysroot_to_crate_graph(
cfg_options.clone(),
None,
Env::default(),
false,
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
false,
None,
);
Some((krate, crate_id))
})


@ -61,6 +61,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
1: CrateData {
root_file_id: FileId(
@ -132,6 +137,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
2: CrateData {
root_file_id: FileId(
@ -203,6 +213,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
3: CrateData {
root_file_id: FileId(
@ -274,6 +289,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
4: CrateData {
root_file_id: FileId(
@ -341,5 +361,10 @@
name: "libc",
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
),
),
},
}


@ -61,6 +61,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
1: CrateData {
root_file_id: FileId(
@ -132,6 +137,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
2: CrateData {
root_file_id: FileId(
@ -203,6 +213,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
3: CrateData {
root_file_id: FileId(
@ -274,6 +289,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
4: CrateData {
root_file_id: FileId(
@ -341,5 +361,10 @@
name: "libc",
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
),
),
},
}


@ -60,6 +60,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
1: CrateData {
root_file_id: FileId(
@ -130,6 +135,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
2: CrateData {
root_file_id: FileId(
@ -200,6 +210,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
3: CrateData {
root_file_id: FileId(
@ -270,6 +285,11 @@
),
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$hello-world",
),
),
},
4: CrateData {
root_file_id: FileId(
@ -337,5 +357,10 @@
name: "libc",
},
is_proc_macro: false,
proc_macro_cwd: Some(
AbsPathBuf(
"$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
),
),
},
}


@ -38,6 +38,7 @@
Alloc,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
1: CrateData {
root_file_id: FileId(
@ -69,6 +70,7 @@
Core,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
2: CrateData {
root_file_id: FileId(
@ -100,6 +102,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
3: CrateData {
root_file_id: FileId(
@ -131,6 +134,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
4: CrateData {
root_file_id: FileId(
@ -179,6 +183,7 @@
ProcMacro,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
5: CrateData {
root_file_id: FileId(
@ -210,6 +215,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
6: CrateData {
root_file_id: FileId(
@ -306,6 +312,7 @@
Std,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
7: CrateData {
root_file_id: FileId(
@ -337,6 +344,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
8: CrateData {
root_file_id: FileId(
@ -368,6 +376,7 @@
Test,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
9: CrateData {
root_file_id: FileId(
@ -399,6 +408,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
10: CrateData {
root_file_id: FileId(
@ -477,6 +487,7 @@
),
},
is_proc_macro: false,
proc_macro_cwd: None,
},
11: CrateData {
root_file_id: FileId(
@ -555,5 +566,6 @@
),
},
is_proc_macro: false,
proc_macro_cwd: None,
},
}


@ -38,6 +38,7 @@
Alloc,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
1: CrateData {
root_file_id: FileId(
@ -69,6 +70,7 @@
Core,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
2: CrateData {
root_file_id: FileId(
@ -100,6 +102,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
3: CrateData {
root_file_id: FileId(
@ -131,6 +134,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
4: CrateData {
root_file_id: FileId(
@ -179,6 +183,7 @@
ProcMacro,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
5: CrateData {
root_file_id: FileId(
@ -210,6 +215,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
6: CrateData {
root_file_id: FileId(
@ -306,6 +312,7 @@
Std,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
7: CrateData {
root_file_id: FileId(
@ -337,6 +344,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
8: CrateData {
root_file_id: FileId(
@ -368,6 +376,7 @@
Test,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
9: CrateData {
root_file_id: FileId(
@ -399,6 +408,7 @@
Other,
),
is_proc_macro: false,
proc_macro_cwd: None,
},
10: CrateData {
root_file_id: FileId(
@ -474,5 +484,6 @@
),
},
is_proc_macro: false,
proc_macro_cwd: None,
},
}


@ -1476,6 +1476,7 @@ impl Config {
prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(),
term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
code_action_grouping: self.code_action_group(),
}
}


@ -885,7 +885,6 @@ pub fn ws_to_crate_graph(
ws_data.extend(mapping.values().copied().zip(iter::repeat(Arc::new(CrateWorkspaceData {
toolchain: toolchain.clone(),
data_layout: target_layout.clone(),
proc_macro_cwd: Some(ws.workspace_root().to_owned()),
}))));
proc_macro_paths.push(crate_proc_macros);
}


@ -211,8 +211,9 @@ impl ChangeFixture {
From::from(meta.cfg.clone()),
Some(From::from(meta.cfg)),
meta.env,
false,
origin,
false,
None,
);
let prev = crates.insert(crate_name.clone(), crate_id);
assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
@ -249,8 +250,9 @@ impl ChangeFixture {
From::from(default_cfg.clone()),
Some(From::from(default_cfg)),
default_env,
false,
CrateOrigin::Local { repo: None, name: None },
false,
None,
);
} else {
for (from, to, prelude) in crate_deps {
@ -286,8 +288,9 @@ impl ChangeFixture {
String::from("__ra_is_test_fixture"),
String::from("__ra_is_test_fixture"),
)]),
false,
CrateOrigin::Lang(LangCrateOrigin::Core),
false,
None,
);
for krate in all_crates {
@ -333,8 +336,9 @@ impl ChangeFixture {
String::from("__ra_is_test_fixture"),
String::from("__ra_is_test_fixture"),
)]),
true,
CrateOrigin::Local { repo: None, name: None },
true,
None,
);
proc_macros.insert(proc_macros_crate, Ok(proc_macro));
@ -362,7 +366,6 @@ impl ChangeFixture {
crate_graph
.iter()
.zip(iter::repeat(From::from(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: target_data_layout,
toolchain,
})))


@ -34,3 +34,8 @@ use-boolean-and = true
[output.html.fold]
enable = true
level = 3
[preprocessor.toc]
command = "mdbook-toc"
renderer = ["html"]
max-level = 3


@ -14,3 +14,11 @@
- [Assists (Code Actions)](assists.md)
- [Diagnostics](diagnostics.md)
- [Editor Features](editor_features.md)
- [Contributing](contributing/README.md)
- [Architecture](contributing/architecture.md)
- [Debugging](contributing/debugging.md)
- [Guide](contributing/guide.md)
- [LSP Extensions](contributing/lsp-extensions.md)
- [Setup](contributing/setup.md)
- [Style](contributing/style.md)
- [Syntax](contributing/syntax.md)

File diff suppressed because it is too large


@ -3,13 +3,13 @@
**Source:**
[config.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs)
The [Installation](#_installation) section contains details on
The [Installation](./installation.md) section contains details on
configuration for some of the editors. In general `rust-analyzer` is
configured via LSP messages, which means that its up to the editor to
decide on the exact format and location of configuration files.
Some clients, such as [VS Code](#vs-code) or [COC plugin in
Vim](#coc-rust-analyzer) provide `rust-analyzer` specific configuration
Some clients, such as [VS Code](./vs_code.md) or [COC plugin in
Vim](./other_editors.md#coc-rust-analyzer) provide `rust-analyzer` specific configuration
UIs. Others may require you to know a bit more about the interaction
with `rust-analyzer`.


@ -9,7 +9,7 @@ $ cargo test
should be enough to get you started!
To learn more about how rust-analyzer works, see [./architecture.md](./architecture.md).
To learn more about how rust-analyzer works, see [Architecture](architecture.md).
It also explains the high-level layout of the source code.
Do skim through that document.
@ -24,7 +24,9 @@ rust-analyzer is a part of the [RLS-2.0 working
group](https://github.com/rust-lang/compiler-team/tree/6a769c13656c0a6959ebc09e7b1f7c09b86fb9c0/working-groups/rls-2.0).
Discussion happens in this Zulip stream:
https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
<https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer>
<!-- toc -->
# Issue Labels
@ -54,7 +56,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
# Code Style & Review Process
Do see [./style.md](./style.md).
See the [Style Guide](style.md).
# Cookbook
@ -88,11 +90,13 @@ As a sanity check after I'm done, I use `cargo xtask install --server` and **Rel
If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
Notably, this uses the usual `rust-analyzer` binary from `PATH`.
For this, it is important to have the following in your `settings.json` file:
```json
{
"rust-analyzer.server.path": "rust-analyzer"
}
```
After I am done with the fix, I use `cargo xtask install --client` to try the new extension for real.
If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
@ -117,6 +121,7 @@ cd editors/code
npm ci
npm run lint
```
## How to ...
* ... add an assist? [#7535](https://github.com/rust-lang/rust-analyzer/pull/7535)
@ -142,14 +147,15 @@ Note that `stdout` is used for the actual protocol, so `println!` will break thi
To log all communication between the server and the client, there are two choices:
* You can log on the server side, by running something like
```
env RA_LOG=lsp_server=debug code .
```
* You can log on the client side, by the `rust-analyzer: Toggle LSP Logs` command or enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
These logs are shown in a separate tab in the output and could be used with LSP inspector.
Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
There are also several VS Code commands which might be of interest:
* `rust-analyzer: Status` shows some memory-usage statistics.


@ -8,19 +8,20 @@ It goes deeper than what is covered in this document, but will take some time to
See also these implementation-related blog posts:
* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
* <https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html>
* <https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html>
* <https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html>
* <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
* <https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html>
For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
## Bird's Eye View
![](https://user-images.githubusercontent.com/4789492/107129398-0ab70f00-687a-11eb-9bfc-d4eb023aec06.png)
<!-- toc -->
On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
More specifically, input data consists of a set of test files (`(PathBuf, String)` pairs) and information about project structure, captured in the so called `CrateGraph`.
@ -295,7 +296,7 @@ For this reason, all path APIs generally take some existing path as a "file syst
### `crates/stdx`
This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
as copies of unstable std items we would like to make use of already.
### `crates/profile`


@ -8,6 +8,7 @@
<img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
- Install all TypeScript dependencies
```bash
cd editors/code
npm ci
@ -19,7 +20,6 @@
where **only** the `rust-analyzer` extension being debugged is enabled.
* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
## Debug TypeScript VSCode extension
- `Run Installed Extension` - runs the extension with the globally installed `rust-analyzer` binary.
@ -36,12 +36,12 @@ To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</k
- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
```
```bash
echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
```
- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
```toml
[profile.dev]
debug = 2
@ -58,6 +58,7 @@ To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</k
- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates\rust-analyzer\src\bin\main.rs`)
```rust
let mut d = 4;
while d == 4 { // set a breakpoint here and change the value
@ -66,6 +67,7 @@ If you need to debug the server from the very beginning, including its initializ
```
However for this to work, you will need to enable debug_assertions in your build
```rust
RUSTFLAGS='--cfg debug_assertions' cargo build --release
```


@ -12,6 +12,8 @@ https://youtu.be/ANKBNiSWyfc.
[guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
[2024-01-01]: https://github.com/rust-lang/rust-analyzer/tree/2024-01-01
<!-- toc -->
## The big picture
On the highest possible level, rust-analyzer is a stateful component. A client may
@ -152,7 +154,6 @@ the loop is the [`GlobalState::run`] function initiated by [`main_loop`] after
[`GlobalState::new`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/global_state.rs#L148-L215
[`GlobalState::run`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L114-L140
Let's walk through a typical analyzer session!
First, we need to figure out what to analyze. To do this, we run `cargo


@ -19,6 +19,8 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde
If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).
<!-- toc -->
## Configuration in `initializationOptions`
**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567


@ -1,3 +1,5 @@
# Style
Our approach to "clean code" is two-fold:
* We generally don't block PRs on style changes.
@ -274,7 +276,7 @@ fn f() {
Assert liberally.
Prefer [`stdx::never!`](https://docs.rs/always-assert/0.1.2/always_assert/macro.never.html) to standard `assert!`.
**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#error-handling).
**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/architecture.md#error-handling).
## Getters & Setters


@ -1,6 +1,5 @@
# Editor Features
## VS Code
### Color configurations
@ -118,7 +117,7 @@ Or it is possible to specify vars more granularly:
"rust-analyzer.runnables.extraEnv": [
{
// "mask": null, // null mask means that this rule will be applied for all runnables
env: {
"env": {
"APP_ID": "1",
"APP_DATA": "asdf"
}
@ -145,7 +144,7 @@ If needed, you can set different values for different platforms:
"rust-analyzer.runnables.extraEnv": [
{
"platform": "win32", // windows only
env: {
"env": {
"APP_DATA": "windows specific data"
}
},


@ -6,6 +6,8 @@ Protocol](https://microsoft.github.io/language-server-protocol/).
This page assumes that you have already [installed the rust-analyzer
binary](./rust_analyzer_binary.html).
<!-- toc -->
## Emacs
To use `rust-analyzer`, you need to install and enable one of the two


@ -1 +1 @@
273465e1f2932a30a5b56ac95859cdc86f3f33fa
e0be1a02626abef2878cb7f4aaef7ae409477112


@ -117,7 +117,13 @@ impl fmt::Display for Location {
let path = self.file.strip_prefix(project_root()).unwrap().display().to_string();
let path = path.replace('\\', "/");
let name = self.file.file_name().unwrap();
write!(f, " [{}](/{}#{}) ", name.to_str().unwrap(), path, self.line)
write!(
f,
" [{}](https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}) ",
name.to_str().unwrap(),
path,
self.line
)
}
}


@ -18,92 +18,87 @@ use crate::{
util::list_rust_files,
};
const PARSER_CRATE_ROOT: &str = "crates/parser";
const PARSER_TEST_DATA: &str = "crates/parser/test_data";
const PARSER_TEST_DATA_INLINE: &str = "crates/parser/test_data/parser/inline";
pub(crate) fn generate(check: bool) {
let tests = tests_from_dir(
&project_root().join(Path::new(&format!("{PARSER_CRATE_ROOT}/src/grammar"))),
);
let parser_crate_root = project_root().join("crates/parser");
let parser_test_data = parser_crate_root.join("test_data");
let parser_test_data_inline = parser_test_data.join("parser/inline");
let tests = tests_from_dir(&parser_crate_root.join("src/grammar"));
let mut some_file_was_updated = false;
some_file_was_updated |=
install_tests(&tests.ok, &format!("{PARSER_TEST_DATA_INLINE}/ok"), check).unwrap();
install_tests(&tests.ok, parser_test_data_inline.join("ok"), check).unwrap();
some_file_was_updated |=
install_tests(&tests.err, &format!("{PARSER_TEST_DATA_INLINE}/err"), check).unwrap();
install_tests(&tests.err, parser_test_data_inline.join("err"), check).unwrap();
if some_file_was_updated {
let _ = fs::File::open(format!("{PARSER_CRATE_ROOT}/src/tests.rs"))
let _ = fs::File::open(parser_crate_root.join("src/tests.rs"))
.unwrap()
.set_modified(SystemTime::now());
let ok_tests = tests.ok.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
let test_name = quote::format_ident!("{}", test.name);
let test_file = format!("test_data/parser/inline/ok/{test_name}.rs");
let (test_func, args) = match &test.edition {
Some(edition) => {
let edition = quote::format_ident!("Edition{edition}");
(
quote::format_ident!("run_and_expect_no_errors_with_edition"),
quote::quote! {#test_file, crate::Edition::#edition},
)
}
None => {
(quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file})
}
};
quote::quote! {
#[test]
fn #test_name() {
#test_func(#args);
}
}
});
let err_tests = tests.err.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
let test_name = quote::format_ident!("{}", test.name);
let test_file = format!("test_data/parser/inline/err/{test_name}.rs");
let (test_func, args) = match &test.edition {
Some(edition) => {
let edition = quote::format_ident!("Edition{edition}");
(
quote::format_ident!("run_and_expect_errors_with_edition"),
quote::quote! {#test_file, crate::Edition::#edition},
)
}
None => (quote::format_ident!("run_and_expect_errors"), quote::quote! {#test_file}),
};
quote::quote! {
#[test]
fn #test_name() {
#test_func(#args);
}
}
});
let output = quote::quote! {
mod ok {
use crate::tests::*;
#(#ok_tests)*
}
mod err {
use crate::tests::*;
#(#err_tests)*
}
};
let pretty = reformat(output.to_string());
ensure_file_contents(
crate::flags::CodegenType::ParserTests,
format!("{PARSER_TEST_DATA}/generated/runner.rs").as_ref(),
&pretty,
check,
);
}
let ok_tests = tests.ok.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
let test_name = quote::format_ident!("{}", test.name);
let test_file = format!("test_data/parser/inline/ok/{test_name}.rs");
let (test_func, args) = match &test.edition {
Some(edition) => {
let edition = quote::format_ident!("Edition{edition}");
(
quote::format_ident!("run_and_expect_no_errors_with_edition"),
quote::quote! {#test_file, crate::Edition::#edition},
)
}
None => (quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file}),
};
quote::quote! {
#[test]
fn #test_name() {
#test_func(#args);
}
}
});
let err_tests = tests.err.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
let test_name = quote::format_ident!("{}", test.name);
let test_file = format!("test_data/parser/inline/err/{test_name}.rs");
let (test_func, args) = match &test.edition {
Some(edition) => {
let edition = quote::format_ident!("Edition{edition}");
(
quote::format_ident!("run_and_expect_errors_with_edition"),
quote::quote! {#test_file, crate::Edition::#edition},
)
}
None => (quote::format_ident!("run_and_expect_errors"), quote::quote! {#test_file}),
};
quote::quote! {
#[test]
fn #test_name() {
#test_func(#args);
}
}
});
let output = quote::quote! {
mod ok {
use crate::tests::*;
#(#ok_tests)*
}
mod err {
use crate::tests::*;
#(#err_tests)*
}
};
let pretty = reformat(output.to_string());
ensure_file_contents(
crate::flags::CodegenType::ParserTests,
parser_test_data.join("generated/runner.rs").as_ref(),
&pretty,
check,
);
}
fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) -> Result<bool> {
let tests_dir = project_root().join(into);
fn install_tests(tests: &HashMap<String, Test>, tests_dir: PathBuf, check: bool) -> Result<bool> {
if !tests_dir.is_dir() {
fs::create_dir_all(&tests_dir)?;
}
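The codegen rewrite above replaces `format!`-built path strings with `PathBuf::join`, so separators stay platform-correct and the intermediate directories can be reused. A self-contained sketch of the pattern; `project_root` here is a stand-in for the real xtask helper:

```rust
use std::path::PathBuf;

// Stand-in for xtask's `project_root()`; resolves to this crate's root.
fn project_root() -> PathBuf {
    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
}

fn main() {
    // Build paths by joining components instead of formatting strings.
    let parser_crate_root = project_root().join("crates/parser");
    let parser_test_data = parser_crate_root.join("test_data");
    let inline = parser_test_data.join("parser/inline");
    println!("{}", inline.join("ok").display());
}
```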


@ -27,8 +27,9 @@ fn check_lsp_extensions_docs(sh: &Shell) {
};
let actual_hash = {
let lsp_extensions_md =
sh.read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
let lsp_extensions_md = sh
.read_file(project_root().join("docs/book/src/contributing/lsp-extensions.md"))
.unwrap();
let text = lsp_extensions_md
.lines()
.find_map(|line| line.strip_prefix("lsp/ext.rs hash:"))
@ -185,7 +186,7 @@ Zlib OR Apache-2.0 OR MIT
fn check_test_attrs(path: &Path, text: &str) {
let panic_rule =
"https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
"https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/style.md#should_panic";
let need_panic: &[&str] = &[
// This file.
"slow-tests/tidy.rs",