Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-01 11:31:15 +00:00

Auto merge of #137523 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`

commit d6d6d19841
.github/workflows/release.yaml (vendored, 6 changed lines)

@@ -34,14 +34,14 @@ jobs:
 - os: windows-latest
 target: aarch64-pc-windows-msvc
 code-target: win32-arm64
-- os: ubuntu-20.04
+- os: ubuntu-latest
 target: x86_64-unknown-linux-gnu
 code-target: linux-x64
 container: rockylinux:8
-- os: ubuntu-20.04
+- os: ubuntu-latest
 target: aarch64-unknown-linux-gnu
 code-target: linux-arm64
-- os: ubuntu-20.04
+- os: ubuntu-latest
 target: arm-unknown-linux-gnueabihf
 code-target: linux-armhf
 - os: macos-13
@@ -4,7 +4,7 @@ Thank you for your interest in contributing to rust-analyzer! There are many way
 and we appreciate all of them.

 To get a quick overview of the crates and structure of the project take a look at the
-[./docs/dev](./docs/dev) folder.
+[Contributing](https://rust-analyzer.github.io/book/contributing) section of the manual.

 If you have any questions please ask them in the [rust-analyzer zulip stream](
 https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer) or if unsure where
Cargo.lock (generated, 53 changed lines)

@@ -559,9 +559,9 @@ dependencies = [
 "intern",
 "itertools",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"limit",
 "mbe",
 "ra-ap-rustc_abi",
+"ra-ap-rustc_hashes",
 "ra-ap-rustc_parse_format",
 "rustc-hash 2.0.0",
 "rustc_apfloat",

@@ -591,7 +591,6 @@ dependencies = [
 "intern",
 "itertools",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"limit",
 "mbe",
 "parser",
 "rustc-hash 2.0.0",

@@ -626,11 +625,11 @@ dependencies = [
 "intern",
 "itertools",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"limit",
 "nohash-hasher",
 "oorandom",
 "project-model",
 "ra-ap-rustc_abi",
+"ra-ap-rustc_hashes",
 "ra-ap-rustc_index",
 "ra-ap-rustc_pattern_analysis",
 "rustc-hash 2.0.0",

@@ -744,7 +743,6 @@ dependencies = [
 "hir",
 "indexmap",
 "itertools",
-"limit",
 "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "memchr",
 "nohash-hasher",

@@ -943,10 +941,6 @@ dependencies = [
 "redox_syscall",
 ]

-[[package]]
-name = "limit"
-version = "0.0.0"
-
 [[package]]
 name = "line-index"
 version = "0.1.2"

@@ -1279,7 +1273,6 @@ dependencies = [
 "drop_bomb",
 "edition",
 "expect-test",
-"limit",
 "ra-ap-rustc_lexer",
 "stdx",
 "tracing",

@@ -1514,20 +1507,30 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.95.0"
+version = "0.97.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b40c4e339b71a8f075a829b1acaf32f870a11b466d9b8623d50b0ce33e65af95"
+checksum = "3829c3355d1681ffeaf1450ec71edcdace6820fe2e86469d8fc1ad45e2c96460"
 dependencies = [
 "bitflags 2.7.0",
+"ra-ap-rustc_hashes",
 "ra-ap-rustc_index",
 "tracing",
 ]

 [[package]]
-name = "ra-ap-rustc_index"
-version = "0.95.0"
+name = "ra-ap-rustc_hashes"
+version = "0.97.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "872072e2ba11d11147ebe9fde1608fe7f7d9b5c51dac524af28ee07c6dade468"
+checksum = "1bd4d6d4c434bec08e02370a4f64a4985312097215a62e82d0f757f3a98e502e"
+dependencies = [
+"rustc-stable-hash",
+]
+
+[[package]]
+name = "ra-ap-rustc_index"
+version = "0.97.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bad6fc4bd7522e31096e2de5b0351144fe0684b608791ee26c842bf2da1b19ae"
 dependencies = [
 "ra-ap-rustc_index_macros",
 "smallvec",

@@ -1535,9 +1538,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.95.0"
+version = "0.97.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffcd77debcaf2ad690a57c2d041c11eb33fe66869754b2c5f35c52954b46af0c"
+checksum = "cfb234e1f84b92be45276c3025bee18789e9bc95bec8789bec961e78edb01c52"
 dependencies = [
 "proc-macro2",
 "quote",

@@ -1546,9 +1549,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.95.0"
+version = "0.97.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49265cdf8823f8d246e476c79c60bd6e5b551c81ae76e1c8d6a5e0dc73df0bca"
+checksum = "7a3a40bd11dc43d1cb110e730b80620cf8102f4cca8920a02b65954da0ed931f"
 dependencies = [
 "memchr",
 "unicode-properties",

@@ -1557,9 +1560,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.95.0"
+version = "0.97.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3da239fdc971176de0db45cb631d71475b52033a3d0027d91964da7be89eee6"
+checksum = "5feb877478994cb4c0c0c7a5116a352eefc0634aefc8636feb00a893fa5b7135"
 dependencies = [
 "ra-ap-rustc_index",
 "ra-ap-rustc_lexer",

@@ -1567,9 +1570,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.95.0"
+version = "0.97.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56057d08fdfa0d95494e461bbdd5d4b3fdb349cca6be05ad7759bc964be1b8d4"
+checksum = "a76774d35934d464c4115908cde16f76a4f7e540fe1eea6b79336c556e37bdd3"
 dependencies = [
 "ra-ap-rustc_index",
 "rustc-hash 2.0.0",

@@ -1744,6 +1747,12 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"

+[[package]]
+name = "rustc-stable-hash"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1"
+
 [[package]]
 name = "rustc_apfloat"
 version = "0.2.1+llvm-462a31f5a5ab"
Cargo.toml (12 changed lines)

@@ -64,7 +64,6 @@ ide-db = { path = "./crates/ide-db", version = "0.0.0" }
 ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
 ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
 intern = { path = "./crates/intern", version = "0.0.0" }
-limit = { path = "./crates/limit", version = "0.0.0" }
 load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
 mbe = { path = "./crates/mbe", version = "0.0.0" }
 parser = { path = "./crates/parser", version = "0.0.0" }

@@ -87,11 +86,12 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 edition = { path = "./crates/edition", version = "0.0.0" }

-ra-ap-rustc_lexer = { version = "0.95", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.95", default-features = false }
-ra-ap-rustc_index = { version = "0.95", default-features = false }
-ra-ap-rustc_abi = { version = "0.95", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.95", default-features = false }
+ra-ap-rustc_hashes = { version = "0.97", default-features = false }
+ra-ap-rustc_lexer = { version = "0.97", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.97", default-features = false }
+ra-ap-rustc_index = { version = "0.97", default-features = false }
+ra-ap-rustc_abi = { version = "0.97", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.97", default-features = false }

 # local crates that aren't published to crates.io. These should not have versions.

@@ -14,8 +14,8 @@ https://rust-analyzer.github.io/book/installation.html
 ## Documentation

 If you want to **contribute** to rust-analyzer check out the [CONTRIBUTING.md](./CONTRIBUTING.md) or
-if you are just curious about how things work under the hood, check the [./docs/dev](./docs/dev)
-folder.
+if you are just curious about how things work under the hood, see the
+[Contributing](https://rust-analyzer.github.io/book/contributing) section of the manual.

 If you want to **use** rust-analyzer's language server with your editor of
 choice, check [the manual](https://rust-analyzer.github.io/book/).
@@ -296,6 +296,9 @@ pub struct CrateData {
 pub dependencies: Vec<Dependency>,
 pub origin: CrateOrigin,
 pub is_proc_macro: bool,
+/// The working directory to run proc-macros in. This is the workspace root of the cargo workspace
+/// for workspace members, the crate manifest dir otherwise.
+pub proc_macro_cwd: Option<AbsPathBuf>,
 }

 #[derive(Default, Clone, PartialEq, Eq)]

@@ -360,8 +363,9 @@ impl CrateGraph {
 cfg_options: Arc<CfgOptions>,
 potential_cfg_options: Option<Arc<CfgOptions>>,
 mut env: Env,
-is_proc_macro: bool,
 origin: CrateOrigin,
+is_proc_macro: bool,
+proc_macro_cwd: Option<AbsPathBuf>,
 ) -> CrateId {
 env.entries.shrink_to_fit();
 let data = CrateData {

@@ -375,6 +379,7 @@ impl CrateGraph {
 dependencies: Vec::new(),
 origin,
 is_proc_macro,
+proc_macro_cwd,
 };
 self.arena.alloc(data)
 }

@@ -698,8 +703,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 let crate2 = graph.add_crate_root(
 FileId::from_raw(2u32),

@@ -709,8 +715,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 let crate3 = graph.add_crate_root(
 FileId::from_raw(3u32),

@@ -720,8 +727,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 assert!(graph
 .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))

@@ -745,8 +753,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 let crate2 = graph.add_crate_root(
 FileId::from_raw(2u32),

@@ -756,8 +765,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 assert!(graph
 .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))

@@ -778,8 +788,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 let crate2 = graph.add_crate_root(
 FileId::from_raw(2u32),

@@ -789,8 +800,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 let crate3 = graph.add_crate_root(
 FileId::from_raw(3u32),

@@ -800,8 +812,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 assert!(graph
 .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))

@@ -822,8 +835,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 let crate2 = graph.add_crate_root(
 FileId::from_raw(2u32),

@@ -833,8 +847,9 @@ mod tests {
 Default::default(),
 Default::default(),
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 assert!(graph
 .add_dep(
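Aside (not part of the diff): the new `proc_macro_cwd` field follows the rule stated in the doc comment above, i.e. the workspace root for workspace members and the crate's manifest directory otherwise. A minimal, self-contained sketch of that selection logic, using hypothetical paths and plain `std::path` types instead of rust-analyzer's `AbsPathBuf`:

```rust
use std::path::PathBuf;

/// Hypothetical helper mirroring the documented rule for `CrateData::proc_macro_cwd`:
/// workspace members run proc-macros in the workspace root, other crates in their
/// own manifest directory. Names and inputs here are illustrative only.
fn choose_proc_macro_cwd(
    is_workspace_member: bool,
    workspace_root: PathBuf,
    manifest_dir: PathBuf,
) -> Option<PathBuf> {
    if is_workspace_member {
        Some(workspace_root)
    } else {
        Some(manifest_dir)
    }
}

fn main() {
    let cwd = choose_proc_macro_cwd(
        true,
        PathBuf::from("/work/my-workspace"),
        PathBuf::from("/work/my-workspace/crates/foo"),
    );
    // Workspace member -> workspace root is used as the proc-macro working directory.
    assert_eq!(cwd, Some(PathBuf::from("/work/my-workspace")));
}
```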
@@ -10,7 +10,7 @@ use rustc_hash::FxHashMap;
 use span::EditionedFileId;
 use syntax::{ast, Parse, SourceFile, SyntaxError};
 use triomphe::Arc;
-use vfs::{AbsPathBuf, FileId};
+use vfs::FileId;

 pub use crate::{
 change::FileChange,

@@ -85,8 +85,6 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
 /// Crate related data shared by the whole workspace.
 #[derive(Debug, PartialEq, Eq, Hash, Clone)]
 pub struct CrateWorkspaceData {
-/// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
-pub proc_macro_cwd: Option<AbsPathBuf>,
 // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
 pub data_layout: TargetLayoutLoadResult,
 /// Toolchain version used to compile the crate.
@@ -31,6 +31,7 @@ triomphe.workspace = true
 rustc_apfloat = "0.2.0"
 text-size.workspace = true

+ra-ap-rustc_hashes.workspace = true
 ra-ap-rustc_parse_format.workspace = true
 ra-ap-rustc_abi.workspace = true


@@ -43,7 +44,6 @@ hir-expand.workspace = true
 mbe.workspace = true
 cfg.workspace = true
 tt.workspace = true
-limit.workspace = true
 span.workspace = true


@@ -173,7 +173,13 @@ fn parse_repr_tt(tt: &TopSubtree) -> Option<ReprOptions> {
 }
 }

-Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: Hash64::ZERO })
+Some(ReprOptions {
+int,
+align: max_align,
+pack: min_pack,
+flags,
+field_shuffle_seed: Hash64::ZERO,
+})
 }

 impl StructData {
@@ -9,7 +9,6 @@ use hir_expand::{
 attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
 ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
 };
-use limit::Limit;
 use span::{Edition, SyntaxContextId};
 use syntax::{ast, Parse};
 use triomphe::Arc;

@@ -28,18 +27,18 @@ pub struct Expander {
 pub(crate) module: ModuleId,
 /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
 recursion_depth: u32,
-recursion_limit: Limit,
+recursion_limit: usize,
 }

 impl Expander {
 pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
 let recursion_limit = module.def_map(db).recursion_limit() as usize;
-let recursion_limit = Limit::new(if cfg!(test) {
+let recursion_limit = if cfg!(test) {
 // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
 std::cmp::min(32, recursion_limit)
 } else {
 recursion_limit
-});
+};
 Expander {
 current_file_id,
 module,

@@ -194,7 +193,7 @@ impl Expander {
 let Some(call_id) = value else {
 return ExpandResult { value: None, err };
 };
-if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
+if self.recursion_depth as usize > self.recursion_limit {
 self.recursion_depth = u32::MAX;
 cov_mark::hit!(your_stack_belongs_to_me);
 return ExpandResult::only_err(ExpandError::new(
@@ -19,7 +19,6 @@ use hir_expand::{
 use intern::{sym, Interned};
 use itertools::{izip, Itertools};
 use la_arena::Idx;
-use limit::Limit;
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId};
 use syntax::ast;

@@ -55,8 +54,8 @@ use crate::{
 UnresolvedMacro, UseId, UseLoc,
 };

-static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
-static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
+const GLOB_RECURSION_LIMIT: usize = 100;
+const FIXED_POINT_LIMIT: usize = 8192;

 pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
 let crate_graph = db.crate_graph();

@@ -393,7 +392,7 @@ impl DefCollector<'_> {
 }

 i += 1;
-if FIXED_POINT_LIMIT.check(i).is_err() {
+if i > FIXED_POINT_LIMIT {
 tracing::error!("name resolution is stuck");
 break 'resolve_attr;
 }

@@ -993,7 +992,7 @@ impl DefCollector<'_> {
 import: Option<ImportOrExternCrate>,
 depth: usize,
 ) {
-if GLOB_RECURSION_LIMIT.check(depth).is_err() {
+if depth > GLOB_RECURSION_LIMIT {
 // prevent stack overflows (but this shouldn't be possible)
 panic!("infinite recursion in glob imports!");
 }

@@ -1470,8 +1469,7 @@ impl DefCollector<'_> {
 depth: usize,
 container: ItemContainerId,
 ) {
-let recursion_limit = Limit::new(self.def_map.recursion_limit() as usize);
-if recursion_limit.check(depth).is_err() {
+if depth > self.def_map.recursion_limit() as usize {
 cov_mark::hit!(macro_expansion_overflow);
 tracing::warn!("macro expansion is too deep");
 return;

@@ -1499,7 +1497,6 @@ impl DefCollector<'_> {

 fn finish(mut self) -> DefMap {
 // Emit diagnostics for all remaining unexpanded macros.
-
 let _p = tracing::info_span!("DefCollector::finish").entered();

 for directive in &self.unresolved_macros {
|
|||||||
use arrayvec::ArrayVec;
|
use arrayvec::ArrayVec;
|
||||||
use base_db::AnchoredPath;
|
use base_db::AnchoredPath;
|
||||||
use hir_expand::{name::Name, HirFileIdExt};
|
use hir_expand::{name::Name, HirFileIdExt};
|
||||||
use limit::Limit;
|
|
||||||
use span::EditionedFileId;
|
use span::EditionedFileId;
|
||||||
|
|
||||||
use crate::{db::DefDatabase, HirFileId};
|
use crate::{db::DefDatabase, HirFileId};
|
||||||
|
|
||||||
static MOD_DEPTH_LIMIT: Limit = Limit::new(32);
|
const MOD_DEPTH_LIMIT: usize = 32;
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(super) struct ModDir {
|
pub(super) struct ModDir {
|
||||||
@ -50,7 +49,7 @@ impl ModDir {
|
|||||||
|
|
||||||
fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
|
fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
|
||||||
let depth = self.depth + 1;
|
let depth = self.depth + 1;
|
||||||
if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
|
if depth as usize > MOD_DEPTH_LIMIT {
|
||||||
tracing::error!("MOD_DEPTH_LIMIT exceeded");
|
tracing::error!("MOD_DEPTH_LIMIT exceeded");
|
||||||
cov_mark::hit!(circular_mods);
|
cov_mark::hit!(circular_mods);
|
||||||
return None;
|
return None;
|
||||||
|
@@ -31,7 +31,6 @@ cfg.workspace = true
 syntax.workspace = true
 tt.workspace = true
 mbe.workspace = true
-limit.workspace = true
 span.workspace = true
 parser.workspace = true
 syntax-bridge.workspace = true
@@ -833,7 +833,7 @@ fn env_expand(
 if key.as_str() == "OUT_DIR" {
 err = Some(ExpandError::other(
 span,
-r#"`OUT_DIR` not set, enable "build scripts" to fix"#,
+r#"`OUT_DIR` not set, build scripts may have failed to run"#,
 ));
 }

@@ -2,7 +2,6 @@

 use base_db::{ra_salsa, CrateId, SourceDatabase};
 use either::Either;
-use limit::Limit;
 use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
 use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};

@@ -35,7 +34,7 @@ type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
 /// an error will be emitted.
 ///
 /// Actual max for `analysis-stats .` at some point: 30672.
-static TOKEN_LIMIT: Limit = Limit::new(2_097_152);
+const TOKEN_LIMIT: usize = 2_097_152;

 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {

@@ -740,20 +739,19 @@ pub(crate) fn token_tree_to_syntax_node(
 fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
 let tt = tt.top_subtree();
 let count = tt.count();
-if TOKEN_LIMIT.check(count).is_err() {
+if count <= TOKEN_LIMIT {
+Ok(())
+} else {
 Err(ExpandResult {
 value: (),
 err: Some(ExpandError::other(
 tt.delimiter.open,
 format!(
 "macro invocation exceeds token limit: produced {} tokens, limit is {}",
-count,
-TOKEN_LIMIT.inner(),
+count, TOKEN_LIMIT,
 ),
 )),
 })
-} else {
-Ok(())
 }
 }

@@ -238,6 +238,9 @@ impl CustomProcMacroExpander {
 let krate_graph = db.crate_graph();
 // Proc macros have access to the environment variables of the invoking crate.
 let env = &krate_graph[calling_crate].env;
+let current_dir =
+krate_graph[calling_crate].proc_macro_cwd.as_deref().map(ToString::to_string);

 match proc_macro.expander.expand(
 tt,
 attr_arg,

@@ -245,10 +248,7 @@ impl CustomProcMacroExpander {
 def_site,
 call_site,
 mixed_site,
-db.crate_workspace_data()[&calling_crate]
-.proc_macro_cwd
-.as_ref()
-.map(ToString::to_string),
+current_dir,
 ) {
 Ok(t) => ExpandResult::ok(t),
 Err(err) => match err {
@@ -36,6 +36,7 @@ indexmap.workspace = true
 rustc_apfloat = "0.2.0"

 ra-ap-rustc_abi.workspace = true
+ra-ap-rustc_hashes.workspace = true
 ra-ap-rustc_index.workspace = true
 ra-ap-rustc_pattern_analysis.workspace = true


@@ -47,7 +48,6 @@ hir-def.workspace = true
 hir-expand.workspace = true
 base-db.workspace = true
 syntax.workspace = true
-limit.workspace = true
 span.workspace = true

 [dev-dependencies]
|
|||||||
use hir_def::lang_item::LangItem;
|
use hir_def::lang_item::LangItem;
|
||||||
use hir_expand::name::Name;
|
use hir_expand::name::Name;
|
||||||
use intern::sym;
|
use intern::sym;
|
||||||
use limit::Limit;
|
|
||||||
use triomphe::Arc;
|
use triomphe::Arc;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
@ -17,7 +16,7 @@ use crate::{
|
|||||||
TraitEnvironment, Ty, TyBuilder, TyKind,
|
TraitEnvironment, Ty, TyBuilder, TyKind,
|
||||||
};
|
};
|
||||||
|
|
||||||
static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(20);
|
const AUTODEREF_RECURSION_LIMIT: usize = 20;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub(crate) enum AutoderefKind {
|
pub(crate) enum AutoderefKind {
|
||||||
@ -140,7 +139,7 @@ impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
|
|||||||
return Some((self.ty.clone(), 0));
|
return Some((self.ty.clone(), 0));
|
||||||
}
|
}
|
||||||
|
|
||||||
if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
|
if self.steps.len() > AUTODEREF_RECURSION_LIMIT {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -768,21 +768,23 @@ pub(crate) fn adt_datum_query(
 phantom_data,
 };

-let variant_id_to_fields = |id: VariantId| {
+// this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
+let _variant_id_to_fields = |id: VariantId| {
 let variant_data = &id.variant_data(db.upcast());
-let fields = if variant_data.fields().is_empty() || bound_vars_subst.is_empty(Interner) {
+let fields = if variant_data.fields().is_empty() {
 vec![]
 } else {
-// HACK: provide full struct type info slows down rust-analyzer by quite a bit unfortunately,
-// so we trick chalk into thinking that our struct impl Unsize
-if let Some(ty) = bound_vars_subst.at(Interner, 0).ty(Interner) {
-vec![ty.clone()]
-} else {
-vec![]
-}
+let field_types = db.field_types(id);
+variant_data
+.fields()
+.iter()
+.map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst))
+.filter(|it| !it.contains_unknown())
+.collect()
 };
 rust_ir::AdtVariantDatum { fields }
 };
+let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] };

 let (kind, variants) = match adt_id {
 hir_def::AdtId::StructId(id) => {
|
|||||||
};
|
};
|
||||||
use la_arena::{Idx, RawIdx};
|
use la_arena::{Idx, RawIdx};
|
||||||
use rustc_abi::AddressSpace;
|
use rustc_abi::AddressSpace;
|
||||||
use rustc_index::{IndexSlice, IndexVec};
|
|
||||||
use rustc_hashes::Hash64;
|
use rustc_hashes::Hash64;
|
||||||
|
use rustc_index::{IndexSlice, IndexVec};
|
||||||
|
|
||||||
use triomphe::Arc;
|
use triomphe::Arc;
|
||||||
|
|
||||||
|
@@ -10,7 +10,7 @@ use hir_def::{
 generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget},
 path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments},
 resolver::{ResolveValueResult, TypeNs, ValueNs},
-type_ref::{TypeBound, TypeRef},
+type_ref::{TypeBound, TypeRef, TypesMap},
 GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId,
 };
 use smallvec::SmallVec;

@@ -838,15 +838,21 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
 (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
 // Find the generic index for the target of our `bound`
 let target_param_idx =
-self.ctx.resolver.where_predicates_in_scope().find_map(|(p, _)| {
-match p {
+self.ctx.resolver.where_predicates_in_scope().find_map(
+|(p, (_, types_map))| match p {
 WherePredicate::TypeBound {
 target: WherePredicateTypeTarget::TypeOrConstParam(idx),
 bound: b,
-} if b == bound => Some(idx),
+} if std::ptr::eq::<TypesMap>(
+self.ctx.types_map,
+types_map,
+) && bound == b =>
+{
+Some(idx)
+}
 _ => None,
-}
-});
+},
+);
 let ty = if let Some(target_param_idx) = target_param_idx {
 let mut counter = 0;
 let generics = self.ctx.generics().expect("generics in scope");
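Aside (not part of the diff): the guard above now also requires that the two `TypesMap` references point to the same map via `std::ptr::eq`, not merely to structurally equal contents. A self-contained sketch of that identity-vs-equality distinction, using a stand-in type rather than rust-analyzer's `TypesMap`:

```rust
/// Stand-in for a types map; the real `TypesMap` lives in hir-def.
#[derive(PartialEq)]
struct TypesMapLike {
    entries: Vec<u32>,
}

fn main() {
    let a = TypesMapLike { entries: vec![1, 2, 3] };
    let b = TypesMapLike { entries: vec![1, 2, 3] };

    // Structurally equal...
    assert!(a == b);
    // ...but not the same object, which is what `std::ptr::eq` tests.
    assert!(!std::ptr::eq(&a, &b));
    assert!(std::ptr::eq(&a, &a));
}
```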
@@ -3,7 +3,7 @@ use span::{Edition, EditionedFileId};
 use syntax::{TextRange, TextSize};
 use test_fixture::WithFixture;

-use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
+use crate::{db::HirDatabase, mir::MirLowerError, test_db::TestDB, Interner, Substitution};

 use super::{interpret_mir, MirEvalError};


@@ -84,6 +84,16 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic:
 assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
 }

+fn check_error_with(
+#[rust_analyzer::rust_fixture] ra_fixture: &str,
+expect_err: impl FnOnce(MirEvalError) -> bool,
+) {
+let (db, file_ids) = TestDB::with_many_files(ra_fixture);
+let file_id = *file_ids.last().unwrap();
+let e = eval_main(&db, file_id).unwrap_err();
+assert!(expect_err(e));
+}
+
 #[test]
 fn function_with_extern_c_abi() {
 check_pass(

@@ -945,3 +955,27 @@ fn main() {
 "#,
 );
 }
+
+#[test]
+fn regression_19177() {
+check_error_with(
+r#"
+//- minicore: copy
+trait Foo {}
+trait Bar {}
+trait Baz {}
+trait Qux {
+type Assoc;
+}
+
+fn main<'a, T: Foo + Bar + Baz>(
+x: &T,
+y: (),
+z: &'a dyn Qux<Assoc = T>,
+w: impl Foo + Bar,
+) {
+}
+"#,
+|e| matches!(e, MirEvalError::MirLowerError(_, MirLowerError::GenericArgNotProvided(..))),
+);
+}
@@ -535,7 +535,7 @@ fn test() {

 #[test]
 fn coerce_unsize_generic() {
-check_no_mismatches(
+check(
 r#"
 //- minicore: coerce_unsized
 struct Foo<T> { t: T };

@@ -543,7 +543,9 @@ struct Bar<T>(Foo<T>);

 fn test() {
 let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
+//^^^^^^^^^^^^^^^^^^^^^ expected &'? Foo<[usize]>, got &'? Foo<[i32; 3]>
 let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
+//^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &'? Bar<[usize]>, got &'? Bar<[i32; 3]>
 }
 "#,
 );

@@ -955,24 +957,3 @@ fn f() {
 "#,
 );
 }
-
-#[test]
-fn coerce_nested_unsized_struct() {
-check_types(
-r#"
-//- minicore: fn, coerce_unsized, dispatch_from_dyn, sized
-use core::marker::Unsize;
-
-struct Foo<T: ?Sized>(T);
-
-fn need(_: &Foo<dyn Fn(i32) -> i32>) {
-}
-
-fn test() {
-let callback = |x| x;
-//^ i32
-need(&Foo(callback));
-}
-"#,
-)
-}
@@ -4694,21 +4694,21 @@ fn f<T: Send, U>() {
 Struct::<T>::IS_SEND;
 //^^^^^^^^^^^^^^^^^^^^Yes
 Struct::<U>::IS_SEND;
-//^^^^^^^^^^^^^^^^^^^^{unknown}
+//^^^^^^^^^^^^^^^^^^^^Yes
 Struct::<*const T>::IS_SEND;
-//^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
 Enum::<T>::IS_SEND;
 //^^^^^^^^^^^^^^^^^^Yes
 Enum::<U>::IS_SEND;
-//^^^^^^^^^^^^^^^^^^{unknown}
+//^^^^^^^^^^^^^^^^^^Yes
 Enum::<*const T>::IS_SEND;
-//^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
+//^^^^^^^^^^^^^^^^^^^^^^^^^Yes
 Union::<T>::IS_SEND;
 //^^^^^^^^^^^^^^^^^^^Yes
 Union::<U>::IS_SEND;
-//^^^^^^^^^^^^^^^^^^^{unknown}
+//^^^^^^^^^^^^^^^^^^^Yes
 Union::<*const T>::IS_SEND;
-//^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
 PhantomData::<T>::IS_SEND;
 //^^^^^^^^^^^^^^^^^^^^^^^^^Yes
 PhantomData::<U>::IS_SEND;
|
|||||||
pub assist_emit_must_use: bool,
|
pub assist_emit_must_use: bool,
|
||||||
pub term_search_fuel: u64,
|
pub term_search_fuel: u64,
|
||||||
pub term_search_borrowck: bool,
|
pub term_search_borrowck: bool,
|
||||||
|
pub code_action_grouping: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AssistConfig {
|
impl AssistConfig {
|
||||||
|
@@ -48,6 +48,10 @@ use crate::{
 // }
 // ```
 pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+if !ctx.config.code_action_grouping {
+return None;
+}
+
 let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
 let strukt_name = strukt.name()?;
 let current_module = ctx.sema.scope(strukt.syntax())?.module();

@@ -213,7 +217,9 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'

 #[cfg(test)]
 mod tests {
-use crate::tests::{check_assist, check_assist_not_applicable};
+use crate::tests::{
+check_assist, check_assist_not_applicable, check_assist_not_applicable_no_grouping,
+};

 use super::*;


@@ -717,4 +723,21 @@ impl Person {
 "#,
 );
 }
+
+#[test]
+fn delegate_method_skipped_when_no_grouping() {
+check_assist_not_applicable_no_grouping(
+generate_delegate_methods,
+r#"
+struct Age(u8);
+impl Age {
+fn age(&self) -> u8 {
+self.0
+}
+}
+struct Person {
+ag$0e: Age,
+}"#,
+);
+}
 }
@@ -88,6 +88,10 @@ use syntax::{
 // }
 // ```
 pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+if !ctx.config.code_action_grouping {
+return None;
+}
+
 let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?;

 let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() {

@@ -788,7 +792,9 @@ fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Pat
 mod test {

 use super::*;
-use crate::tests::{check_assist, check_assist_not_applicable};
+use crate::tests::{
+check_assist, check_assist_not_applicable, check_assist_not_applicable_no_grouping,
+};

 #[test]
 fn test_tuple_struct_basic() {

@@ -1836,4 +1842,33 @@ impl<D, T: C<A>> C<D> for B<T> {
 "#,
 )
 }
+
+#[test]
+fn delegate_trait_skipped_when_no_grouping() {
+check_assist_not_applicable_no_grouping(
+generate_delegate_trait,
+r#"
+trait SomeTrait {
+type T;
+fn fn_(arg: u32) -> u32;
+fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+type T = u32;
+
+fn fn_(arg: u32) -> u32 {
+42
+}
+
+fn method_(&mut self) -> bool {
+false
+}
+}
+struct B {
+a$0 : A,
+}
+"#,
+);
+}
 }
@@ -34,6 +34,26 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
 assist_emit_must_use: false,
 term_search_fuel: 400,
 term_search_borrowck: true,
+code_action_grouping: true,
+};
+
+pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
+snippet_cap: SnippetCap::new(true),
+allowed: None,
+insert_use: InsertUseConfig {
+granularity: ImportGranularity::Crate,
+prefix_kind: hir::PrefixKind::Plain,
+enforce_granularity: true,
+group: true,
+skip_glob_imports: true,
+},
+prefer_no_std: false,
+prefer_prelude: true,
+prefer_absolute: false,
+assist_emit_must_use: false,
+term_search_fuel: 400,
+term_search_borrowck: true,
+code_action_grouping: false,
 };

 pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {

@@ -52,6 +72,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
 assist_emit_must_use: false,
 term_search_fuel: 400,
 term_search_borrowck: true,
+code_action_grouping: true,
 };

 pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {

@@ -70,6 +91,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
 assist_emit_must_use: false,
 term_search_fuel: 400,
 term_search_borrowck: true,
+code_action_grouping: true,
 };

 pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {

@@ -173,6 +195,20 @@ pub(crate) fn check_assist_not_applicable_for_import_one(
 );
 }

+#[track_caller]
+pub(crate) fn check_assist_not_applicable_no_grouping(
+assist: Handler,
+#[rust_analyzer::rust_fixture] ra_fixture: &str,
+) {
+check_with_config(
+TEST_CONFIG_NO_GROUPING,
+assist,
+ra_fixture,
+ExpectedResult::NotApplicable,
+None,
+);
+}
+
 /// Check assist in unresolved state. Useful to check assists for lazy computation.
 #[track_caller]
 pub(crate) fn check_assist_unresolved(
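Aside (not part of the diff): both delegate assists above use the same early-return gate on the new `code_action_grouping` flag. A minimal self-contained sketch of that gating pattern, with hypothetical config/context types standing in for rust-analyzer's `AssistConfig`/`AssistContext`:

```rust
/// Hypothetical stand-ins for AssistConfig / AssistContext.
struct Config {
    code_action_grouping: bool,
}

struct Context {
    config: Config,
}

/// An assist that, like `generate_delegate_methods` above, bails out early
/// when the client did not advertise support for grouped code actions.
fn grouped_assist(ctx: &Context) -> Option<&'static str> {
    if !ctx.config.code_action_grouping {
        return None; // not applicable without grouping support
    }
    Some("assist offered")
}

fn main() {
    let with = Context { config: Config { code_action_grouping: true } };
    let without = Context { config: Config { code_action_grouping: false } };
    assert_eq!(grouped_assist(&with), Some("assist offered"));
    assert_eq!(grouped_assist(&without), None);
}
```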
@@ -30,7 +30,6 @@ bitflags.workspace = true

 # local deps
 base-db.workspace = true
-limit.workspace = true
 parser.workspace = true
 profile.workspace = true
 stdx.workspace = true
@@ -357,7 +357,7 @@ fn path_applicable_imports(
 let mod_path = mod_path(item)?;
 Some(LocatedImport::new(mod_path, item, item))
 })
-.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
+.take(DEFAULT_QUERY_SEARCH_LIMIT)
 .collect()
 }
 // we have some unresolved qualifier that we search an import for

@@ -383,7 +383,7 @@ fn path_applicable_imports(
 qualifier_rest,
 )
 })
-.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
+.take(DEFAULT_QUERY_SEARCH_LIMIT)
 .collect(),
 }
 }
@@ -6,7 +6,6 @@ use std::ops::ControlFlow;

 use either::Either;
 use hir::{import_map, Crate, ItemInNs, Module, Semantics};
-use limit::Limit;

 use crate::{
 imports::import_assets::NameToImport,

@@ -15,7 +14,7 @@ use crate::{
 };

 /// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
+pub const DEFAULT_QUERY_SEARCH_LIMIT: usize = 100;

 pub use import_map::AssocSearchMode;

@@ -133,7 +133,7 @@ macro_rules! env { () => {} }
 macro_rules! concat { () => {} }

 include!(concat!(env!("OUT_DIR"), "/out.rs"));
-//^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+//^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
 "#,
 );
 }

@@ -186,7 +186,7 @@ fn main() {
 //^^^^^^^ error: expected string literal

 env!("OUT_DIR");
-//^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+//^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run

 compile_error!("compile_error works");
 //^^^^^^^^^^^^^ error: compile_error works
@@ -252,14 +252,14 @@ impl Analysis {
 Arc::new(cfg_options),
 None,
 Env::default(),
-false,
 CrateOrigin::Local { repo: None, name: None },
+false,
+None,
 );
 change.change_file(file_id, Some(text));
 let ws_data = crate_graph
 .iter()
 .zip(iter::repeat(Arc::new(CrateWorkspaceData {
-proc_macro_cwd: None,
 data_layout: Err("fixture has no layout".into()),
 toolchain: None,
 })))
@@ -68,6 +68,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
 dependencies,
 origin,
 is_proc_macro,
+proc_macro_cwd,
 } = &crate_graph[crate_id];
 format_to!(
 buf,

@@ -85,6 +86,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
 format_to!(buf, " Env: {:?}\n", env);
 format_to!(buf, " Origin: {:?}\n", origin);
 format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro);
+format_to!(buf, " Proc macro cwd: {:?}\n", proc_macro_cwd);
 let deps = dependencies
 .iter()
 .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw()))
@ -1,16 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "limit"
|
|
||||||
version = "0.0.0"
|
|
||||||
repository.workspace = true
|
|
||||||
description = "A struct to enforce limits for rust-analyzer."
|
|
||||||
|
|
||||||
authors.workspace = true
|
|
||||||
edition.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
rust-version.workspace = true
|
|
||||||
|
|
||||||
[features]
|
|
||||||
tracking = []
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
@ -1,67 +0,0 @@
|
|||||||
//! limit defines a struct to enforce limits.
|
|
||||||
|
|
||||||
#[cfg(feature = "tracking")]
|
|
||||||
use std::sync::atomic::AtomicUsize;
|
|
||||||
|
|
||||||
/// Represents a struct used to enforce a numerical limit.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Limit {
|
|
||||||
upper_bound: usize,
|
|
||||||
#[cfg(feature = "tracking")]
|
|
||||||
max: AtomicUsize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Limit {
|
|
||||||
/// Creates a new limit.
|
|
||||||
#[inline]
|
|
||||||
pub const fn new(upper_bound: usize) -> Self {
|
|
||||||
Self {
|
|
||||||
upper_bound,
|
|
||||||
#[cfg(feature = "tracking")]
|
|
||||||
max: AtomicUsize::new(0),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new limit.
|
|
||||||
#[inline]
|
|
||||||
#[cfg(feature = "tracking")]
|
|
||||||
pub const fn new_tracking(upper_bound: usize) -> Self {
|
|
||||||
Self {
|
|
||||||
upper_bound,
|
|
||||||
#[cfg(feature = "tracking")]
|
|
||||||
max: AtomicUsize::new(1),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Gets the underlying numeric limit.
|
|
||||||
#[inline]
|
|
||||||
pub const fn inner(&self) -> usize {
|
|
||||||
self.upper_bound
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Checks whether the given value is below the limit.
|
|
||||||
/// Returns `Ok` when `other` is below `self`, and `Err` otherwise.
|
|
||||||
#[inline]
|
|
||||||
pub fn check(&self, other: usize) -> Result<(), ()> {
|
|
||||||
if other > self.upper_bound {
|
|
||||||
Err(())
|
|
||||||
} else {
|
|
||||||
#[cfg(feature = "tracking")]
|
|
||||||
loop {
|
|
||||||
use std::sync::atomic::Ordering;
|
|
||||||
let old_max = self.max.load(Ordering::Relaxed);
|
|
||||||
if other <= old_max || old_max == 0 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
_ = self.max.compare_exchange_weak(
|
|
||||||
old_max,
|
|
||||||
other,
|
|
||||||
Ordering::Relaxed,
|
|
||||||
Ordering::Relaxed,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
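The `limit` crate deleted above was only a thin wrapper around a `usize` upper bound; the hunks that follow replace its uses with plain constants. A minimal sketch of that migration (not part of the patch; note that the old `check` accepted values equal to the bound, while the new comparison is strictly less-than):

```rust
// Before (with the now-deleted `limit` crate):
//     static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
//     assert!(PARSER_STEP_LIMIT.check(steps).is_ok(), "the parser seems stuck");

// After: a plain constant and an ordinary comparison.
const PARSER_STEP_LIMIT: usize = 15_000_000;

fn assert_under_limit(steps: usize) {
    // `Limit::check` returned Err only when the value exceeded the bound,
    // so the strict `<` here is one step stricter than the old check.
    assert!(steps < PARSER_STEP_LIMIT, "the parser seems stuck");
}

fn main() {
    assert_under_limit(42);
}
```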
@@ -456,7 +456,6 @@ fn load_crate_graph(
 let ws_data = crate_graph
 .iter()
 .zip(iter::repeat(From::from(CrateWorkspaceData {
-proc_macro_cwd: None,
 data_layout: target_layout.clone(),
 toolchain: toolchain.clone(),
 })))
@@ -15,7 +15,6 @@ doctest = false
 [dependencies]
 drop_bomb = "0.1.5"
 ra-ap-rustc_lexer.workspace = true
-limit.workspace = true
 tracing = { workspace = true, optional = true }
 
 edition.workspace = true
@@ -3,7 +3,6 @@
 use std::cell::Cell;
 
 use drop_bomb::DropBomb;
-use limit::Limit;
 
 use crate::{
 event::Event,
@@ -30,7 +29,7 @@ pub(crate) struct Parser<'t> {
 edition: Edition,
 }
 
-static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
+const PARSER_STEP_LIMIT: usize = 15_000_000;
 
 impl<'t> Parser<'t> {
 pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@@ -54,7 +53,7 @@ impl<'t> Parser<'t> {
 assert!(n <= 3);
 
 let steps = self.steps.get();
-assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
+assert!((steps as usize) < PARSER_STEP_LIMIT, "the parser seems stuck");
 self.steps.set(steps + 1);
 
 self.inp.kind(self.pos + n)
@@ -721,16 +721,16 @@ mod err {
 #[test]
 fn bad_asm_expr() { run_and_expect_errors("test_data/parser/inline/err/bad_asm_expr.rs"); }
 #[test]
+fn comma_after_default_values_syntax() {
+run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs");
+}
+#[test]
 fn comma_after_functional_update_syntax() {
 run_and_expect_errors(
 "test_data/parser/inline/err/comma_after_functional_update_syntax.rs",
 );
 }
 #[test]
-fn comma_after_default_values_syntax() {
-run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs");
-}
-#[test]
 fn crate_visibility_empty_recover() {
 run_and_expect_errors("test_data/parser/inline/err/crate_visibility_empty_recover.rs");
 }
@@ -164,6 +164,7 @@ impl ProjectJson {
 is_proc_macro: crate_data.is_proc_macro,
 repository: crate_data.repository,
 build,
+proc_macro_cwd: crate_data.proc_macro_cwd.map(absolutize_on_base),
 }
 })
 .collect(),
@@ -240,6 +241,8 @@ pub struct Crate {
 pub(crate) include: Vec<AbsPathBuf>,
 pub(crate) exclude: Vec<AbsPathBuf>,
 pub(crate) is_proc_macro: bool,
+/// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
+pub(crate) proc_macro_cwd: Option<AbsPathBuf>,
 pub(crate) repository: Option<String>,
 pub build: Option<Build>,
 }
@@ -362,6 +365,8 @@ struct CrateData {
 repository: Option<String>,
 #[serde(default)]
 build: Option<BuildData>,
+#[serde(default)]
+proc_macro_cwd: Option<Utf8PathBuf>,
 }
 
 mod cfg_ {
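`CrateData` above is the serde mirror of a crate entry in `rust-project.json`, so a project file may now carry an optional `proc_macro_cwd` path per crate. A minimal sketch of how such an optional field deserializes (simplified to `std::path::PathBuf` instead of `Utf8PathBuf`, and assuming `serde`/`serde_json` as dependencies):

```rust
use std::path::PathBuf;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct CrateData {
    // `#[serde(default)]` keeps old project files valid: a missing key
    // simply becomes `None`.
    #[serde(default)]
    proc_macro_cwd: Option<PathBuf>,
}

fn main() {
    // Key absent: deserializes to None rather than erroring.
    let old: CrateData = serde_json::from_str("{}").unwrap();
    assert!(old.proc_macro_cwd.is_none());

    // Key present: the path is picked up as the proc-macro working directory.
    let new: CrateData =
        serde_json::from_str(r#"{ "proc_macro_cwd": "/workspace/root" }"#).unwrap();
    assert_eq!(new.proc_macro_cwd, Some(PathBuf::from("/workspace/root")));
}
```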
@@ -312,8 +312,8 @@ impl Sysroot {
 RustLibSrcWorkspace::Empty => true,
 };
 if !has_core {
-let var_note = if env::var_os("rust_lib_src_PATH").is_some() {
+let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
-" (env var `rust_lib_src_PATH` is set and may be incorrect, try unsetting it)"
+" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
 } else {
 ", try running `rustup component add rust-src` to possibly fix this"
 };
@@ -422,18 +422,16 @@ fn discover_sysroot_dir(
 }
 
 fn discover_rust_lib_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
-if let Ok(path) = env::var("rust_lib_src_PATH") {
+if let Ok(path) = env::var("RUST_SRC_PATH") {
 if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
 let core = path.join("core");
 if fs::metadata(&core).is_ok() {
-tracing::debug!("Discovered sysroot by rust_lib_src_PATH: {path}");
+tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}");
 return Some(path);
 }
-tracing::debug!(
-"rust_lib_src_PATH is set, but is invalid (no core: {core:?}), ignoring"
-);
+tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring");
 } else {
-tracing::debug!("rust_lib_src_PATH is set, but is invalid, ignoring");
+tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
 }
 }
 
@@ -958,6 +958,7 @@ fn project_json_to_crate_graph(
 is_proc_macro,
 repository,
 is_workspace_member,
+proc_macro_cwd,
 ..
 },
 file_id,
@@ -1005,7 +1006,6 @@ fn project_json_to_crate_graph(
 Arc::new(cfg_options),
 None,
 env,
-*is_proc_macro,
 if let Some(name) = display_name.clone() {
 CrateOrigin::Local {
 repo: repository.clone(),
@@ -1014,6 +1014,8 @@ fn project_json_to_crate_graph(
 } else {
 CrateOrigin::Local { repo: None, name: None }
 },
+*is_proc_macro,
+proc_macro_cwd.clone(),
 );
 debug!(
 ?crate_graph_crate_id,
@@ -1283,11 +1285,12 @@ fn detached_file_to_crate_graph(
 cfg_options.clone(),
 None,
 Env::default(),
-false,
 CrateOrigin::Local {
 repo: None,
 name: display_name.map(|n| n.canonical_name().to_owned()),
 },
+false,
+None,
 );
 
 public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
@@ -1448,8 +1451,13 @@ fn add_target_crate_root(
 Arc::new(cfg_options),
 potential_cfg_options.map(Arc::new),
 env,
-matches!(kind, TargetKind::Lib { is_proc_macro: true }),
 origin,
+matches!(kind, TargetKind::Lib { is_proc_macro: true }),
+Some(if pkg.is_member {
+cargo.workspace_root().to_path_buf()
+} else {
+pkg.manifest.parent().to_path_buf()
+}),
 );
 if let TargetKind::Lib { is_proc_macro: true } = kind {
 let proc_macro = match build_data {
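The call above picks the directory proc-macros run in: the Cargo workspace root for workspace members, and the package's own manifest directory for everything else (for example, crates pulled from the registry). A small stand-alone sketch of that choice (a hypothetical helper; the patch itself inlines the expression at the call site):

```rust
use std::path::{Path, PathBuf};

// Workspace members get the workspace root; other packages get the
// directory that contains their Cargo.toml.
fn proc_macro_cwd(is_member: bool, workspace_root: &Path, manifest: &Path) -> Option<PathBuf> {
    Some(if is_member {
        workspace_root.to_path_buf()
    } else {
        manifest.parent().unwrap_or(manifest).to_path_buf()
    })
}

fn main() {
    let cwd = proc_macro_cwd(false, Path::new("/ws"), Path::new("/registry/foo-1.0/Cargo.toml"));
    assert_eq!(cwd, Some(PathBuf::from("/registry/foo-1.0")));
}
```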
@ -1587,8 +1595,9 @@ fn sysroot_to_crate_graph(
|
|||||||
cfg_options.clone(),
|
cfg_options.clone(),
|
||||||
None,
|
None,
|
||||||
Env::default(),
|
Env::default(),
|
||||||
false,
|
|
||||||
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
|
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
|
||||||
|
false,
|
||||||
|
None,
|
||||||
);
|
);
|
||||||
Some((krate, crate_id))
|
Some((krate, crate_id))
|
||||||
})
|
})
|
||||||
|
@ -61,6 +61,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
1: CrateData {
|
1: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -132,6 +137,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
2: CrateData {
|
2: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -203,6 +213,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
3: CrateData {
|
3: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -274,6 +289,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
4: CrateData {
|
4: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -341,5 +361,10 @@
|
|||||||
name: "libc",
|
name: "libc",
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
}
|
}
|
@ -61,6 +61,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
1: CrateData {
|
1: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -132,6 +137,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
2: CrateData {
|
2: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -203,6 +213,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
3: CrateData {
|
3: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -274,6 +289,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
4: CrateData {
|
4: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -341,5 +361,10 @@
|
|||||||
name: "libc",
|
name: "libc",
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
}
|
}
|
@ -60,6 +60,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
1: CrateData {
|
1: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -130,6 +135,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
2: CrateData {
|
2: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -200,6 +210,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
3: CrateData {
|
3: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -270,6 +285,11 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$hello-world",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
4: CrateData {
|
4: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -337,5 +357,10 @@
|
|||||||
name: "libc",
|
name: "libc",
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: Some(
|
||||||
|
AbsPathBuf(
|
||||||
|
"$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
|
||||||
|
),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
}
|
}
|
@ -38,6 +38,7 @@
|
|||||||
Alloc,
|
Alloc,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
1: CrateData {
|
1: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -69,6 +70,7 @@
|
|||||||
Core,
|
Core,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
2: CrateData {
|
2: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -100,6 +102,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
3: CrateData {
|
3: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -131,6 +134,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
4: CrateData {
|
4: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -179,6 +183,7 @@
|
|||||||
ProcMacro,
|
ProcMacro,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
5: CrateData {
|
5: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -210,6 +215,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
6: CrateData {
|
6: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -306,6 +312,7 @@
|
|||||||
Std,
|
Std,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
7: CrateData {
|
7: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -337,6 +344,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
8: CrateData {
|
8: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -368,6 +376,7 @@
|
|||||||
Test,
|
Test,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
9: CrateData {
|
9: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -399,6 +408,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
10: CrateData {
|
10: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -477,6 +487,7 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
11: CrateData {
|
11: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -555,5 +566,6 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
}
|
}
|
@ -38,6 +38,7 @@
|
|||||||
Alloc,
|
Alloc,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
1: CrateData {
|
1: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -69,6 +70,7 @@
|
|||||||
Core,
|
Core,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
2: CrateData {
|
2: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -100,6 +102,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
3: CrateData {
|
3: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -131,6 +134,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
4: CrateData {
|
4: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -179,6 +183,7 @@
|
|||||||
ProcMacro,
|
ProcMacro,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
5: CrateData {
|
5: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -210,6 +215,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
6: CrateData {
|
6: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -306,6 +312,7 @@
|
|||||||
Std,
|
Std,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
7: CrateData {
|
7: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -337,6 +344,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
8: CrateData {
|
8: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -368,6 +376,7 @@
|
|||||||
Test,
|
Test,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
9: CrateData {
|
9: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -399,6 +408,7 @@
|
|||||||
Other,
|
Other,
|
||||||
),
|
),
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
10: CrateData {
|
10: CrateData {
|
||||||
root_file_id: FileId(
|
root_file_id: FileId(
|
||||||
@ -474,5 +484,6 @@
|
|||||||
),
|
),
|
||||||
},
|
},
|
||||||
is_proc_macro: false,
|
is_proc_macro: false,
|
||||||
|
proc_macro_cwd: None,
|
||||||
},
|
},
|
||||||
}
|
}
|
@ -1476,6 +1476,7 @@ impl Config {
|
|||||||
prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(),
|
prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(),
|
||||||
term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
|
term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
|
||||||
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
|
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
|
||||||
|
code_action_grouping: self.code_action_group(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -885,7 +885,6 @@ pub fn ws_to_crate_graph(
|
|||||||
ws_data.extend(mapping.values().copied().zip(iter::repeat(Arc::new(CrateWorkspaceData {
|
ws_data.extend(mapping.values().copied().zip(iter::repeat(Arc::new(CrateWorkspaceData {
|
||||||
toolchain: toolchain.clone(),
|
toolchain: toolchain.clone(),
|
||||||
data_layout: target_layout.clone(),
|
data_layout: target_layout.clone(),
|
||||||
proc_macro_cwd: Some(ws.workspace_root().to_owned()),
|
|
||||||
}))));
|
}))));
|
||||||
proc_macro_paths.push(crate_proc_macros);
|
proc_macro_paths.push(crate_proc_macros);
|
||||||
}
|
}
|
||||||
|
@ -211,8 +211,9 @@ impl ChangeFixture {
|
|||||||
From::from(meta.cfg.clone()),
|
From::from(meta.cfg.clone()),
|
||||||
Some(From::from(meta.cfg)),
|
Some(From::from(meta.cfg)),
|
||||||
meta.env,
|
meta.env,
|
||||||
false,
|
|
||||||
origin,
|
origin,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
);
|
);
|
||||||
let prev = crates.insert(crate_name.clone(), crate_id);
|
let prev = crates.insert(crate_name.clone(), crate_id);
|
||||||
assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
|
assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
|
||||||
@ -249,8 +250,9 @@ impl ChangeFixture {
|
|||||||
From::from(default_cfg.clone()),
|
From::from(default_cfg.clone()),
|
||||||
Some(From::from(default_cfg)),
|
Some(From::from(default_cfg)),
|
||||||
default_env,
|
default_env,
|
||||||
false,
|
|
||||||
CrateOrigin::Local { repo: None, name: None },
|
CrateOrigin::Local { repo: None, name: None },
|
||||||
|
false,
|
||||||
|
None,
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
for (from, to, prelude) in crate_deps {
|
for (from, to, prelude) in crate_deps {
|
||||||
@ -286,8 +288,9 @@ impl ChangeFixture {
|
|||||||
String::from("__ra_is_test_fixture"),
|
String::from("__ra_is_test_fixture"),
|
||||||
String::from("__ra_is_test_fixture"),
|
String::from("__ra_is_test_fixture"),
|
||||||
)]),
|
)]),
|
||||||
false,
|
|
||||||
CrateOrigin::Lang(LangCrateOrigin::Core),
|
CrateOrigin::Lang(LangCrateOrigin::Core),
|
||||||
|
false,
|
||||||
|
None,
|
||||||
);
|
);
|
||||||
|
|
||||||
for krate in all_crates {
|
for krate in all_crates {
|
||||||
@ -333,8 +336,9 @@ impl ChangeFixture {
|
|||||||
String::from("__ra_is_test_fixture"),
|
String::from("__ra_is_test_fixture"),
|
||||||
String::from("__ra_is_test_fixture"),
|
String::from("__ra_is_test_fixture"),
|
||||||
)]),
|
)]),
|
||||||
true,
|
|
||||||
CrateOrigin::Local { repo: None, name: None },
|
CrateOrigin::Local { repo: None, name: None },
|
||||||
|
true,
|
||||||
|
None,
|
||||||
);
|
);
|
||||||
proc_macros.insert(proc_macros_crate, Ok(proc_macro));
|
proc_macros.insert(proc_macros_crate, Ok(proc_macro));
|
||||||
|
|
||||||
@ -362,7 +366,6 @@ impl ChangeFixture {
|
|||||||
crate_graph
|
crate_graph
|
||||||
.iter()
|
.iter()
|
||||||
.zip(iter::repeat(From::from(CrateWorkspaceData {
|
.zip(iter::repeat(From::from(CrateWorkspaceData {
|
||||||
proc_macro_cwd: None,
|
|
||||||
data_layout: target_data_layout,
|
data_layout: target_data_layout,
|
||||||
toolchain,
|
toolchain,
|
||||||
})))
|
})))
|
||||||
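Across the fixture hunks above, the crate-graph constructor calls are all reshuffled the same way: `origin` now comes before `is_proc_macro`, and a trailing proc-macro working directory is appended. A hypothetical signature inferred from the call sites (heavily simplified; the real function takes many more parameters):

```rust
use std::path::PathBuf;

struct Env;
struct CfgOptions;
enum CrateOrigin { Local }

// Sketch only: the argument order shown here mirrors the call sites above.
fn add_crate_root(
    _cfg: CfgOptions,
    _potential_cfg: Option<CfgOptions>,
    _env: Env,
    _origin: CrateOrigin,             // moved ahead of `is_proc_macro`
    _is_proc_macro: bool,             // previously passed before `origin`
    _proc_macro_cwd: Option<PathBuf>, // new trailing argument
) {
}

fn main() {
    add_crate_root(CfgOptions, None, Env, CrateOrigin::Local, false, None);
}
```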
@@ -34,3 +34,8 @@ use-boolean-and = true
 [output.html.fold]
 enable = true
 level = 3
+
+[preprocessor.toc]
+command = "mdbook-toc"
+renderer = ["html"]
+max-level = 3
@@ -14,3 +14,11 @@
 - [Assists (Code Actions)](assists.md)
 - [Diagnostics](diagnostics.md)
 - [Editor Features](editor_features.md)
+- [Contributing](contributing/README.md)
+- [Architecture](contributing/architecture.md)
+- [Debugging](contributing/debugging.md)
+- [Guide](contributing/guide.md)
+- [LSP Extensions](contributing/lsp-extensions.md)
+- [Setup](contributing/setup.md)
+- [Style](contributing/style.md)
+- [Syntax](contributing/syntax.md)

File diff suppressed because it is too large

@@ -3,13 +3,13 @@
 **Source:**
 [config.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs)
 
-The [Installation](#_installation) section contains details on
+The [Installation](./installation.md) section contains details on
 configuration for some of the editors. In general `rust-analyzer` is
 configured via LSP messages, which means that it’s up to the editor to
 decide on the exact format and location of configuration files.
 
-Some clients, such as [VS Code](#vs-code) or [COC plugin in
+Some clients, such as [VS Code](./vs_code.md) or [COC plugin in
-Vim](#coc-rust-analyzer) provide `rust-analyzer` specific configuration
+Vim](./other_editors.md#coc-rust-analyzer) provide `rust-analyzer` specific configuration
 UIs. Others may require you to know a bit more about the interaction
 with `rust-analyzer`.
 
@@ -9,7 +9,7 @@ $ cargo test
 
 should be enough to get you started!
 
-To learn more about how rust-analyzer works, see [./architecture.md](./architecture.md).
+To learn more about how rust-analyzer works, see [Architecture](architecture.md).
 It also explains the high-level layout of the source code.
 Do skim through that document.
 
@@ -24,7 +24,9 @@ rust-analyzer is a part of the [RLS-2.0 working
 group](https://github.com/rust-lang/compiler-team/tree/6a769c13656c0a6959ebc09e7b1f7c09b86fb9c0/working-groups/rls-2.0).
 Discussion happens in this Zulip stream:
 
-https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+<https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer>
 
+<!-- toc -->
+
 # Issue Labels
 
@@ -54,7 +56,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
 
 # Code Style & Review Process
 
-Do see [./style.md](./style.md).
+See the [Style Guide](style.md).
 
 # Cookbook
 
@@ -88,11 +90,13 @@ As a sanity check after I'm done, I use `cargo xtask install --server` and **Rel
 If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
 Notably, this uses the usual `rust-analyzer` binary from `PATH`.
 For this, it is important to have the following in your `settings.json` file:
+
 ```json
 {
 "rust-analyzer.server.path": "rust-analyzer"
 }
 ```
+
 After I am done with the fix, I use `cargo xtask install --client` to try the new extension for real.
 
 If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
@@ -117,6 +121,7 @@ cd editors/code
 npm ci
 npm run lint
 ```
+
 ## How to ...
 
 * ... add an assist? [#7535](https://github.com/rust-lang/rust-analyzer/pull/7535)
@@ -142,14 +147,15 @@ Note that `stdout` is used for the actual protocol, so `println!` will break thi
 To log all communication between the server and the client, there are two choices:
 
 * You can log on the server side, by running something like
+
 ```
 env RA_LOG=lsp_server=debug code .
 ```
 
 * You can log on the client side, by the `rust-analyzer: Toggle LSP Logs` command or enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
 These logs are shown in a separate tab in the output and could be used with LSP inspector.
 Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
 
 
 There are also several VS Code commands which might be of interest:
 
 * `rust-analyzer: Status` shows some memory-usage statistics.
@@ -8,19 +8,20 @@ It goes deeper than what is covered in this document, but will take some time to
 
 See also these implementation-related blog posts:
 
-* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
+* <https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html>
-* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
+* <https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html>
-* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
+* <https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html>
-* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
+* <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
-* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
+* <https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html>
 
 For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
 
 
 ## Bird's Eye View
 
 
 
+<!-- toc -->
 
 On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
 
 More specifically, input data consists of a set of test files (`(PathBuf, String)` pairs) and information about project structure, captured in the so called `CrateGraph`.
@@ -295,7 +296,7 @@ For this reason, all path APIs generally take some existing path as a "file syst
 ### `crates/stdx`
 
 This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
-as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
+as copies of unstable std items we would like to make use of already.
 
 ### `crates/profile`
 
@@ -8,6 +8,7 @@
 <img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
 
 - Install all TypeScript dependencies
+
 ```bash
 cd editors/code
 npm ci
@@ -19,7 +20,6 @@
 where **only** the `rust-analyzer` extension being debugged is enabled.
 * To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
-
 
 ## Debug TypeScript VSCode extension
 
 - `Run Installed Extension` - runs the extension with the globally installed `rust-analyzer` binary.
@@ -36,12 +36,12 @@ To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</k
 
 - When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
 
-```
+```bash
 echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
 ```
 
 
 - By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
 
 ```toml
 [profile.dev]
 debug = 2
@@ -58,6 +58,7 @@ To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</k
 - Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
 
 If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates\rust-analyzer\src\bin\main.rs`)
+
 ```rust
 let mut d = 4;
 while d == 4 { // set a breakpoint here and change the value
@@ -66,6 +67,7 @@ If you need to debug the server from the very beginning, including its initializ
 ```
 
 However for this to work, you will need to enable debug_assertions in your build
+
 ```rust
 RUSTFLAGS='--cfg debug_assertions' cargo build --release
 ```
@@ -12,6 +12,8 @@ https://youtu.be/ANKBNiSWyfc.
 [guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
 [2024-01-01]: https://github.com/rust-lang/rust-analyzer/tree/2024-01-01
 
+<!-- toc -->
+
 ## The big picture
 
 On the highest possible level, rust-analyzer is a stateful component. A client may
@@ -152,7 +154,6 @@ the loop is the [`GlobalState::run`] function initiated by [`main_loop`] after
 [`GlobalState::new`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/global_state.rs#L148-L215
 [`GlobalState::run`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L114-L140
-
 
 Let's walk through a typical analyzer session!
 
 First, we need to figure out what to analyze. To do this, we run `cargo
@@ -19,6 +19,8 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde
 
 If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).
 
+<!-- toc -->
+
 ## Configuration in `initializationOptions`
 
 **Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
@@ -1,3 +1,5 @@
+# Style
+
 Our approach to "clean code" is two-fold:
 
 * We generally don't block PRs on style changes.
@@ -274,7 +276,7 @@ fn f() {
 Assert liberally.
 Prefer [`stdx::never!`](https://docs.rs/always-assert/0.1.2/always_assert/macro.never.html) to standard `assert!`.
 
-**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#error-handling).
+**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/architecture.md#error-handling).
 
 ## Getters & Setters
 
@@ -1,6 +1,5 @@
 # Editor Features
-
 
 ## VS Code
 
 ### Color configurations
@@ -118,7 +117,7 @@ Or it is possible to specify vars more granularly:
 "rust-analyzer.runnables.extraEnv": [
 {
 // "mask": null, // null mask means that this rule will be applied for all runnables
-env: {
+"env": {
 "APP_ID": "1",
 "APP_DATA": "asdf"
 }
@@ -145,7 +144,7 @@ If needed, you can set different values for different platforms:
 "rust-analyzer.runnables.extraEnv": [
 {
 "platform": "win32", // windows only
-env: {
+"env": {
 "APP_DATA": "windows specific data"
 }
 },
@@ -6,6 +6,8 @@ Protocol](https://microsoft.github.io/language-server-protocol/).
 This page assumes that you have already [installed the rust-analyzer
 binary](./rust_analyzer_binary.html).
 
+<!-- toc -->
+
 ## Emacs
 
 To use `rust-analyzer`, you need to install and enable one of the two
@@ -1 +1 @@
-273465e1f2932a30a5b56ac95859cdc86f3f33fa
+e0be1a02626abef2878cb7f4aaef7ae409477112
@@ -117,7 +117,13 @@ impl fmt::Display for Location {
 let path = self.file.strip_prefix(project_root()).unwrap().display().to_string();
 let path = path.replace('\\', "/");
 let name = self.file.file_name().unwrap();
-write!(f, " [{}](/{}#{}) ", name.to_str().unwrap(), path, self.line)
+write!(
+f,
+" [{}](https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}) ",
+name.to_str().unwrap(),
+path,
+self.line
+)
 }
 }
 
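The `Display` impl above now renders documentation locations as absolute GitHub links rather than repo-relative `/path#line` anchors. A tiny sketch of the resulting format (the file name, path, and line number here are made up for illustration):

```rust
fn main() {
    let name = "lsp-extensions.md";
    let path = "docs/book/src/contributing/lsp-extensions.md"; // illustrative path
    let line = 17;
    // Same format string as the new write! above.
    let link = format!(
        " [{}](https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}) ",
        name, path, line
    );
    println!("{link}");
    // -> " [lsp-extensions.md](https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/lsp-extensions.md#L17) "
}
```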
@@ -18,25 +18,24 @@ use crate::{
 util::list_rust_files,
 };
 
-const PARSER_CRATE_ROOT: &str = "crates/parser";
-const PARSER_TEST_DATA: &str = "crates/parser/test_data";
-const PARSER_TEST_DATA_INLINE: &str = "crates/parser/test_data/parser/inline";
 
 pub(crate) fn generate(check: bool) {
-let tests = tests_from_dir(
+let parser_crate_root = project_root().join("crates/parser");
-&project_root().join(Path::new(&format!("{PARSER_CRATE_ROOT}/src/grammar"))),
+let parser_test_data = parser_crate_root.join("test_data");
-);
+let parser_test_data_inline = parser_test_data.join("parser/inline");
 
+let tests = tests_from_dir(&parser_crate_root.join("src/grammar"));
 
 let mut some_file_was_updated = false;
 some_file_was_updated |=
-install_tests(&tests.ok, &format!("{PARSER_TEST_DATA_INLINE}/ok"), check).unwrap();
+install_tests(&tests.ok, parser_test_data_inline.join("ok"), check).unwrap();
 some_file_was_updated |=
-install_tests(&tests.err, &format!("{PARSER_TEST_DATA_INLINE}/err"), check).unwrap();
+install_tests(&tests.err, parser_test_data_inline.join("err"), check).unwrap();
 
 if some_file_was_updated {
-let _ = fs::File::open(format!("{PARSER_CRATE_ROOT}/src/tests.rs"))
+let _ = fs::File::open(parser_crate_root.join("src/tests.rs"))
 .unwrap()
 .set_modified(SystemTime::now());
+}
 
 let ok_tests = tests.ok.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
 let test_name = quote::format_ident!("{}", test.name);
@@ -49,9 +48,7 @@ pub(crate) fn generate(check: bool) {
 quote::quote! {#test_file, crate::Edition::#edition},
 )
 }
-None => {
+None => (quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file}),
-(quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file})
-}
 };
 quote::quote! {
 #[test]
@@ -95,15 +92,13 @@ pub(crate) fn generate(check: bool) {
 let pretty = reformat(output.to_string());
 ensure_file_contents(
 crate::flags::CodegenType::ParserTests,
-format!("{PARSER_TEST_DATA}/generated/runner.rs").as_ref(),
+parser_test_data.join("generated/runner.rs").as_ref(),
 &pretty,
 check,
 );
 }
-}
 
-fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) -> Result<bool> {
+fn install_tests(tests: &HashMap<String, Test>, tests_dir: PathBuf, check: bool) -> Result<bool> {
-let tests_dir = project_root().join(into);
 if !tests_dir.is_dir() {
 fs::create_dir_all(&tests_dir)?;
 }
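The codegen refactor above trades `&str` constants glued together with `format!` for `PathBuf` values built with `join`, so every path is rooted once and composed from there. A small sketch of the before/after shape (`project_root()` here is a stand-in for the xtask helper of the same name):

```rust
use std::path::PathBuf;

// Stand-in for xtask's project_root(); it just needs to return some root.
fn project_root() -> PathBuf {
    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
}

fn main() {
    // Before: const PARSER_TEST_DATA: &str = "crates/parser/test_data";
    //         format!("{PARSER_TEST_DATA}/generated/runner.rs")
    let before = format!("{}/generated/runner.rs", "crates/parser/test_data");

    // After: build the path from project_root() with join.
    let parser_test_data = project_root().join("crates/parser/test_data");
    let after = parser_test_data.join("generated/runner.rs");

    println!("{before}");
    println!("{}", after.display());
}
```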
@@ -27,8 +27,9 @@ fn check_lsp_extensions_docs(sh: &Shell) {
 };
 
 let actual_hash = {
-let lsp_extensions_md =
+let lsp_extensions_md = sh
-sh.read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
+.read_file(project_root().join("docs/book/src/contributing/lsp-extensions.md"))
+.unwrap();
 let text = lsp_extensions_md
 .lines()
 .find_map(|line| line.strip_prefix("lsp/ext.rs hash:"))
@@ -185,7 +186,7 @@ Zlib OR Apache-2.0 OR MIT
 
 fn check_test_attrs(path: &Path, text: &str) {
 let panic_rule =
-"https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
+"https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/style.md#should_panic";
 let need_panic: &[&str] = &[
 // This file.
 "slow-tests/tidy.rs",