Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 11:31:15 +00:00)
Merge pull request #19494 from Veykril/push-uzmzppouxuvr
chore: Remove unnecessary `Arc` clones
Commit c7845a6d70
@@ -64,8 +64,7 @@ impl Files {
     }
 
     pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
-        let files = Arc::clone(&self.files);
-        match files.entry(file_id) {
+        match self.files.entry(file_id) {
             Entry::Occupied(mut occupied) => {
                 occupied.get_mut().set_text(db).to(Arc::from(text));
             }
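This hunk and the next three are the same mechanical change applied to each setter in `impl Files`: an `Arc` handle to the concurrent map was cloned only so that `.entry()` could be called on the clone. Since `.entry()` takes `&self` and `Arc<T>` dereferences to `&T`, the extra refcount bump gains nothing, so the setters now call `.entry()` on the field directly. A minimal standalone sketch of the before/after, assuming the field is shaped like `Arc<DashMap<..>>` (a guess at the shape; the real key/value types and the salsa setter calls are simplified away):

// Sketch only: `u32`/`String` stand in for the real FileId/FileText types.
use std::sync::Arc;

use dashmap::{mapref::entry::Entry, DashMap};

struct Files {
    files: Arc<DashMap<u32, String>>,
}

impl Files {
    fn set_file_text(&self, file_id: u32, text: &str) {
        // Before: clone the Arc just to get a second handle to the same map.
        //     let files = Arc::clone(&self.files);
        //     match files.entry(file_id) { .. }
        // After: go through the Arc's Deref impl directly.
        match self.files.entry(file_id) {
            Entry::Occupied(mut occupied) => *occupied.get_mut() = text.to_owned(),
            Entry::Vacant(vacant) => {
                vacant.insert(text.to_owned());
            }
        }
    }
}

fn main() {
    let files = Files { files: Arc::new(DashMap::new()) };
    files.set_file_text(1, "fn main() {}");
    files.set_file_text(1, "fn main() { () }"); // second call hits the Occupied arm
    assert_eq!(files.files.get(&1).unwrap().value(), "fn main() { () }");
}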
@@ -83,8 +82,7 @@ impl Files {
         text: &str,
         durability: Durability,
     ) {
-        let files = Arc::clone(&self.files);
-        match files.entry(file_id) {
+        match self.files.entry(file_id) {
             Entry::Occupied(mut occupied) => {
                 occupied.get_mut().set_text(db).with_durability(durability).to(Arc::from(text));
             }
@@ -113,8 +111,7 @@ impl Files {
         source_root: Arc<SourceRoot>,
         durability: Durability,
     ) {
-        let source_roots = Arc::clone(&self.source_roots);
-        match source_roots.entry(source_root_id) {
+        match self.source_roots.entry(source_root_id) {
             Entry::Occupied(mut occupied) => {
                 occupied.get_mut().set_source_root(db).with_durability(durability).to(source_root);
             }
@@ -141,9 +138,7 @@ impl Files {
         source_root_id: SourceRootId,
         durability: Durability,
     ) {
-        let file_source_roots = Arc::clone(&self.file_source_roots);
-        // let db = self;
-        match file_source_roots.entry(id) {
+        match self.file_source_roots.entry(id) {
             Entry::Occupied(mut occupied) => {
                 occupied
                     .get_mut()
@@ -203,7 +198,8 @@ pub trait RootQueryDb: SourceDatabase + salsa::Database {
     fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
 
     /// Returns the set of errors obtained from parsing the file including validation errors.
-    fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;
+    #[salsa::transparent]
+    fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>;
 
     #[salsa::transparent]
     fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;
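Besides the return type, the declaration gains `#[salsa::transparent]`: like `toolchain_channel` below it, the query is a plain pass-through with no memo table of its own, which is what allows it to hand out a borrow instead of an owned `Arc` slice; the actual caching moves into the tracked helper in the next hunk. A salsa-free sketch of that pass-through shape, with hypothetical names (`Db` and `cached_parse_errors` are stand-ins, not rust-analyzer types):

struct SyntaxError(String);

// Stand-in for salsa's memo storage.
struct Db {
    cached_parse_errors: Option<Box<[SyntaxError]>>,
}

trait RootQueryDbLike {
    // New shape: a borrow tied to the database lifetime, instead of an owned
    // Option<Arc<[SyntaxError]>> that callers must clone or deref.
    fn parse_errors(&self) -> Option<&[SyntaxError]>;
}

impl RootQueryDbLike for Db {
    fn parse_errors(&self) -> Option<&[SyntaxError]> {
        // The "transparent" query just forwards; nothing is cached at this layer.
        self.cached_parse_errors.as_deref()
    }
}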
@@ -318,12 +314,16 @@ fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
     ast::SourceFile::parse(&text, edition)
 }
 
-fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
-    let errors = db.parse(file_id).errors();
-    match &*errors {
-        [] => None,
-        [..] => Some(errors.into()),
+fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
+    #[salsa::tracked(return_ref)]
+    fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
+        let errors = db.parse(file_id).errors();
+        match &*errors {
+            [] => None,
+            [..] => Some(errors.into()),
+        }
     }
+    parse_errors(db, file_id).as_ref().map(|it| &**it)
 }
 
 fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> {
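The implementation keeps the same computation but nests it inside a `#[salsa::tracked(return_ref)]` helper: salsa memoizes the `Option<Box<[SyntaxError]>>`, and `return_ref` makes the generated accessor return a reference into the stored value for the lifetime of `db` rather than a clone. The outer, transparent function then only reshapes `&Option<Box<[SyntaxError]>>` into `Option<&[SyntaxError]>`. That reshaping step in isolation looks like this (plain Rust, no salsa; names are illustrative):

// `as_ref` turns &Option<Box<[T]>> into Option<&Box<[T]>>, and `&**it` derefs
// the Box down to the slice, matching `.as_ref().map(|it| &**it)` above.
fn as_error_slice<T>(memoized: &Option<Box<[T]>>) -> Option<&[T]> {
    memoized.as_ref().map(|it| &**it)
}

fn main() {
    let cached: Option<Box<[&str]>> = Some(vec!["unexpected token"].into_boxed_slice());
    assert_eq!(as_error_slice(&cached), Some(&["unexpected token"][..]));
    assert_eq!(as_error_slice::<&str>(&None), None);
}

Callers now borrow the error list from the database instead of bumping an `Arc`, which is what enables the call-site cleanups further down.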
@@ -1041,19 +1041,6 @@ fn test() -> String {
         );
     }
 
-    #[test]
-    fn closure_mismatch_show_different_type() {
-        check_diagnostics(
-            r#"
-fn f() {
-    let mut x = (|| 1, 2);
-    x = (|| 3, 4);
-      //^^^^ error: expected {closure#23552}, found {closure#23553}
-}
-"#,
-        );
-    }
-
     #[test]
     fn type_mismatch_range_adjustment() {
         cov_mark::check!(type_mismatch_range_adjustment);
@@ -332,7 +332,6 @@ pub fn syntax_diagnostics(
 
     // [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
     db.parse_errors(editioned_file_id_wrapper)
-        .as_deref()
        .into_iter()
        .flatten()
        .take(128)
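With `parse_errors` now yielding `Option<&[SyntaxError]>` directly, the `.as_deref()` adapter that previously re-borrowed the owned `Option<Arc<[SyntaxError]>>` is redundant and is dropped here; the `semantic_diagnostics` hunk below gets the same treatment. An isolated before/after of the iteration, with `u32` as a placeholder error type:

use std::sync::Arc;

// Before: the owned Arc slice had to be re-borrowed before iterating.
fn first_errors_old(errors: Option<Arc<[u32]>>) -> Vec<u32> {
    errors.as_deref().into_iter().flatten().take(128).copied().collect()
}

// After: the borrowed slice can be iterated directly.
fn first_errors_new(errors: Option<&[u32]>) -> Vec<u32> {
    errors.into_iter().flatten().take(128).copied().collect()
}

fn main() {
    assert_eq!(first_errors_old(Some(Arc::from([1, 2, 3]))), vec![1, 2, 3]);
    assert_eq!(first_errors_new(None), Vec::<u32>::new());
}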
@@ -409,8 +408,7 @@ pub fn semantic_diagnostics(
         // A bunch of parse errors in a file indicate some bigger structural parse changes in the
         // file, so we skip semantic diagnostics so we can show these faster.
         Some(m) => {
-            if db.parse_errors(editioned_file_id_wrapper).as_deref().is_none_or(|es| es.len() < 16)
-            {
+            if db.parse_errors(editioned_file_id_wrapper).is_none_or(|es| es.len() < 16) {
                 m.diagnostics(db, &mut diags, config.style_lints);
             }
         }
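`Option::is_none_or` returns `true` when the option is `None` or when the predicate holds for the contained value, so the gate reads "run semantic diagnostics unless the file already has 16 or more parse errors"; with the borrow returned directly, the whole condition fits on one line again. A tiny self-contained illustration (the function name and `&str` errors are placeholders):

fn should_run_semantic_diagnostics(parse_errors: Option<&[&str]>) -> bool {
    // None => no recorded parse errors at all; Some => only if fewer than 16.
    parse_errors.is_none_or(|es| es.len() < 16)
}

fn main() {
    assert!(should_run_semantic_diagnostics(None));
    assert!(should_run_semantic_diagnostics(Some(&["missing semicolon"])));
    assert!(!should_run_semantic_diagnostics(Some(&[""; 16])));
}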
@@ -855,28 +855,6 @@ fn main() {
       //^ |i32| -> ()
     let p = (y, z);
       //^ (|i32| -> i32, |i32| -> ())
-}
-"#,
-        );
-        check_with_config(
-            InlayHintsConfig {
-                type_hints: true,
-                closure_style: ClosureStyle::ClosureWithId,
-                ..DISABLED_CONFIG
-            },
-            r#"
-//- minicore: fn
-fn main() {
-    let x = || 2;
-      //^ {closure#25600}
-    let y = |t: i32| x() + t;
-      //^ {closure#25601}
-    let mut t = 5;
-      //^ i32
-    let z = |k: i32| { t += k; };
-      //^ {closure#25602}
-    let p = (y, z);
-      //^ ({closure#25601}, {closure#25602})
-}
 "#,
         );