Skip redundant path search in resolve_completion_edits

Lukas Wirth 2025-01-25 12:30:20 +01:00
parent 5df0b592c1
commit ae74cc3b88
9 changed files with 27 additions and 50 deletions
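
The "redundant path search" removed here is the per-import item lookup inside resolve_completion_edits: the path string handed to that function was rendered from an already-resolved import path when the completion item was built, so re-locating the item by name and recomputing its use path only to compare it against the same string adds nothing. A minimal sketch of the new per-import step, using only identifiers that appear in the hunks below (surrounding setup such as new_ast, config, and current_edition is elided):

    // Sketch, not repository code: what resolve_completion_edits now does per import.
    for full_import_path in imports {
        insert_use::insert_use(
            &new_ast,
            // Re-parse the already-rendered path instead of searching for the item
            // by name and recomputing its use path.
            make::path_from_text_with_edition(&full_import_path, current_edition),
            &config.insert_use,
        );
    }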

View File

@@ -82,8 +82,7 @@ pub struct CompletionItem {
     pub ref_match: Option<(CompletionItemRefMode, TextSize)>,
     /// The import data to add to completion's edits.
-    /// (ImportPath, LastSegment)
-    pub import_to_add: SmallVec<[(String, String); 1]>,
+    pub import_to_add: SmallVec<[String; 1]>,
 }
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
@@ -570,12 +569,7 @@ impl Builder {
         let import_to_add = self
             .imports_to_add
             .into_iter()
-            .filter_map(|import| {
-                Some((
-                    import.import_path.display(db, self.edition).to_string(),
-                    import.import_path.segments().last()?.display(db, self.edition).to_string(),
-                ))
-            })
+            .map(|import| import.import_path.display(db, self.edition).to_string())
             .collect();
         CompletionItem {
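
The string stored in import_to_add is produced here by display, and it is what later gets re-parsed during resolve. The change therefore leans on this round trip; a hedged sketch, where mod_path, db, and edition stand in for the concrete values used above and below:

    // Sketch: rendering a resolved ModPath and parsing the text back should yield
    // an equivalent path, which is why no second item search is needed on resolve.
    let rendered = mod_path.display(db, edition).to_string();
    let reparsed = make::path_from_text_with_edition(&rendered, edition);
    assert_eq!(reparsed.to_string(), rendered);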

View File

@@ -10,17 +10,13 @@ mod snippet;
 #[cfg(test)]
 mod tests;
-use ide_db::text_edit::TextEdit;
 use ide_db::{
-    helpers::mod_path_to_ast,
-    imports::{
-        import_assets::NameToImport,
-        insert_use::{self, ImportScope},
-    },
-    items_locator,
+    imports::insert_use::{self, ImportScope},
     syntax_helpers::tree_diff::diff,
+    text_edit::TextEdit,
     FilePosition, FxHashSet, RootDatabase,
 };
+use syntax::ast::make;
 use crate::{
     completions::Completions,
@@ -272,7 +268,7 @@ pub fn resolve_completion_edits(
     db: &RootDatabase,
     config: &CompletionConfig<'_>,
     FilePosition { file_id, offset }: FilePosition,
-    imports: impl IntoIterator<Item = (String, String)>,
+    imports: impl IntoIterator<Item = String>,
 ) -> Option<Vec<TextEdit>> {
     let _p = tracing::info_span!("resolve_completion_edits").entered();
     let sema = hir::Semantics::new(db);
@@ -289,27 +285,12 @@ pub fn resolve_completion_edits(
     let new_ast = scope.clone_for_update();
     let mut import_insert = TextEdit::builder();
-    let cfg = config.import_path_config(true);
-    imports.into_iter().for_each(|(full_import_path, imported_name)| {
-        let items_with_name = items_locator::items_with_name(
-            &sema,
-            current_crate,
-            NameToImport::exact_case_sensitive(imported_name),
-            items_locator::AssocSearchMode::Include,
+    imports.into_iter().for_each(|full_import_path| {
+        insert_use::insert_use(
+            &new_ast,
+            make::path_from_text_with_edition(&full_import_path, current_edition),
+            &config.insert_use,
         );
-        let import = items_with_name
-            .filter_map(|candidate| {
-                current_module.find_use_path(db, candidate, config.insert_use.prefix_kind, cfg)
-            })
-            .find(|mod_path| mod_path.display(db, current_edition).to_string() == full_import_path);
-        if let Some(import_path) = import {
-            insert_use::insert_use(
-                &new_ast,
-                mod_path_to_ast(&import_path, current_edition),
-                &config.insert_use,
-            );
-        }
     });
     diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert);

View File

@@ -672,7 +672,7 @@
         &self,
         config: &CompletionConfig<'_>,
         position: FilePosition,
-        imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
+        imports: impl IntoIterator<Item = String> + std::panic::UnwindSafe,
     ) -> Cancellable<Vec<TextEdit>> {
         Ok(self
             .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?
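
From a caller's perspective the only visible difference is the element type of imports. A hedged usage sketch against this Analysis method, where analysis, config, position, and the example path are placeholders rather than values from the diff:

    // `imports` now yields plain path strings instead of (path, last-segment) pairs.
    let edits: Vec<TextEdit> = analysis.resolve_completion_edits(
        &config,
        position,
        vec!["foo::bar::Baz".to_owned()], // placeholder path
    )?;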

View File

@@ -1154,10 +1154,7 @@ pub(crate) fn handle_completion_resolve(
         .resolve_completion_edits(
             &forced_resolve_completions_config,
             position,
-            resolve_data
-                .imports
-                .into_iter()
-                .map(|import| (import.full_import_path, import.imported_name)),
+            resolve_data.imports.into_iter().map(|import| import.full_import_path),
         )?
         .into_iter()
         .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))

View File

@@ -142,9 +142,8 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8;
         hasher.update(prefix);
         hasher.update(u32::from(*text_size).to_le_bytes());
     }
-    for (import_path, import_name) in &item.import_to_add {
+    for import_path in &item.import_to_add {
         hasher.update(import_path);
-        hasher.update(import_name);
     }
     hasher.finalize()
 }

View File

@@ -850,7 +850,6 @@ pub struct InlayHintResolveData {
 #[derive(Debug, Serialize, Deserialize)]
 pub struct CompletionImport {
     pub full_import_path: String,
-    pub imported_name: String,
 }
 #[derive(Debug, Deserialize, Default)]

View File

@@ -394,10 +394,7 @@ fn completion_item(
             item.import_to_add
                 .clone()
                 .into_iter()
-                .map(|(import_path, import_name)| lsp_ext::CompletionImport {
-                    full_import_path: import_path,
-                    imported_name: import_name,
-                })
+                .map(|import_path| lsp_ext::CompletionImport { full_import_path: import_path })
                 .collect()
         } else {
             Vec::new()

View File

@@ -411,6 +411,11 @@ pub fn path_from_text(text: &str) -> ast::Path {
     ast_from_text(&format!("fn main() {{ let test: {text}; }}"))
 }
+// FIXME: should not be pub
+pub fn path_from_text_with_edition(text: &str, edition: Edition) -> ast::Path {
+    ast_from_text_with_edition(&format!("fn main() {{ let test: {text}; }}"), edition)
+}
 pub fn use_tree_glob() -> ast::UseTree {
     ast_from_text("use *;")
 }
@@ -1230,7 +1235,12 @@ pub fn token_tree(
 #[track_caller]
 fn ast_from_text<N: AstNode>(text: &str) -> N {
-    let parse = SourceFile::parse(text, Edition::CURRENT);
+    ast_from_text_with_edition(text, Edition::CURRENT)
+}
+#[track_caller]
+fn ast_from_text_with_edition<N: AstNode>(text: &str, edition: Edition) -> N {
+    let parse = SourceFile::parse(text, edition);
     let node = match parse.tree().syntax().descendants().find_map(N::cast) {
         Some(it) => it,
         None => {
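
The new make helpers are what let the resolve path skip re-resolution: an arbitrary path string can be parsed at a caller-supplied edition. A small usage sketch, assuming the syntax crate re-exports Edition and that an ast::Path displays as its source text; the path literal is an example value, not taken from the diff:

    // Sketch: parse an already-rendered import path at a specific edition.
    use syntax::{ast, ast::make, Edition};

    let path: ast::Path =
        make::path_from_text_with_edition("core::iter::Iterator", Edition::Edition2021);
    assert_eq!(path.to_string(), "core::iter::Iterator");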

View File

@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: 2d8604825c458288
+lsp/ext.rs hash: af70cce5d6905e39
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue: