rust-analyzer (mirror of https://github.com/rust-lang/rust-analyzer.git)

Implement utf8 offsets

commit c8b9ec8e62
parent 0025836f26
crates/rust-analyzer/src/from_proto.rs

@@ -1,13 +1,16 @@
 //! Conversion lsp_types types to rust-analyzer specific ones.
 use std::convert::TryFrom;

-use ide::{Annotation, AnnotationKind, AssistKind, LineColUtf16};
+use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16};
 use ide_db::base_db::{FileId, FilePosition, FileRange};
 use syntax::{TextRange, TextSize};
 use vfs::AbsPathBuf;

 use crate::{
-    from_json, global_state::GlobalStateSnapshot, line_endings::LineIndex, lsp_ext, Result,
+    from_json,
+    global_state::GlobalStateSnapshot,
+    line_endings::{LineIndex, OffsetEncoding},
+    lsp_ext, Result,
 };

 pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
@@ -20,8 +23,16 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
 }

 pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize {
-    let line_col = LineColUtf16 { line: position.line as u32, col: position.character as u32 };
-    let line_col = line_index.index.to_utf8(line_col);
+    let line_col = match line_index.encoding {
+        OffsetEncoding::Utf8 => {
+            LineCol { line: position.line as u32, col: position.character as u32 }
+        }
+        OffsetEncoding::Utf16 => {
+            let line_col =
+                LineColUtf16 { line: position.line as u32, col: position.character as u32 };
+            line_index.index.to_utf8(line_col)
+        }
+    };
     line_index.index.offset(line_col)
 }

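Note: LSP clients report Position.character in UTF-16 code units by default, while rust-analyzer works with UTF-8 byte offsets internally, which is why the Utf16 arm above still routes through to_utf8 before computing the TextSize. The following standalone sketch (not rust-analyzer's LineIndex code; the function name is made up for illustration) shows roughly what that per-line conversion amounts to:

// A minimal sketch (not rust-analyzer's implementation) of a UTF-16 -> UTF-8
// column conversion: walk the line's characters, counting UTF-16 code units
// until the requested column is reached, and report how many UTF-8 bytes were
// consumed up to that point.
fn utf16_col_to_utf8_col(line_text: &str, utf16_col: u32) -> u32 {
    let mut utf16_units = 0u32;
    let mut utf8_bytes = 0u32;
    for c in line_text.chars() {
        if utf16_units >= utf16_col {
            break;
        }
        utf16_units += c.len_utf16() as u32;
        utf8_bytes += c.len_utf8() as u32;
    }
    utf8_bytes
}

fn main() {
    // '€' is one UTF-16 code unit but three UTF-8 bytes, so a client counting
    // in UTF-16 reports column 2 for the position after "€x", while the byte
    // offset rust-analyzer needs is 4.
    assert_eq!(utf16_col_to_utf8_col("€x", 2), 4);
}

With the new Utf8 arm that conversion can be skipped entirely, since the client's column is already a byte count.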
crates/rust-analyzer/src/global_state.rs

@@ -22,7 +22,7 @@ use crate::{
     diagnostics::{CheckFixes, DiagnosticCollection},
     document::DocumentData,
     from_proto,
-    line_endings::{LineEndings, LineIndex},
+    line_endings::{LineEndings, LineIndex, OffsetEncoding},
     main_loop::Task,
     op_queue::OpQueue,
     reload::SourceRootConfig,
@@ -274,7 +274,7 @@ impl GlobalStateSnapshot {
     pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancelable<LineIndex> {
         let endings = self.vfs.read().1[&file_id];
         let index = self.analysis.file_line_index(file_id)?;
-        let res = LineIndex { index, endings };
+        let res = LineIndex { index, endings, encoding: OffsetEncoding::Utf16 };
         Ok(res)
     }

crates/rust-analyzer/src/line_endings.rs

@@ -4,9 +4,16 @@

 use std::sync::Arc;

+pub(crate) enum OffsetEncoding {
+    #[allow(unused)]
+    Utf8,
+    Utf16,
+}
+
 pub(crate) struct LineIndex {
     pub(crate) index: Arc<ide::LineIndex>,
     pub(crate) endings: LineEndings,
+    pub(crate) encoding: OffsetEncoding,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
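Note: Utf8 is added under #[allow(unused)] because nothing in this commit constructs it yet; file_line_index above still hardcodes OffsetEncoding::Utf16. Presumably a later change selects the variant from whatever encoding the client and server agree on. A hypothetical sketch of such a selection step (the negotiated_encoding helper and the offered strings are assumptions for illustration, not part of this commit):

// Hypothetical follow-up sketch (not part of this commit): choose the
// OffsetEncoding from the list of encodings a client offered, falling back
// to the LSP default of UTF-16 when UTF-8 was not offered.
#[derive(Debug, PartialEq)]
enum OffsetEncoding {
    Utf8,
    Utf16,
}

fn negotiated_encoding(client_offered: &[&str]) -> OffsetEncoding {
    if client_offered.iter().any(|it| it.eq_ignore_ascii_case("utf-8")) {
        OffsetEncoding::Utf8
    } else {
        OffsetEncoding::Utf16
    }
}

fn main() {
    assert_eq!(negotiated_encoding(&["utf-8", "utf-16"]), OffsetEncoding::Utf8);
    assert_eq!(negotiated_encoding(&[]), OffsetEncoding::Utf16);
}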
crates/rust-analyzer/src/lsp_utils.rs

@@ -7,7 +7,7 @@ use lsp_server::Notification;
 use crate::{
     from_proto,
     global_state::GlobalState,
-    line_endings::{LineEndings, LineIndex},
+    line_endings::{LineEndings, LineIndex, OffsetEncoding},
 };

 pub(crate) fn is_canceled(e: &(dyn Error + 'static)) -> bool {
@@ -95,8 +95,9 @@ pub(crate) fn apply_document_changes(
 ) {
     let mut line_index = LineIndex {
         index: Arc::new(ide::LineIndex::new(old_text)),
-        // We don't care about line endings here.
+        // We don't care about line endings or offset encoding here.
         endings: LineEndings::Unix,
+        encoding: OffsetEncoding::Utf16,
     };

     // The changes we got must be applied sequentially, but can cross lines so we
crates/rust-analyzer/src/to_proto.rs

@@ -17,15 +17,20 @@ use serde_json::to_value;
 use crate::{
     cargo_target_spec::CargoTargetSpec,
     global_state::GlobalStateSnapshot,
-    line_endings::{LineEndings, LineIndex},
+    line_endings::{LineEndings, LineIndex, OffsetEncoding},
     lsp_ext, semantic_tokens, Result,
 };

 pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
     let line_col = line_index.index.line_col(offset);
-    let line_col = line_index.index.to_utf16(line_col);
-    lsp_types::Position::new(line_col.line, line_col.col)
+    match line_index.encoding {
+        OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+        OffsetEncoding::Utf16 => {
+            let line_col = line_index.index.to_utf16(line_col);
+            lsp_types::Position::new(line_col.line, line_col.col)
+        }
+    }
 }

 pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
     let start = position(line_index, range.start());
@@ -1068,8 +1073,11 @@ mod tests {
 }"#;

         let (offset, text) = test_utils::extract_offset(fixture);
-        let line_index =
-            LineIndex { index: Arc::new(ide::LineIndex::new(&text)), endings: LineEndings::Unix };
+        let line_index = LineIndex {
+            index: Arc::new(ide::LineIndex::new(&text)),
+            endings: LineEndings::Unix,
+            encoding: OffsetEncoding::Utf16,
+        };
         let (analysis, file_id) = Analysis::from_single_file(text);
         let completions: Vec<(String, Option<String>)> = analysis
             .completions(
@@ -1125,8 +1133,11 @@ fn main() {
         let folds = analysis.folding_ranges(file_id).unwrap();
         assert_eq!(folds.len(), 4);

-        let line_index =
-            LineIndex { index: Arc::new(ide::LineIndex::new(&text)), endings: LineEndings::Unix };
+        let line_index = LineIndex {
+            index: Arc::new(ide::LineIndex::new(&text)),
+            endings: LineEndings::Unix,
+            encoding: OffsetEncoding::Utf16,
+        };
         let converted: Vec<lsp_types::FoldingRange> =
             folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();

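Note: to_proto::position is the mirror of from_proto::offset: it starts from rust-analyzer's UTF-8 based LineCol and only converts via to_utf16 when the client expects the default encoding. For completeness, a sketch of that reverse per-line conversion (illustrative only, with a made-up function name, not rust-analyzer's code):

// Given a UTF-8 byte column inside a line, compute the UTF-16 column a
// default-encoding client expects: walk the characters until the byte column
// is consumed, summing their UTF-16 lengths along the way.
fn utf8_col_to_utf16_col(line_text: &str, utf8_col: u32) -> u32 {
    let mut utf8_bytes = 0u32;
    let mut utf16_units = 0u32;
    for c in line_text.chars() {
        if utf8_bytes >= utf8_col {
            break;
        }
        utf8_bytes += c.len_utf8() as u32;
        utf16_units += c.len_utf16() as u32;
    }
    utf16_units
}

fn main() {
    // Byte column 4 in "€x" (just past the 'x') corresponds to UTF-16 column 2.
    assert_eq!(utf8_col_to_utf16_col("€x", 4), 2);
}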