Make it easy to add additional context for offset conversion
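The diff below threads all offset-conversion context through a single `LineIndex` wrapper exported from the `line_endings` module, instead of passing separate `line_index: &LineIndex` and `line_endings: LineEndings` arguments to every helper. Judging from how the test code constructs and uses the value (`LineIndex { index: Arc::new(ide::LineIndex::new(&text)), endings: LineEndings::Unix }`, `line_index.index.line_col(offset)`, `line_index.endings`), the wrapper looks roughly like the sketch below; its real definition lives in the `line_endings` module, which is not part of this diff, and per the commit title it is meant to grow additional context later.

use std::sync::Arc;

// Sketch only, inferred from this diff. `LineEndings` already exists in the
// `line_endings` module; it is repeated here just to keep the sketch self-contained.
pub(crate) enum LineEndings {
    Unix,
    Dos,
}

// Bundles the offset-to-line/column index with the file's line-ending style so
// conversion helpers take one argument and new context can be added in one place.
pub(crate) struct LineIndex {
    pub(crate) index: Arc<ide::LineIndex>,
    pub(crate) endings: LineEndings,
}
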
@@ -7,22 +7,23 @@ use std::{
 use ide::{
     Annotation, AnnotationKind, Assist, AssistKind, CallInfo, CompletionItem, CompletionItemKind,
     Documentation, FileId, FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlPunct,
-    HlRange, HlTag, Indel, InlayHint, InlayKind, InsertTextFormat, LineIndex, Markup,
-    NavigationTarget, ReferenceAccess, RenameError, Runnable, Severity, SourceChange, TextEdit,
-    TextRange, TextSize,
+    HlRange, HlTag, Indel, InlayHint, InlayKind, InsertTextFormat, Markup, NavigationTarget,
+    ReferenceAccess, RenameError, Runnable, Severity, SourceChange, TextEdit, TextRange, TextSize,
 };
 use ide_db::SymbolKind;
 use itertools::Itertools;
 use serde_json::to_value;
 
 use crate::{
-    cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot,
-    line_endings::LineEndings, lsp_ext, semantic_tokens, Result,
+    cargo_target_spec::CargoTargetSpec,
+    global_state::GlobalStateSnapshot,
+    line_endings::{LineEndings, LineIndex},
+    lsp_ext, semantic_tokens, Result,
 };
 
 pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
-    let line_col = line_index.line_col(offset);
-    let line_col = line_index.to_utf16(line_col);
+    let line_col = line_index.index.line_col(offset);
+    let line_col = line_index.index.to_utf16(line_col);
     lsp_types::Position::new(line_col.line, line_col.col)
 }
 
@@ -123,13 +124,9 @@ pub(crate) fn completion_item_kind(
     }
 }
 
-pub(crate) fn text_edit(
-    line_index: &LineIndex,
-    line_endings: LineEndings,
-    indel: Indel,
-) -> lsp_types::TextEdit {
+pub(crate) fn text_edit(line_index: &LineIndex, indel: Indel) -> lsp_types::TextEdit {
     let range = range(line_index, indel.delete);
-    let new_text = match line_endings {
+    let new_text = match line_index.endings {
         LineEndings::Unix => indel.insert,
         LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
     };
@@ -138,11 +135,10 @@ pub(crate) fn text_edit(
 
 pub(crate) fn snippet_text_edit(
     line_index: &LineIndex,
-    line_endings: LineEndings,
     is_snippet: bool,
     indel: Indel,
 ) -> lsp_ext::SnippetTextEdit {
-    let text_edit = text_edit(line_index, line_endings, indel);
+    let text_edit = text_edit(line_index, indel);
     let insert_text_format =
         if is_snippet { Some(lsp_types::InsertTextFormat::Snippet) } else { None };
     lsp_ext::SnippetTextEdit {
@@ -154,27 +150,24 @@ pub(crate) fn snippet_text_edit(
 
 pub(crate) fn text_edit_vec(
     line_index: &LineIndex,
-    line_endings: LineEndings,
     text_edit: TextEdit,
 ) -> Vec<lsp_types::TextEdit> {
-    text_edit.into_iter().map(|indel| self::text_edit(line_index, line_endings, indel)).collect()
+    text_edit.into_iter().map(|indel| self::text_edit(line_index, indel)).collect()
 }
 
 pub(crate) fn snippet_text_edit_vec(
     line_index: &LineIndex,
-    line_endings: LineEndings,
     is_snippet: bool,
     text_edit: TextEdit,
 ) -> Vec<lsp_ext::SnippetTextEdit> {
     text_edit
         .into_iter()
-        .map(|indel| self::snippet_text_edit(line_index, line_endings, is_snippet, indel))
+        .map(|indel| self::snippet_text_edit(line_index, is_snippet, indel))
         .collect()
 }
 
 pub(crate) fn completion_item(
     line_index: &LineIndex,
-    line_endings: LineEndings,
     completion_item: CompletionItem,
 ) -> Vec<lsp_types::CompletionItem> {
     fn set_score(res: &mut lsp_types::CompletionItem, label: &str) {
@@ -191,19 +184,19 @@ pub(crate) fn completion_item(
     for indel in completion_item.text_edit().iter() {
         if indel.delete.contains_range(source_range) {
             text_edit = Some(if indel.delete == source_range {
-                self::text_edit(line_index, line_endings, indel.clone())
+                self::text_edit(line_index, indel.clone())
             } else {
                 assert!(source_range.end() == indel.delete.end());
                 let range1 = TextRange::new(indel.delete.start(), source_range.start());
                 let range2 = source_range;
                 let indel1 = Indel::replace(range1, String::new());
                 let indel2 = Indel::replace(range2, indel.insert.clone());
-                additional_text_edits.push(self::text_edit(line_index, line_endings, indel1));
-                self::text_edit(line_index, line_endings, indel2)
+                additional_text_edits.push(self::text_edit(line_index, indel1));
+                self::text_edit(line_index, indel2)
             })
         } else {
             assert!(source_range.intersect(indel.delete).is_none());
-            let text_edit = self::text_edit(line_index, line_endings, indel.clone());
+            let text_edit = self::text_edit(line_index, indel.clone());
             additional_text_edits.push(text_edit);
         }
     }
@@ -359,7 +352,7 @@ pub(crate) fn semantic_tokens(
         let token_index = semantic_tokens::type_index(type_);
         let modifier_bitset = mods.0;
 
-        for mut text_range in line_index.lines(highlight_range.range) {
+        for mut text_range in line_index.index.lines(highlight_range.range) {
             if text[text_range].ends_with('\n') {
                 text_range =
                     TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
@@ -566,7 +559,7 @@ pub(crate) fn location(
     frange: FileRange,
 ) -> Result<lsp_types::Location> {
     let url = url(snap, frange.file_id);
-    let line_index = snap.analysis.file_line_index(frange.file_id)?;
+    let line_index = snap.file_line_index(frange.file_id)?;
     let range = range(&line_index, frange.range);
     let loc = lsp_types::Location::new(url, range);
     Ok(loc)
@@ -578,7 +571,7 @@ pub(crate) fn location_from_nav(
     nav: NavigationTarget,
 ) -> Result<lsp_types::Location> {
     let url = url(snap, nav.file_id);
-    let line_index = snap.analysis.file_line_index(nav.file_id)?;
+    let line_index = snap.file_line_index(nav.file_id)?;
     let range = range(&line_index, nav.full_range);
     let loc = lsp_types::Location::new(url, range);
     Ok(loc)
@@ -591,7 +584,7 @@ pub(crate) fn location_link(
 ) -> Result<lsp_types::LocationLink> {
     let origin_selection_range = match src {
         Some(src) => {
-            let line_index = snap.analysis.file_line_index(src.file_id)?;
+            let line_index = snap.file_line_index(src.file_id)?;
             let range = range(&line_index, src.range);
             Some(range)
         }
@@ -611,7 +604,7 @@ fn location_info(
     snap: &GlobalStateSnapshot,
     target: NavigationTarget,
 ) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
-    let line_index = snap.analysis.file_line_index(target.file_id)?;
+    let line_index = snap.file_line_index(target.file_id)?;
 
     let target_uri = url(snap, target.file_id);
     let target_range = range(&line_index, target.full_range);
@@ -649,12 +642,8 @@ pub(crate) fn snippet_text_document_edit(
     edit: TextEdit,
 ) -> Result<lsp_ext::SnippetTextDocumentEdit> {
     let text_document = optional_versioned_text_document_identifier(snap, file_id);
-    let line_index = snap.analysis.file_line_index(file_id)?;
-    let line_endings = snap.file_line_endings(file_id);
-    let edits = edit
-        .into_iter()
-        .map(|it| snippet_text_edit(&line_index, line_endings, is_snippet, it))
-        .collect();
+    let line_index = snap.file_line_index(file_id)?;
+    let edits = edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
     Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
 }
 
@@ -675,9 +664,8 @@ pub(crate) fn snippet_text_document_ops(
     if !initial_contents.is_empty() {
         let text_document =
             lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
-        let range = range(&LineIndex::new(""), TextRange::empty(TextSize::from(0)));
         let text_edit = lsp_ext::SnippetTextEdit {
-            range,
+            range: lsp_types::Range::default(),
             new_text: initial_contents,
             insert_text_format: Some(lsp_types::InsertTextFormat::PlainText),
         };
@@ -868,7 +856,7 @@ pub(crate) fn code_lens(
 ) -> Result<lsp_types::CodeLens> {
     match annotation.kind {
         AnnotationKind::Runnable { debug, runnable: run } => {
-            let line_index = snap.analysis.file_line_index(run.nav.file_id)?;
+            let line_index = snap.file_line_index(run.nav.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
 
             let action = run.action();
@@ -884,7 +872,7 @@ pub(crate) fn code_lens(
             Ok(lsp_types::CodeLens { range: annotation_range, command: Some(command), data: None })
         }
         AnnotationKind::HasImpls { position: file_position, data } => {
-            let line_index = snap.analysis.file_line_index(file_position.file_id)?;
+            let line_index = snap.file_line_index(file_position.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
             let url = url(snap, file_position.file_id);
 
@@ -927,7 +915,7 @@ pub(crate) fn code_lens(
             })
         }
         AnnotationKind::HasReferences { position: file_position, data } => {
-            let line_index = snap.analysis.file_line_index(file_position.file_id)?;
+            let line_index = snap.file_line_index(file_position.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
             let url = url(snap, file_position.file_id);
 
@@ -1061,6 +1049,8 @@ pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
 
 #[cfg(test)]
 mod tests {
+    use std::sync::Arc;
+
     use hir::PrefixKind;
     use ide::Analysis;
     use ide_db::helpers::{insert_use::InsertUseConfig, SnippetCap};
@@ -1078,7 +1068,8 @@ mod tests {
 }"#;
 
         let (offset, text) = test_utils::extract_offset(fixture);
-        let line_index = LineIndex::new(&text);
+        let line_index =
+            LineIndex { index: Arc::new(ide::LineIndex::new(&text)), endings: LineEndings::Unix };
         let (analysis, file_id) = Analysis::from_single_file(text);
         let completions: Vec<(String, Option<String>)> = analysis
             .completions(
@@ -1096,7 +1087,7 @@ mod tests {
             .unwrap()
             .into_iter()
             .filter(|c| c.label().ends_with("arg"))
-            .map(|c| completion_item(&line_index, LineEndings::Unix, c))
+            .map(|c| completion_item(&line_index, c))
             .flat_map(|comps| comps.into_iter().map(|c| (c.label, c.sort_text)))
             .collect();
         expect_test::expect![[r#"
@@ -1134,7 +1125,8 @@ fn main() {
         let folds = analysis.folding_ranges(file_id).unwrap();
         assert_eq!(folds.len(), 4);
 
-        let line_index = LineIndex::new(&text);
+        let line_index =
+            LineIndex { index: Arc::new(ide::LineIndex::new(&text)), endings: LineEndings::Unix };
        let converted: Vec<lsp_types::FoldingRange> =
            folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
 
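
Every call site above switches from `snap.analysis.file_line_index(file_id)?` to `snap.file_line_index(file_id)?`, so `GlobalStateSnapshot` now hands back the bundled wrapper itself. A minimal sketch of such a helper, assuming it simply combines the two lookups the old call sites performed separately (the real method body and its exact return type are not shown in this diff):

impl GlobalStateSnapshot {
    // Hypothetical body: pair the analysis-provided index with the file's
    // line endings; the actual implementation lives outside this excerpt.
    pub(crate) fn file_line_index(&self, file_id: FileId) -> Result<LineIndex> {
        let endings = self.file_line_endings(file_id);
        let index = self.analysis.file_line_index(file_id)?;
        Ok(LineIndex { index, endings })
    }
}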