8795: Allow semantic tokens for strings to be disabled r=matklad a=djrenren

Fixes https://github.com/rust-analyzer/rust-analyzer/issues/7111

Pretty straightforward change, but I'm open to suggestions if there's a more recommended testing strategy than the one I went with.

Co-authored-by: John Renner <john@jrenner.net>
bors[bot] committed via GitHub on 2021-05-17 14:41:56 +00:00
6 changed files with 78 additions and 10 deletions


@@ -208,6 +208,13 @@ config_data! {
         /// Advanced option, fully override the command rust-analyzer uses for
         /// formatting.
         rustfmt_overrideCommand: Option<Vec<String>> = "null",
+        /// Use semantic tokens for strings.
+        ///
+        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+        /// By disabling semantic tokens for strings, other grammars can be used to highlight
+        /// their contents.
+        semanticStringTokens: bool = "true",
     }
 }
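
As with `rustfmt_overrideCommand`'s `"null"` just above, the `= "true"` on the right-hand side is a JSON-encoded default rather than a Rust literal: the field gets that value unless the client sends one (presumably surfaced as `semanticStringTokens`, i.e. `rust-analyzer.semanticStringTokens` in VS Code). A minimal sketch of that idea, not the actual macro expansion; the field and key names come from the diff, everything else is illustrative:

```rust
use serde_json::Value;

#[derive(Debug)]
struct ConfigData {
    semantic_string_tokens: bool,
}

impl ConfigData {
    fn from_json(json: &Value) -> ConfigData {
        ConfigData {
            // The client-supplied value wins; otherwise fall back to the
            // declared JSON default, "true".
            semantic_string_tokens: json
                .get("semanticStringTokens")
                .and_then(Value::as_bool)
                .unwrap_or(true),
        }
    }
}

fn main() {
    // Roughly what a client would send (e.g. in initializationOptions)
    // to turn semantic string tokens off.
    let client_options = serde_json::json!({ "semanticStringTokens": false });
    let config = ConfigData::from_json(&client_options);
    assert!(!config.semantic_string_tokens);
    println!("{:?}", config);
}
```

The accessor added in the next hunk (`Config::semantic_strings`) is then just a typed read of that field.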
@@ -381,6 +388,9 @@ impl Config {
     pub fn line_folding_only(&self) -> bool {
         try_or!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?, false)
     }
+    pub fn semantic_strings(&self) -> bool {
+        self.data.semanticStringTokens
+    }
     pub fn hierarchical_symbols(&self) -> bool {
         try_or!(
             self.caps


@@ -1394,7 +1394,9 @@ pub(crate) fn handle_semantic_tokens_full(
     let line_index = snap.file_line_index(file_id)?;
     let highlights = snap.analysis.highlight(file_id)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
     // Unconditionally cache the tokens
     snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1413,8 +1415,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
     let line_index = snap.file_line_index(file_id)?;
     let highlights = snap.analysis.highlight(file_id)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
     let mut cache = snap.semantic_tokens_cache.lock();
     let cached_tokens = cache.entry(params.text_document.uri).or_default();
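
This handler looks up the previously cached full result so it can answer with a delta instead of re-sending every token; because string filtering happens inside `to_proto::semantic_tokens` before the result is cached (see the hunk above), the cached and freshly computed token streams stay directly comparable. A rough sketch of a prefix-based delta over `lsp_types` tokens, simplified and illustrative rather than rust-analyzer's exact implementation:

```rust
use lsp_types::{SemanticToken, SemanticTokensEdit};

/// Compare the cached tokens with the freshly computed ones and describe the
/// difference as a single edit replacing everything after the common prefix.
fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
    // Number of leading tokens that are unchanged.
    let prefix = old.iter().zip(new.iter()).take_while(|(a, b)| a == b).count();
    if prefix == old.len() && prefix == new.len() {
        return Vec::new(); // nothing changed
    }
    vec![SemanticTokensEdit {
        // The wire format flattens each token into five u32s, so edit
        // offsets and lengths are counted in u32s, not tokens.
        start: 5 * prefix as u32,
        delete_count: 5 * (old.len() - prefix) as u32,
        data: Some(new[prefix..].to_vec()),
    }]
}

fn main() {
    let tok = |token_type| SemanticToken {
        delta_line: 0,
        delta_start: 1,
        length: 1,
        token_type,
        token_modifiers_bitset: 0,
    };
    let old = vec![tok(0), tok(1)];
    let new = vec![tok(0), tok(2)];
    let edits = diff_tokens(&old, &new);
    assert_eq!(edits[0].start, 5); // first token unchanged, rest replaced
}
```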
@@ -1443,7 +1446,9 @@ pub(crate) fn handle_semantic_tokens_range(
     let line_index = snap.file_line_index(frange.file_id)?;
     let highlights = snap.analysis.highlight_range(frange)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
     Ok(Some(semantic_tokens.into()))
 }


@@ -381,6 +381,7 @@ pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HlRange>,
+    include_strings: bool,
 ) -> lsp_types::SemanticTokens {
     let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@@ -389,8 +390,11 @@ pub(crate) fn semantic_tokens(
         if highlight_range.highlight.is_empty() {
             continue;
         }
-        let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        let token_index = semantic_tokens::type_index(type_);
+        let (typ, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+        if !include_strings && typ == lsp_types::SemanticTokenType::STRING {
+            continue;
+        }
+        let token_index = semantic_tokens::type_index(typ);
         let modifier_bitset = mods.0;
         for mut text_range in line_index.index.lines(highlight_range.range) {
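
The core of the change is the early `continue`: when `include_strings` is false, any range classified as a string is dropped before it reaches the token builder, so the editor's other grammars keep control of those spans. A self-contained sketch of the same rule on a simplified token list; the `Token` type and `filter_tokens` helper here are illustrative, only `SemanticTokenType::STRING` comes from `lsp_types`:

```rust
use lsp_types::SemanticTokenType;

/// Illustrative stand-in for a highlighted range; not a rust-analyzer type.
struct Token {
    kind: SemanticTokenType,
    text: &'static str,
}

/// Keep every token, unless string tokens are disabled, in which case string
/// tokens are skipped so other highlighters can claim those spans.
fn filter_tokens(tokens: Vec<Token>, include_strings: bool) -> Vec<Token> {
    tokens
        .into_iter()
        .filter(|t| include_strings || t.kind != SemanticTokenType::STRING)
        .collect()
}

fn main() {
    let tokens = vec![
        Token { kind: SemanticTokenType::KEYWORD, text: "let" },
        Token { kind: SemanticTokenType::STRING, text: "\"SELECT * FROM users\"" },
    ];
    // With semanticStringTokens disabled, only the keyword survives; the
    // string literal is left for e.g. an injected SQL grammar to highlight.
    let kept = filter_tokens(tokens, false);
    assert_eq!(kept.len(), 1);
    assert_eq!(kept[0].text, "let");
}
```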