2020-04-28 11:01:51 +02:00
|
|
|
//! Syntax highlighting injections such as highlighting of documentation tests.
|
|
|
|
|
|
|
|
|
|
use std::{collections::BTreeMap, convert::TryFrom};
|
|
|
|
|
|
|
|
|
|
use hir::Semantics;
|
2020-10-24 11:07:10 +03:00
|
|
|
use ide_db::call_info::ActiveParameter;
|
2020-08-12 15:04:06 +02:00
|
|
|
use itertools::Itertools;
|
2020-08-12 18:26:51 +02:00
|
|
|
use syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize};
|
2020-04-28 11:01:51 +02:00
|
|
|
|
2020-10-18 13:09:00 +03:00
|
|
|
use crate::{Analysis, Highlight, HighlightModifier, HighlightTag, HighlightedRange, RootDatabase};
|
2020-04-28 11:01:51 +02:00
|
|
|
|
|
|
|
|
use super::HighlightedRangeStack;
|
|
|
|
|
|
|
|
|
|
pub(super) fn highlight_injection(
|
|
|
|
|
acc: &mut HighlightedRangeStack,
|
|
|
|
|
sema: &Semantics<RootDatabase>,
|
2020-11-06 22:21:56 +01:00
|
|
|
literal: ast::String,
|
2020-04-28 11:01:51 +02:00
|
|
|
expanded: SyntaxToken,
|
|
|
|
|
) -> Option<()> {
|
|
|
|
|
let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
|
|
|
|
|
if !active_parameter.name.starts_with("ra_fixture") {
|
|
|
|
|
return None;
|
|
|
|
|
}
|
|
|
|
|
let value = literal.value()?;
|
2021-01-07 18:21:00 +03:00
|
|
|
let marker_info = MarkerInfo::new(&*value);
|
|
|
|
|
let (analysis, tmp_file_id) = Analysis::from_single_file(marker_info.cleaned_text.clone());
|
2020-04-28 11:01:51 +02:00
|
|
|
|
|
|
|
|
if let Some(range) = literal.open_quote_text_range() {
|
|
|
|
|
acc.add(HighlightedRange {
|
|
|
|
|
range,
|
|
|
|
|
highlight: HighlightTag::StringLiteral.into(),
|
|
|
|
|
binding_hash: None,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for mut h in analysis.highlight(tmp_file_id).unwrap() {
|
2021-01-07 18:21:00 +03:00
|
|
|
let range = marker_info.map_range_up(h.range);
|
|
|
|
|
if let Some(range) = literal.map_range_up(range) {
|
|
|
|
|
h.range = range;
|
|
|
|
|
acc.add(h);
|
2020-04-28 11:01:51 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if let Some(range) = literal.close_quote_text_range() {
|
|
|
|
|
acc.add(HighlightedRange {
|
|
|
|
|
range,
|
|
|
|
|
highlight: HighlightTag::StringLiteral.into(),
|
|
|
|
|
binding_hash: None,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Some(())
|
|
|
|
|
}
|
|
|
|
|
|
2021-01-07 18:21:00 +03:00
|
|
|
/// Data to remove `$0` from string and map ranges
#[derive(Default, Debug)]
struct MarkerInfo {
    // The input text with every `$0` marker removed.
    cleaned_text: String,
    // Ranges that the removed `$0` markers occupied, expressed in the
    // coordinates of the *original* (marker-laden) text, ascending.
    markers: Vec<TextRange>,
}
|
|
|
|
|
|
|
|
|
|
impl MarkerInfo {
|
|
|
|
|
fn new(mut text: &str) -> Self {
|
|
|
|
|
let marker = "$0";
|
|
|
|
|
|
|
|
|
|
let mut res = MarkerInfo::default();
|
|
|
|
|
let mut offset: TextSize = 0.into();
|
|
|
|
|
while !text.is_empty() {
|
|
|
|
|
let idx = text.find(marker).unwrap_or(text.len());
|
|
|
|
|
let (chunk, next) = text.split_at(idx);
|
|
|
|
|
text = next;
|
|
|
|
|
res.cleaned_text.push_str(chunk);
|
|
|
|
|
offset += TextSize::of(chunk);
|
|
|
|
|
|
|
|
|
|
if let Some(next) = text.strip_prefix(marker) {
|
|
|
|
|
text = next;
|
|
|
|
|
|
|
|
|
|
let marker_len = TextSize::of(marker);
|
|
|
|
|
res.markers.push(TextRange::at(offset, marker_len));
|
|
|
|
|
offset += marker_len;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
res
|
|
|
|
|
}
|
|
|
|
|
fn map_range_up(&self, range: TextRange) -> TextRange {
|
|
|
|
|
TextRange::new(
|
|
|
|
|
self.map_offset_up(range.start(), true),
|
|
|
|
|
self.map_offset_up(range.end(), false),
|
|
|
|
|
)
|
|
|
|
|
}
|
|
|
|
|
fn map_offset_up(&self, mut offset: TextSize, start: bool) -> TextSize {
|
|
|
|
|
for r in &self.markers {
|
|
|
|
|
if r.start() < offset || (start && r.start() == offset) {
|
|
|
|
|
offset += r.len()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
offset
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-04-28 11:01:51 +02:00
|
|
|
/// Mapping from extracted documentation code to original code
// Key: offset of a line start within the extracted (synthesized) doctest.
// Value: offset in the original file that the line was extracted from.
type RangesMap = BTreeMap<TextSize, TextSize>;
|
|
|
|
|
|
2020-06-16 23:03:59 +02:00
|
|
|
/// Fence sequence delimiting a Markdown code block inside rustdoc comments.
// `'static` is implied for references in `const` items
// (clippy: redundant_static_lifetimes), so it is omitted here.
const RUSTDOC_FENCE: &str = "```";
|
2021-01-01 17:31:32 +01:00
|
|
|
/// Fence "info string" tokens that still mark a rustdoc code block as Rust.
/// A fence whose comma-separated tokens all appear here is treated as a
/// doctest; any unknown token (e.g. `text`, `sh`) disables injection.
// `'static` is implied for references in `const` items
// (clippy: redundant_static_lifetimes), so it is omitted here.
const RUSTDOC_FENCE_TOKENS: &[&str] = &[
    "",
    "rust",
    "should_panic",
    "ignore",
    "no_run",
    "compile_fail",
    "edition2015",
    "edition2018",
    "edition2021",
];
|
2020-06-16 23:03:59 +02:00
|
|
|
|
2020-04-28 11:01:51 +02:00
|
|
|
/// Extracts Rust code from documentation comments as well as a mapping from
/// the extracted source code back to the original source ranges.
/// Lastly, a vector of new comment highlight ranges (spanning only the
/// comment prefix) is returned which is used in the syntax highlighting
/// injection to replace the previous (line-spanning) comment ranges.
pub(super) fn extract_doc_comments(
    node: &SyntaxNode,
) -> Option<(String, RangesMap, Vec<HighlightedRange>)> {
    // wrap the doctest into function body to get correct syntax highlighting
    let prefix = "fn doctest() {\n";
    let suffix = "}\n";
    // Mapping from extracted documentation code to original code
    let mut range_mapping: RangesMap = BTreeMap::new();
    // Offset of the current line inside the synthesized doctest; starts just
    // past the `fn doctest() {\n` wrapper.
    let mut line_start = TextSize::try_from(prefix.len()).unwrap();
    // Toggled on every comment line containing a ``` fence.
    let mut is_codeblock = false;
    // True only while inside a fenced block whose guards mark it as Rust.
    let mut is_doctest = false;
    // Replace the original, line-spanning comment ranges by new, only comment-prefix
    // spanning comment ranges.
    let mut new_comments = Vec::new();
    let doctest = node
        .children_with_tokens()
        .filter_map(|el| el.into_token().and_then(ast::Comment::cast))
        // Only doc comments can contain doctests.
        .filter(|comment| comment.kind().doc.is_some())
        // Stateful filter: keeps only the lines *between* Rust code fences;
        // the fence lines themselves are dropped.
        .filter(|comment| {
            if let Some(idx) = comment.text().find(RUSTDOC_FENCE) {
                is_codeblock = !is_codeblock;
                // Check whether code is rust by inspecting fence guards
                let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..];
                let is_rust =
                    guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim()));
                is_doctest = is_codeblock && is_rust;
                false
            } else {
                is_doctest
            }
        })
        .map(|comment| {
            let prefix_len = comment.prefix().len();
            let line: &str = comment.text().as_str();
            let range = comment.syntax().text_range();

            // whitespace after comment is ignored
            // NOTE(review): `nth(prefix_len)` counts chars while `prefix_len`
            // is bytes; equivalent only because comment prefixes are ASCII.
            let pos = if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) {
                prefix_len + ws.len_utf8()
            } else {
                prefix_len
            };

            // lines marked with `#` should be ignored in output, we skip the `#` char
            let pos = if let Some(ws) = line.chars().nth(pos).filter(|&c| c == '#') {
                pos + ws.len_utf8()
            } else {
                pos
            };

            // Record where this extracted line starts in the original file.
            range_mapping.insert(line_start, range.start() + TextSize::try_from(pos).unwrap());
            // Highlight only the comment prefix (e.g. `/// `), not the code after it.
            new_comments.push(HighlightedRange {
                range: TextRange::new(
                    range.start(),
                    range.start() + TextSize::try_from(pos).unwrap(),
                ),
                highlight: HighlightTag::Comment | HighlightModifier::Documentation,
                binding_hash: None,
            });
            // Advance past the extracted code plus the `\n` that `join` adds below.
            line_start += range.len() - TextSize::try_from(pos).unwrap();
            line_start += TextSize::try_from('\n'.len_utf8()).unwrap();

            line[pos..].to_owned()
        })
        .join("\n");

    // No Rust code blocks found => nothing to inject.
    if doctest.is_empty() {
        return None;
    }

    let doctest = format!("{}{}{}", prefix, doctest, suffix);
    Some((doctest, range_mapping, new_comments))
}
|
|
|
|
|
|
|
|
|
|
/// Injection of syntax highlighting of doctests.
///
/// `text` is the synthesized doctest file built by `extract_doc_comments`,
/// `range_mapping` maps its line starts back to original offsets, and
/// `new_comments` are the prefix-only comment ranges that replace the
/// original line-spanning comment highlights on `stack`.
pub(super) fn highlight_doc_comment(
    text: String,
    range_mapping: RangesMap,
    new_comments: Vec<HighlightedRange>,
    stack: &mut HighlightedRangeStack,
) {
    // Highlight the synthesized file in isolation, then map results back.
    let (analysis, tmp_file_id) = Analysis::from_single_file(text);

    stack.push();
    for mut h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() {
        // Determine start offset and end offset in case of multi-line ranges
        let mut start_offset = None;
        let mut end_offset = None;
        // Walk the mapped line starts that lie before the range end, from
        // the nearest one backwards.
        for (line_start, orig_line_start) in range_mapping.range(..h.range.end()).rev() {
            // It's possible for orig_line_start - line_start to be negative. Add h.range.start()
            // here and remove it from the end range after the loop below so that the values are
            // always non-negative.
            let offset = h.range.start() + orig_line_start - line_start;
            if line_start <= &h.range.start() {
                // First line at/before the range start anchors the new start.
                start_offset.get_or_insert(offset);
                break;
            } else {
                // Lines strictly after the start anchor the new end
                // (only reached for multi-line ranges).
                end_offset.get_or_insert(offset);
            }
        }
        if let Some(start_offset) = start_offset {
            h.range = TextRange::new(
                start_offset,
                // Undo the `h.range.start()` added above; single-line ranges
                // fall back to `start_offset`.
                h.range.end() + end_offset.unwrap_or(start_offset) - h.range.start(),
            );

            h.highlight |= HighlightModifier::Injected;
            stack.add(h);
        }
    }

    // Inject the comment prefix highlight ranges
    stack.push();
    for comment in new_comments {
        stack.add(comment);
    }
    stack.pop_and_inject(None);
    // NOTE(review): remaining doctest ranges get Dummy|Injected — presumably
    // so clients can style injected code distinctly; confirm against callers.
    stack.pop_and_inject(Some(Highlight::from(HighlightTag::Dummy) | HighlightModifier::Injected));
}
|