From 92dac67d0cf9036557249aa8801b0dfed1fd0320 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 9 Jan 2021 17:31:22 +0300
Subject: Simplify

---
 crates/ide/src/syntax_highlighting.rs           |   8 +-
 crates/ide/src/syntax_highlighting/inject.rs    | 158 ++++++++++++++++++++
 crates/ide/src/syntax_highlighting/injection.rs | 190 ------------------------
 3 files changed, 161 insertions(+), 195 deletions(-)
 create mode 100644 crates/ide/src/syntax_highlighting/inject.rs
 delete mode 100644 crates/ide/src/syntax_highlighting/injection.rs

diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index ad456bc00..079248511 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -4,7 +4,7 @@ mod highlights;
 mod injector;
 
 mod format;
-mod injection;
+mod inject;
 mod macro_rules;
 
 mod html;
@@ -135,9 +135,7 @@ pub(crate) fn highlight(
                 if ast::Attr::can_cast(node.kind()) {
                     inside_attribute = false
                 }
-                if let Some((new_comments, inj)) = injection::extract_doc_comments(node) {
-                    injection::highlight_doc_comment(new_comments, inj, &mut hl);
-                }
+                inject::doc_comment(&mut hl, node);
             }
             WalkEvent::Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
                 inside_attribute = true
@@ -181,7 +179,7 @@ pub(crate) fn highlight(
         if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) {
             if token.is_raw() {
                 let expanded = element_to_highlight.as_token().unwrap().clone();
-                if injection::highlight_injection(&mut hl, &sema, token, expanded).is_some() {
+                if inject::ra_fixture(&mut hl, &sema, token, expanded).is_some() {
                     continue;
                 }
             }
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
new file mode 100644
index 000000000..4647a72c2
--- /dev/null
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -0,0 +1,158 @@
+//! Syntax highlighting injections such as highlighting of documentation tests.
+
+use hir::Semantics;
+use ide_db::call_info::ActiveParameter;
+use syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize};
+
+use crate::{Analysis, HlMod, HlRange, HlTag, RootDatabase};
+
+use super::{highlights::Highlights, injector::Injector};
+
+pub(super) fn ra_fixture(
+    hl: &mut Highlights,
+    sema: &Semantics<RootDatabase>,
+    literal: ast::String,
+    expanded: SyntaxToken,
+) -> Option<()> {
+    let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
+    if !active_parameter.name.starts_with("ra_fixture") {
+        return None;
+    }
+    let value = literal.value()?;
+
+    if let Some(range) = literal.open_quote_text_range() {
+        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
+    }
+
+    let mut inj = Injector::default();
+
+    let mut text = &*value;
+    let mut offset: TextSize = 0.into();
+
+    while !text.is_empty() {
+        let marker = "$0";
+        let idx = text.find(marker).unwrap_or(text.len());
+        let (chunk, next) = text.split_at(idx);
+        inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));
+
+        text = next;
+        offset += TextSize::of(chunk);
+
+        if let Some(next) = text.strip_prefix(marker) {
+            if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
+                hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
+            }
+
+            text = next;
+
+            let marker_len = TextSize::of(marker);
+            offset += marker_len;
+        }
+    }
+
+    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());
+
+    for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
+        for range in inj.map_range_up(hl_range.range) {
+            if let Some(range) = literal.map_range_up(range) {
+                hl_range.range = range;
+                hl.add(hl_range.clone());
+            }
+        }
+    }
+
+    if let Some(range) = literal.close_quote_text_range() {
+        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
+    }
+
+    Some(())
+}
+
+const RUSTDOC_FENCE: &'static str = "```";
+const RUSTDOC_FENCE_TOKENS: &[&'static str] = &[
+    "",
+    "rust",
+    "should_panic",
+    "ignore",
+    "no_run",
+    "compile_fail",
+    "edition2015",
+    "edition2018",
+    "edition2021",
+];
+
+/// Injection of syntax highlighting of doctests.
+pub(super) fn doc_comment(hl: &mut Highlights, node: &SyntaxNode) {
+    let doc_comments = node
+        .children_with_tokens()
+        .filter_map(|it| it.into_token().and_then(ast::Comment::cast))
+        .filter(|it| it.kind().doc.is_some());
+
+    if !doc_comments.clone().any(|it| it.text().contains(RUSTDOC_FENCE)) {
+        return;
+    }
+
+    let mut inj = Injector::default();
+    inj.add_unmapped("fn doctest() {\n");
+
+    let mut is_codeblock = false;
+    let mut is_doctest = false;
+
+    // Replace the original, line-spanning comment ranges by new, only comment-prefix
+    // spanning comment ranges.
+    let mut new_comments = Vec::new();
+    for comment in doc_comments {
+        match comment.text().find(RUSTDOC_FENCE) {
+            Some(idx) => {
+                is_codeblock = !is_codeblock;
+                // Check whether code is rust by inspecting fence guards
+                let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..];
+                let is_rust =
+                    guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim()));
+                is_doctest = is_codeblock && is_rust;
+                continue;
+            }
+            None if !is_doctest => continue,
+            None => (),
+        }
+
+        let line: &str = comment.text().as_str();
+        let range = comment.syntax().text_range();
+
+        let mut pos = TextSize::of(comment.prefix());
+        // whitespace after comment is ignored
+        if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
+            pos += TextSize::of(ws);
+        }
+        // lines marked with `#` should be ignored in output, we skip the `#` char
+        if let Some(ws) = line[pos.into()..].chars().next().filter(|&c| c == '#') {
+            pos += TextSize::of(ws);
+        }
+
+        new_comments.push(TextRange::at(range.start(), pos));
+
+        inj.add(&line[pos.into()..], TextRange::new(range.start() + pos, range.end()));
+        inj.add_unmapped("\n");
+    }
+    inj.add_unmapped("\n}");
+
+    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());
+
+    for h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() {
+        for r in inj.map_range_up(h.range) {
+            hl.add(HlRange {
+                range: r,
+                highlight: h.highlight | HlMod::Injected,
+                binding_hash: h.binding_hash,
+            });
+        }
+    }
+
+    for range in new_comments {
+        hl.add(HlRange {
+            range,
+            highlight: HlTag::Comment | HlMod::Documentation,
+            binding_hash: None,
+        });
+    }
+}
diff --git a/crates/ide/src/syntax_highlighting/injection.rs b/crates/ide/src/syntax_highlighting/injection.rs
deleted file mode 100644
index 008d5ce24..000000000
--- a/crates/ide/src/syntax_highlighting/injection.rs
+++ /dev/null
@@ -1,190 +0,0 @@
-//! Syntax highlighting injections such as highlighting of documentation tests.
-
-use std::convert::TryFrom;
-
-use hir::Semantics;
-use ide_db::call_info::ActiveParameter;
-use itertools::Itertools;
-use syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize};
-
-use crate::{Analysis, HlMod, HlRange, HlTag, RootDatabase};
-
-use super::{highlights::Highlights, injector::Injector};
-
-pub(super) fn highlight_injection(
-    hl: &mut Highlights,
-    sema: &Semantics<RootDatabase>,
-    literal: ast::String,
-    expanded: SyntaxToken,
-) -> Option<()> {
-    let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
-    if !active_parameter.name.starts_with("ra_fixture") {
-        return None;
-    }
-    let value = literal.value()?;
-
-    if let Some(range) = literal.open_quote_text_range() {
-        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
-    }
-
-    let mut inj = Injector::default();
-
-    let mut text = &*value;
-    let mut offset: TextSize = 0.into();
-
-    while !text.is_empty() {
-        let marker = "$0";
-        let idx = text.find(marker).unwrap_or(text.len());
-        let (chunk, next) = text.split_at(idx);
-        inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));
-
-        text = next;
-        offset += TextSize::of(chunk);
-
-        if let Some(next) = text.strip_prefix(marker) {
-            if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
-                hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
-            }
-
-            text = next;
-
-            let marker_len = TextSize::of(marker);
-            offset += marker_len;
-        }
-    }
-
-    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());
-
-    for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
-        for range in inj.map_range_up(hl_range.range) {
-            if let Some(range) = literal.map_range_up(range) {
-                hl_range.range = range;
-                hl.add(hl_range.clone());
-            }
-        }
-    }
-
-    if let Some(range) = literal.close_quote_text_range() {
-        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
-    }
-
-    Some(())
-}
-
-const RUSTDOC_FENCE: &'static str = "```";
-const RUSTDOC_FENCE_TOKENS: &[&'static str] = &[
-    "",
-    "rust",
-    "should_panic",
-    "ignore",
-    "no_run",
-    "compile_fail",
-    "edition2015",
-    "edition2018",
-    "edition2021",
-];
-
-/// Extracts Rust code from documentation comments as well as a mapping from
-/// the extracted source code back to the original source ranges.
-/// Lastly, a vector of new comment highlight ranges (spanning only the
-/// comment prefix) is returned which is used in the syntax highlighting
-/// injection to replace the previous (line-spanning) comment ranges.
-pub(super) fn extract_doc_comments(node: &SyntaxNode) -> Option<(Vec<HlRange>, Injector)> {
-    let mut inj = Injector::default();
-    // wrap the doctest into function body to get correct syntax highlighting
-    let prefix = "fn doctest() {\n";
-    let suffix = "}\n";
-
-    let mut line_start = TextSize::of(prefix);
-    let mut is_codeblock = false;
-    let mut is_doctest = false;
-    // Replace the original, line-spanning comment ranges by new, only comment-prefix
-    // spanning comment ranges.
-    let mut new_comments = Vec::new();
-
-    inj.add_unmapped(prefix);
-    let doctest = node
-        .children_with_tokens()
-        .filter_map(|el| el.into_token().and_then(ast::Comment::cast))
-        .filter(|comment| comment.kind().doc.is_some())
-        .filter(|comment| {
-            if let Some(idx) = comment.text().find(RUSTDOC_FENCE) {
-                is_codeblock = !is_codeblock;
-                // Check whether code is rust by inspecting fence guards
-                let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..];
-                let is_rust =
-                    guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim()));
-                is_doctest = is_codeblock && is_rust;
-                false
-            } else {
-                is_doctest
-            }
-        })
-        .map(|comment| {
-            let prefix_len = comment.prefix().len();
-            let line: &str = comment.text().as_str();
-            let range = comment.syntax().text_range();
-
-            // whitespace after comment is ignored
-            let pos = if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) {
-                prefix_len + ws.len_utf8()
-            } else {
-                prefix_len
-            };
-
-            // lines marked with `#` should be ignored in output, we skip the `#` char
-            let pos = if let Some(ws) = line.chars().nth(pos).filter(|&c| c == '#') {
-                pos + ws.len_utf8()
-            } else {
-                pos
-            };
-
-            new_comments.push(HlRange {
-                range: TextRange::new(
-                    range.start(),
-                    range.start() + TextSize::try_from(pos).unwrap(),
-                ),
-                highlight: HlTag::Comment | HlMod::Documentation,
-                binding_hash: None,
-            });
-            line_start += range.len() - TextSize::try_from(pos).unwrap();
-            line_start += TextSize::of("\n");
-
-            inj.add(
-                &line[pos..],
-                TextRange::new(range.start() + TextSize::try_from(pos).unwrap(), range.end()),
-            );
-            inj.add_unmapped("\n");
-            line[pos..].to_owned()
-        })
-        .join("\n");
-    inj.add_unmapped(suffix);
-
-    if doctest.is_empty() {
-        return None;
-    }
-
-    Some((new_comments, inj))
-}
-
-/// Injection of syntax highlighting of doctests.
-pub(super) fn highlight_doc_comment(
-    new_comments: Vec<HlRange>,
-    inj: Injector,
-    stack: &mut Highlights,
-) {
-    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());
-    for comment in new_comments {
-        stack.add(comment);
-    }
-
-    for h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() {
-        for r in inj.map_range_up(h.range) {
-            stack.add(HlRange {
-                range: r,
-                highlight: h.highlight | HlMod::Injected,
-                binding_hash: h.binding_hash,
-            });
-        }
-    }
-}
--
cgit v1.2.3
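
Editor's illustrative note (not part of the patch): the rustdoc fence handling that `doc_comment` keeps from the old `extract_doc_comments` -- toggling `is_codeblock` on every ``` fence and treating a block as a doctest only when every fence guard is a known Rust token -- can be sketched as a small standalone program. This is a simplified rendition working on plain `&str` lines instead of `ast::Comment` tokens and an `Injector`; `doctest_lines` and the sample input in `main` are made-up names for illustration, not rust-analyzer API.

    // Standalone sketch of the rustdoc fence handling shown in `doc_comment` above.
    // Assumption: doc comment text is already split into lines; we only decide which
    // lines belong to a Rust doctest body.

    const RUSTDOC_FENCE: &str = "```";
    const RUSTDOC_FENCE_TOKENS: &[&str] = &[
        "", "rust", "should_panic", "ignore", "no_run", "compile_fail",
        "edition2015", "edition2018", "edition2021",
    ];

    /// Collects the lines inside Rust doctest code blocks, skipping the fences
    /// themselves and any non-Rust code blocks.
    fn doctest_lines<'a>(doc_lines: &[&'a str]) -> Vec<&'a str> {
        let mut is_codeblock = false;
        let mut is_doctest = false;
        let mut out = Vec::new();
        for line in doc_lines {
            if let Some(idx) = line.find(RUSTDOC_FENCE) {
                is_codeblock = !is_codeblock;
                // An opening fence starts a doctest only if every guard is a Rust token.
                let guards = &line[idx + RUSTDOC_FENCE.len()..];
                let is_rust =
                    guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim()));
                is_doctest = is_codeblock && is_rust;
                continue;
            }
            if is_doctest {
                out.push(*line);
            }
        }
        out
    }

    fn main() {
        let doc = ["Example:", "```", "let x = 1;", "```", "```text", "not rust", "```"];
        // Only the line inside the plain ``` fence is treated as doctest code.
        assert_eq!(doctest_lines(&doc), ["let x = 1;"]);
    }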