author      Aleksey Kladov <[email protected]>    2020-02-27 15:35:00 +0000
committer   GitHub <[email protected]>            2020-02-27 15:35:00 +0000
commit      062c12e3cdad887d751defa6f448edb5426ebf01 (patch)
tree        f158029423bcadf42d3d7ff705c15a11ff3b6973 /crates
parent      57f0d6cba3b2c7b0a2ac384ab0cb73983afd2a5e (diff)
parent      8ed7e751b627791722aa10187894ff6ecc7e5a96 (diff)
Merge pull request #3348 from matklad/single-line
Ensure that semantic tokens are single-line
Diffstat (limited to 'crates')
-rw-r--r--   crates/ra_ide_db/src/line_index.rs              44
-rw-r--r--   crates/rust-analyzer/src/main_loop/handlers.rs   11
2 files changed, 52 insertions, 3 deletions
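
Background on why single-line tokens matter: the LSP semantic-tokens payload encodes each token as five integers relative to the previous token (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), so a token has one start line and a length and cannot faithfully represent a highlight that crosses a newline; that is presumably why the highlight ranges in this commit are split per line before being pushed into the builder. A rough Rust illustration of that flat encoding (the values are invented for illustration, not taken from this commit):

    // Each token contributes five u32s to the `data` array:
    //   [deltaLine, deltaStartChar, length, tokenType, tokenModifiers]
    // deltaLine/deltaStartChar are relative to the previous token.
    let data: Vec<u32> = vec![
        0, 4, 2, 1, 0, // e.g. a 2-char token at line 0, column 4
        1, 0, 7, 3, 0, // e.g. a 7-char token on the next line, column 0
    ];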
diff --git a/crates/ra_ide_db/src/line_index.rs b/crates/ra_ide_db/src/line_index.rs
index af7b759e5..b9db5c276 100644
--- a/crates/ra_ide_db/src/line_index.rs
+++ b/crates/ra_ide_db/src/line_index.rs
@@ -1,7 +1,8 @@
 //! `LineIndex` maps flat `TextUnit` offsets into `(Line, Column)`
 //! representation.
+use std::iter;
 
-use ra_syntax::TextUnit;
+use ra_syntax::{TextRange, TextUnit};
 use rustc_hash::FxHashMap;
 use superslice::Ext;
 
@@ -87,6 +88,19 @@ impl LineIndex {
         self.newlines[line_col.line as usize] + col
     }
 
+    pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+        let lo = self.newlines.lower_bound(&range.start());
+        let hi = self.newlines.upper_bound(&range.end());
+        let all = iter::once(range.start())
+            .chain(self.newlines[lo..hi].iter().copied())
+            .chain(iter::once(range.end()));
+
+        all.clone()
+            .zip(all.skip(1))
+            .map(|(lo, hi)| TextRange::from_to(lo, hi))
+            .filter(|it| !it.is_empty())
+    }
+
     fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
         if let Some(utf16_chars) = self.utf16_lines.get(&line) {
             let mut correction = TextUnit::from_usize(0);
@@ -221,4 +235,32 @@ const C: char = \"メ メ\";
 
         assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
     }
+
+    #[test]
+    fn test_splitlines() {
+        fn r(lo: u32, hi: u32) -> TextRange {
+            TextRange::from_to(lo.into(), hi.into())
+        }
+
+        let text = "a\nbb\nccc\n";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
+        let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
+        assert_eq!(actual, expected);
+
+        let text = "";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
+        let expected = vec![];
+        assert_eq!(actual, expected);
+
+        let text = "\n";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
+        let expected = vec![r(0, 1)];
+        assert_eq!(actual, expected)
+    }
 }
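
The new `lines` method above uses the precomputed `newlines` offsets to slice an arbitrary `TextRange` into one sub-range per source line it touches; each piece still carries its trailing '\n', which the caller can trim if needed. A minimal usage sketch mirroring the test above (the `ra_ide_db::line_index` import path is an assumption about how the module is exposed at this point in history):

    use ra_ide_db::line_index::LineIndex;
    use ra_syntax::{TextRange, TextUnit};

    fn split_by_line() {
        // Line starts in "a\nbb\nccc\n" are at offsets 0, 2 and 5.
        let index = LineIndex::new("a\nbb\nccc\n");
        let whole = TextRange::from_to(TextUnit::from_usize(0), TextUnit::from_usize(9));
        let pieces: Vec<TextRange> = index.lines(whole).collect();
        // Three per-line pieces, each still including its trailing '\n':
        // [0; 2), [2; 5), [5; 9)
        assert_eq!(pieces.len(), 3);
    }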
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs
index 9ed53169c..6f517760f 100644
--- a/crates/rust-analyzer/src/main_loop/handlers.rs
+++ b/crates/rust-analyzer/src/main_loop/handlers.rs
@@ -1078,13 +1078,20 @@ pub fn handle_semantic_tokens(
     let _p = profile("handle_semantic_tokens");
 
     let file_id = params.text_document.try_conv_with(&world)?;
+    let text = world.analysis().file_text(file_id)?;
     let line_index = world.analysis().file_line_index(file_id)?;
 
     let mut builder = SemanticTokensBuilder::default();
 
     for highlight_range in world.analysis().highlight(file_id)?.into_iter() {
-        let (token_type, token_modifiers) = highlight_range.highlight.conv();
-        builder.push(highlight_range.range.conv_with(&line_index), token_type, token_modifiers);
+        let (token_index, modifier_bitset) = highlight_range.highlight.conv();
+        for mut range in line_index.lines(highlight_range.range) {
+            if text[range].ends_with('\n') {
+                range = TextRange::from_to(range.start(), range.end() - TextUnit::of_char('\n'));
+            }
+            let range = range.conv_with(&line_index);
+            builder.push(range, token_index, modifier_bitset);
+        }
     }
 
     let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
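
Taken together, the handler change emits one semantic token per line for a highlight that spans several lines, stripping each piece's trailing newline before converting it to an LSP range. A standalone sketch of that splitting step (a hypothetical helper for illustration, not the handler code itself; it relies only on the `lines` API added in the first diff):

    use ra_ide_db::line_index::LineIndex;
    use ra_syntax::{TextRange, TextUnit};

    /// Split `range` into single-line pieces, dropping each piece's trailing '\n',
    /// mirroring what handle_semantic_tokens now does before pushing tokens.
    fn single_line_ranges(text: &str, index: &LineIndex, range: TextRange) -> Vec<TextRange> {
        index
            .lines(range)
            .map(|mut piece| {
                if text[piece].ends_with('\n') {
                    piece = TextRange::from_to(piece.start(), piece.end() - TextUnit::of_char('\n'));
                }
                piece
            })
            .collect()
    }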