author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2020-08-06 02:44:38 +0100
committer GitHub <[email protected]>  2020-08-06 02:44:38 +0100
commit    f1d507270c7d915ef0177feca7b6745d95169ac8 (patch)
tree      f2c039eba062235b458686151441530051723d5f /crates
parent    2cb079ba9add594908f40d48ee2c9ac553306b33 (diff)
parent    195111d7698c39fb4d653da3a39a8cb52c9260e4 (diff)
Merge #5526
5526: Handle semantic token deltas r=kjeremy a=kjeremy

This basically takes the naive approach where we always compute the tokens but save space sending them over the wire, which apparently solves some GC problems with vscode.

This is waiting for https://github.com/gluon-lang/lsp-types/pull/174 to be merged. I am also unsure of the best way to stash the tokens into `DocumentData` in a safe manner.

Co-authored-by: kjeremy <[email protected]>
Co-authored-by: Jeremy Kolb <[email protected]>
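For readers new to the protocol, the background for "deltas": LSP serializes semantic tokens as one flat array of u32s, exactly five per token, with positions encoded relative to the previous token. A full response for a large file is therefore big even when one line changed, which is what the edits/delta request addresses. A minimal sketch of that encoding (field names are illustrative, not the lsp-types definitions):

    // Sketch of the LSP wire encoding assumed throughout this change.
    // Each token contributes exactly five u32s; `diff_tokens` below relies
    // on that when it multiplies token offsets by 5.
    struct Token {
        delta_line: u32,             // lines below the previous token
        delta_start: u32,            // start column, relative to the previous token when on the same line
        length: u32,                 // token length in characters
        token_type: u32,             // index into the legend from the server capabilities
        token_modifiers_bitset: u32, // bitset over the modifier legend
    }

    fn flatten(tokens: &[Token]) -> Vec<u32> {
        tokens
            .iter()
            .flat_map(|t| {
                vec![t.delta_line, t.delta_start, t.length, t.token_type, t.token_modifiers_bitset]
            })
            .collect()
    }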
Diffstat (limited to 'crates')
-rw-r--r--  crates/rust-analyzer/src/caps.rs              4
-rw-r--r--  crates/rust-analyzer/src/document.rs          6
-rw-r--r--  crates/rust-analyzer/src/global_state.rs      8
-rw-r--r--  crates/rust-analyzer/src/handlers.rs         41
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs         5
-rw-r--r--  crates/rust-analyzer/src/semantic_tokens.rs 139
-rw-r--r--  crates/rust-analyzer/src/to_proto.rs         19
7 files changed, 208 insertions, 14 deletions
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index 37d695448..92a743fd8 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -76,7 +76,9 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti
             token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
         },
 
-        document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
+        document_provider: Some(SemanticTokensDocumentProvider::Edits {
+            edits: Some(true),
+        }),
         range_provider: Some(true),
         work_done_progress_options: Default::default(),
     }
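The capability change above switches the advertised document provider from a plain boolean to the `Edits` form, which tells clients they may send `textDocument/semanticTokens/edits`. A hedged sketch of the lsp-types shape this relies on (the variant added by gluon-lang/lsp-types#174; derives and serialization details here are approximate, not the crate's exact definition):

    use serde::{Deserialize, Serialize};

    // Approximate shape only; see lsp-types for the real definition.
    #[derive(Serialize, Deserialize)]
    #[serde(untagged)]
    pub enum SemanticTokensDocumentProvider {
        Bool(bool),                    // serializes as bare `true` / `false`
        Edits { edits: Option<bool> }, // serializes as `{"edits": true}`
    }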
diff --git a/crates/rust-analyzer/src/document.rs b/crates/rust-analyzer/src/document.rs
index 43219e633..e882c9865 100644
--- a/crates/rust-analyzer/src/document.rs
+++ b/crates/rust-analyzer/src/document.rs
@@ -1,9 +1,9 @@
 //! In-memory document information.
 
 /// Information about a document that the Language Client
-// knows about.
-// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
-// client notifications.
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
 #[derive(Debug, Clone)]
 pub(crate) struct DocumentData {
     pub version: Option<i64>,
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index b2d65a6d1..0e592ac1b 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -7,8 +7,8 @@ use std::{sync::Arc, time::Instant};
 
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use lsp_types::Url;
-use parking_lot::RwLock;
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
 use ra_db::{CrateId, VfsPath};
 use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
 use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
@@ -71,6 +71,7 @@ pub(crate) struct GlobalState {
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) status: Status,
     pub(crate) source_root_config: SourceRootConfig,
@@ -86,6 +87,7 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) check_fixes: CheckFixes,
     pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
     mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
 }
@@ -120,6 +122,7 @@ impl GlobalState {
             analysis_host,
             diagnostics: Default::default(),
             mem_docs: FxHashMap::default(),
+            semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
             status: Status::default(),
             source_root_config: SourceRootConfig::default(),
@@ -186,6 +189,7 @@ impl GlobalState {
             latest_requests: Arc::clone(&self.latest_requests),
             check_fixes: Arc::clone(&self.diagnostics.check_fixes),
             mem_docs: self.mem_docs.clone(),
+            semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
         }
     }
 
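Note why the new cache is `Arc<Mutex<...>>` rather than a plain field: `GlobalStateSnapshot` is handed to request handlers running on other threads, and the `Arc::clone` in `snapshot()` copies only the handle, so handlers and the main loop mutate one shared map. A minimal standalone sketch of that sharing pattern (types simplified to std equivalents):

    use std::collections::HashMap;
    use std::sync::{Arc, Mutex};

    fn main() {
        // Plays the role of GlobalState.semantic_tokens_cache.
        let cache: Arc<Mutex<HashMap<String, Vec<u32>>>> = Arc::new(Default::default());

        // Plays the role of the clone taken in GlobalState::snapshot().
        let snapshot_cache = Arc::clone(&cache);
        snapshot_cache.lock().unwrap().insert("file:///a.rs".into(), vec![1, 2, 3, 4, 5]);

        // The insert is visible through the original handle: same map, two handles.
        assert!(cache.lock().unwrap().contains_key("file:///a.rs"));
    }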
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 82f6de5da..067259e24 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -13,9 +13,10 @@ use lsp_types::{
     CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
     CodeActionKind, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams,
     DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, HoverContents, Location,
-    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensParams,
-    SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
-    SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensEditResult,
+    SemanticTokensEditsParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
+    TextDocumentIdentifier, Url, WorkspaceEdit,
 };
 use ra_ide::{
     FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
@@ -1179,6 +1180,40 @@ pub(crate) fn handle_semantic_tokens(
 
     let highlights = snap.analysis.highlight(file_id)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    // Unconditionally cache the tokens
+    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+    Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_edits(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensEditsParams,
+) -> Result<Option<SemanticTokensEditResult>> {
+    let _p = profile("handle_semantic_tokens_edits");
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.analysis.file_line_index(file_id)?;
+
+    let highlights = snap.analysis.highlight(file_id)?;
+
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    let mut cache = snap.semantic_tokens_cache.lock();
+    let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+    if let Some(prev_id) = &cached_tokens.result_id {
+        if *prev_id == params.previous_result_id {
+            let edits = to_proto::semantic_token_edits(&cached_tokens, &semantic_tokens);
+            *cached_tokens = semantic_tokens;
+            return Ok(Some(edits.into()));
+        }
+    }
+
+    *cached_tokens = semantic_tokens.clone();
+
     Ok(Some(semantic_tokens.into()))
 }
 
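The handler above only returns a delta when the client's `previous_result_id` matches the `result_id` cached for that URI; on a miss (first request, evicted cache entry, or a stale id) it falls back to a full token set and refreshes the cache. A hedged sketch of that decision, detached from the rust-analyzer types (names illustrative):

    // Outcome of a semanticTokens/edits request, reduced to its essence.
    enum Response {
        Delta, // SemanticTokensEdits: splice(s) against the tokens the client holds
        Full,  // SemanticTokens: a complete fresh array plus a new result_id
    }

    fn respond(cached_result_id: Option<&str>, previous_result_id: &str) -> Response {
        match cached_result_id {
            // Only a delta when the client provably holds the cached tokens.
            Some(id) if id == previous_result_id => Response::Delta,
            _ => Response::Full,
        }
    }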
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 51626fcd5..ceddb2b05 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -386,6 +386,9 @@ impl GlobalState {
                 handlers::handle_call_hierarchy_outgoing,
             )?
             .on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+            .on::<lsp_types::request::SemanticTokensEditsRequest>(
+                handlers::handle_semantic_tokens_edits,
+            )?
             .on::<lsp_types::request::SemanticTokensRangeRequest>(
                 handlers::handle_semantic_tokens_range,
             )?
@@ -443,6 +446,8 @@ impl GlobalState {
                     None => log::error!("orphan DidCloseTextDocument: {}", path),
                 }
 
+                this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
+
                 if let Some(path) = path.as_path() {
                     this.loader.handle.invalidate(path.to_path_buf());
                 }
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs
index 576bd8adc..afc38fb4e 100644
--- a/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/semantic_tokens.rs
@@ -2,7 +2,10 @@
 
 use std::ops;
 
-use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
+use lsp_types::{
+    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+    SemanticTokensEdit,
+};
 
 macro_rules! define_semantic_token_types {
     ($(($ident:ident, $string:literal)),*$(,)?) => {
@@ -89,14 +92,18 @@ impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
 /// Tokens are encoded relative to each other.
 ///
 /// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
-#[derive(Default)]
 pub(crate) struct SemanticTokensBuilder {
+    id: String,
     prev_line: u32,
     prev_char: u32,
     data: Vec<SemanticToken>,
 }
 
 impl SemanticTokensBuilder {
+    pub fn new(id: String) -> Self {
+        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
+    }
+
     /// Push a new token onto the builder
     pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
         let mut push_line = range.start.line as u32;
@@ -127,10 +134,136 @@ impl SemanticTokensBuilder {
     }
 
     pub fn build(self) -> SemanticTokens {
-        SemanticTokens { result_id: None, data: self.data }
+        SemanticTokens { result_id: Some(self.id), data: self.data }
+    }
+}
+
+pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+    let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+    let (_, old) = old.split_at(offset);
+    let (_, new) = new.split_at(offset);
+
+    let offset_from_end =
+        new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+    let (old, _) = old.split_at(old.len() - offset_from_end);
+    let (new, _) = new.split_at(new.len() - offset_from_end);
+
+    if old.is_empty() && new.is_empty() {
+        vec![]
+    } else {
+        // The lsp data field is actually a byte-diff but we
+        // travel in tokens so `start` and `delete_count` are in multiples of the
+        // serialized size of `SemanticToken`.
+        vec![SemanticTokensEdit {
+            start: 5 * offset as u32,
+            delete_count: 5 * old.len() as u32,
+            data: Some(new.into()),
+        }]
     }
 }
 
 pub fn type_index(type_: SemanticTokenType) -> u32 {
     SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
+        SemanticToken {
+            delta_line: t.0,
+            delta_start: t.1,
+            length: t.2,
+            token_type: t.3,
+            token_modifiers_bitset: t.4,
+        }
+    }
+
+    #[test]
+    fn test_diff_insert_at_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 10,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_at_beginning() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 0,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_in_middle() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 5,
+                delete_count: 0,
+                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_remove_from_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_beginning() {
+        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_middle() {
+        let before = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
+    }
+}
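`diff_tokens` trims the longest common prefix and suffix of the two token streams and emits at most one splice; `start` and `delete_count` are scaled by 5 because, as its comment notes, the LSP edit addresses the flattened u32 array rather than tokens. A worked example that could sit alongside the tests above (assumes the `from` helper and `diff_tokens` from this file):

    #[test]
    fn test_diff_replace_in_middle_worked_example() {
        // before: [A, B]  ->  after: [A, X, B]
        // Common prefix = 1 token, common suffix = 1 token, so the single
        // edit inserts X at token index 1, i.e. u32 offset 5 * 1, deleting nothing.
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((9, 9, 9, 9, 9)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits.len(), 1);
        assert_eq!((edits[0].start, edits[0].delete_count), (5, 0));
        assert_eq!(edits[0].data, Some(vec![from((9, 9, 9, 9, 9))]));
    }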
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index fadcc5853..5eba1f155 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -1,5 +1,8 @@
 //! Conversion of rust-analyzer specific types to lsp_types equivalents.
-use std::path::{self, Path};
+use std::{
+    path::{self, Path},
+    sync::atomic::{AtomicU32, Ordering},
+};
 
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -303,12 +306,15 @@ pub(crate) fn inlay_hint(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ex
     }
 }
 
+static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+
 pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HighlightedRange>,
 ) -> lsp_types::SemanticTokens {
-    let mut builder = semantic_tokens::SemanticTokensBuilder::default();
+    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
+    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
 
     for highlight_range in highlights {
         let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
@@ -328,6 +334,15 @@ pub(crate) fn semantic_tokens(
     builder.build()
 }
 
+pub(crate) fn semantic_token_edits(
+    previous: &lsp_types::SemanticTokens,
+    current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensEdits {
+    let result_id = current.result_id.clone();
+    let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
+    lsp_types::SemanticTokensEdits { result_id, edits }
+}
+
 fn semantic_token_type_and_modifiers(
     highlight: Highlight,
 ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
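Finally, the counter added above is what makes the handshake in handlers.rs sound: every full token set gets a process-unique, monotonically increasing `result_id`, so a delta is only ever applied against the exact response the client says it holds. A self-contained sketch of just that piece (mirroring the diff; helper name is illustrative):

    use std::sync::atomic::{AtomicU32, Ordering};

    static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);

    // Mints "1", "2", "3", ... across all files and requests; fetch_add
    // returns the value before the increment, hence ids start at "1".
    fn next_result_id() -> String {
        TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string()
    }

    fn main() {
        assert_eq!(next_result_id(), "1");
        assert_eq!(next_result_id(), "2");
    }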