Diffstat (limited to 'crates')
 crates/rust-analyzer/src/caps.rs            |   4
 crates/rust-analyzer/src/document.rs        |   6
 crates/rust-analyzer/src/global_state.rs    |  11
 crates/rust-analyzer/src/handlers.rs        |  44
 crates/rust-analyzer/src/main_loop.rs       |   5
 crates/rust-analyzer/src/semantic_tokens.rs | 139
 crates/rust-analyzer/src/to_proto.rs        |  17
 7 files changed, 213 insertions(+), 13 deletions(-)
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index 37d695448..92a743fd8 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -76,7 +76,9 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti
                     token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
                 },
 
-                document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
+                document_provider: Some(SemanticTokensDocumentProvider::Edits {
+                    edits: Some(true),
+                }),
                 range_provider: Some(true),
                 work_done_progress_options: Default::default(),
             }
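
This capability change is what opts the server in to the (then-proposed) delta protocol: instead of the plain boolean document provider, it advertises the variant that also accepts textDocument/semanticTokens/edits requests. A minimal sketch of the two shapes, assuming the lsp-types version this commit builds against (with its proposed-features flag):

    use lsp_types::SemanticTokensDocumentProvider;

    fn main() {
        // Before: the server only offers full-document token sets.
        let _before = SemanticTokensDocumentProvider::Bool(true);
        // After: the server additionally accepts
        // textDocument/semanticTokens/edits requests.
        let _after = SemanticTokensDocumentProvider::Edits { edits: Some(true) };
    }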
diff --git a/crates/rust-analyzer/src/document.rs b/crates/rust-analyzer/src/document.rs
index 43219e633..e882c9865 100644
--- a/crates/rust-analyzer/src/document.rs
+++ b/crates/rust-analyzer/src/document.rs
@@ -1,9 +1,9 @@
 //! In-memory document information.
 
 /// Information about a document that the Language Client
-// knows about.
-// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
-// client notifications.
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
 #[derive(Debug, Clone)]
 pub(crate) struct DocumentData {
     pub version: Option<i64>,
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index b2d65a6d1..4b34c3ec5 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -3,11 +3,14 @@
 //!
 //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
 
-use std::{sync::Arc, time::Instant};
+use std::{
+    sync::{Arc, Mutex},
+    time::Instant,
+};
 
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use lsp_types::Url;
+use lsp_types::{SemanticTokens, Url};
 use parking_lot::RwLock;
 use ra_db::{CrateId, VfsPath};
 use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
@@ -71,6 +74,7 @@ pub(crate) struct GlobalState {
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) status: Status,
     pub(crate) source_root_config: SourceRootConfig,
@@ -86,6 +90,7 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) check_fixes: CheckFixes,
     pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
     mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
 }
@@ -120,6 +125,7 @@ impl GlobalState {
             analysis_host,
             diagnostics: Default::default(),
             mem_docs: FxHashMap::default(),
+            semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
             status: Status::default(),
             source_root_config: SourceRootConfig::default(),
@@ -186,6 +192,7 @@ impl GlobalState {
             latest_requests: Arc::clone(&self.latest_requests),
             check_fixes: Arc::clone(&self.diagnostics.check_fixes),
             mem_docs: self.mem_docs.clone(),
+            semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
         }
     }
 
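
The new cache lives behind Arc<Mutex<…>> so that the snapshot handed to request handlers and the main loop's GlobalState both see the same map: snapshot() clones the Arc handle, not the map itself. A minimal sketch of that sharing, with simplified stand-in types (the real map is FxHashMap<Url, SemanticTokens>):

    use std::collections::HashMap;
    use std::sync::{Arc, Mutex};

    fn main() {
        let cache: Arc<Mutex<HashMap<String, Vec<u32>>>> = Arc::new(Default::default());

        // snapshot() clones the handle, not the map ...
        let snapshot_cache = Arc::clone(&cache);

        // ... so an insertion made through a snapshot is visible to the main
        // loop's handle (and, in the other direction, didClose eviction is
        // visible to handlers).
        snapshot_cache.lock().unwrap().insert("file:///a.rs".into(), vec![1, 2, 3]);
        assert_eq!(cache.lock().unwrap().len(), 1);
    }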
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index e73b3a211..0b0ea23fd 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -13,9 +13,10 @@ use lsp_types::{
     CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
     CodeActionKind, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams,
     DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, HoverContents, Location,
-    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensParams,
-    SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
-    SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensEditResult,
+    SemanticTokensEditsParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
+    TextDocumentIdentifier, Url, WorkspaceEdit,
 };
 use ra_ide::{
     FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
@@ -1184,6 +1185,43 @@ pub(crate) fn handle_semantic_tokens(
 
     let highlights = snap.analysis.highlight(file_id)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    // Unconditionally cache the tokens
+    snap.semantic_tokens_cache
+        .lock()
+        .unwrap()
+        .insert(params.text_document.uri, semantic_tokens.clone());
+
+    Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_edits(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensEditsParams,
+) -> Result<Option<SemanticTokensEditResult>> {
+    let _p = profile("handle_semantic_tokens_edits");
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.analysis.file_line_index(file_id)?;
+
+    let highlights = snap.analysis.highlight(file_id)?;
+
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    let mut cache = snap.semantic_tokens_cache.lock().unwrap();
+    let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+    if let Some(prev_id) = &cached_tokens.result_id {
+        if *prev_id == params.previous_result_id {
+            let edits = to_proto::semantic_token_edits(&cached_tokens, &semantic_tokens);
+            *cached_tokens = semantic_tokens;
+            return Ok(Some(edits.into()));
+        }
+    }
+
+    *cached_tokens = semantic_tokens.clone();
+
     Ok(Some(semantic_tokens.into()))
 }
 
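
One subtlety in handle_semantic_tokens_edits: cache.entry(..).or_default() inserts an empty SemanticTokens on a cache miss, and that default has result_id set to None, so the previous_result_id comparison can never match and the handler falls through to returning (and caching) the full token set. A small self-contained sketch of that behavior, using an illustrative stand-in type rather than the lsp_types one:

    use std::collections::HashMap;

    // Illustrative stand-in for lsp_types::SemanticTokens.
    #[derive(Default, Debug, PartialEq)]
    struct Tokens {
        result_id: Option<String>,
        data: Vec<u32>,
    }

    fn main() {
        let mut cache: HashMap<String, Tokens> = HashMap::new();

        // First edits request for a document that was never cached: the
        // default entry has result_id == None ...
        let cached = cache.entry("file:///main.rs".to_string()).or_default();

        // ... so no previous_result_id can match, and the handler returns
        // (and caches) the full token set instead of a delta.
        assert_eq!(cached.result_id, None);
    }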
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0ace4cb45..eb2a86972 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -387,6 +387,9 @@ impl GlobalState {
                 handlers::handle_call_hierarchy_outgoing,
             )?
             .on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+            .on::<lsp_types::request::SemanticTokensEditsRequest>(
+                handlers::handle_semantic_tokens_edits,
+            )?
             .on::<lsp_types::request::SemanticTokensRangeRequest>(
                 handlers::handle_semantic_tokens_range,
             )?
@@ -449,6 +452,8 @@ impl GlobalState {
                     None => log::error!("orphan DidCloseTextDocument: {}", path),
                 }
 
+                this.semantic_tokens_cache.lock().unwrap().remove(&params.text_document.uri);
+
                 if let Some(path) = path.as_path() {
                     this.loader.handle.invalidate(path.to_path_buf());
                 }
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs
index 576bd8adc..afc38fb4e 100644
--- a/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/semantic_tokens.rs
@@ -2,7 +2,10 @@
 
 use std::ops;
 
-use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
+use lsp_types::{
+    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+    SemanticTokensEdit,
+};
 
 macro_rules! define_semantic_token_types {
     ($(($ident:ident, $string:literal)),*$(,)?) => {
@@ -89,14 +92,18 @@ impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
 /// Tokens are encoded relative to each other.
 ///
 /// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
-#[derive(Default)]
 pub(crate) struct SemanticTokensBuilder {
+    id: String,
     prev_line: u32,
     prev_char: u32,
     data: Vec<SemanticToken>,
 }
 
 impl SemanticTokensBuilder {
+    pub fn new(id: String) -> Self {
+        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
+    }
+
     /// Push a new token onto the builder
     pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
         let mut push_line = range.start.line as u32;
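
For context on what the builder computes: per the LSP semantic-token encoding, each token's line is stored as a delta from the previous token's line, and its start column is a delta from the previous token's start only when both tokens fall on the same line. A minimal sketch of that relative encoding (names are illustrative, not rust-analyzer's):

    // Encode absolute (line, start_char) positions as LSP-style deltas.
    fn encode_relative(absolute: &[(u32, u32)]) -> Vec<(u32, u32)> {
        let (mut prev_line, mut prev_char) = (0, 0);
        absolute
            .iter()
            .map(|&(line, ch)| {
                let delta_line = line - prev_line;
                // The start column is relative only within the same line.
                let delta_char = if delta_line == 0 { ch - prev_char } else { ch };
                prev_line = line;
                prev_char = ch;
                (delta_line, delta_char)
            })
            .collect()
    }

    fn main() {
        // Tokens at (line 0, col 4), (line 0, col 10), (line 2, col 1).
        assert_eq!(
            encode_relative(&[(0, 4), (0, 10), (2, 1)]),
            vec![(0, 4), (0, 6), (2, 1)]
        );
    }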
@@ -127,10 +134,136 @@ impl SemanticTokensBuilder {
     }
 
     pub fn build(self) -> SemanticTokens {
-        SemanticTokens { result_id: None, data: self.data }
+        SemanticTokens { result_id: Some(self.id), data: self.data }
+    }
+}
+
+pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+    let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+    let (_, old) = old.split_at(offset);
+    let (_, new) = new.split_at(offset);
+
+    let offset_from_end =
+        new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+    let (old, _) = old.split_at(old.len() - offset_from_end);
+    let (new, _) = new.split_at(new.len() - offset_from_end);
+
+    if old.is_empty() && new.is_empty() {
+        vec![]
+    } else {
+        // The LSP edit offsets index into the flattened `data` array of
+        // integers, not into the token list, so `start` and `delete_count`
+        // are in multiples of the serialized size of `SemanticToken` (five).
+        vec![SemanticTokensEdit {
+            start: 5 * offset as u32,
+            delete_count: 5 * old.len() as u32,
+            data: Some(new.into()),
+        }]
     }
 }
 
 pub fn type_index(type_: SemanticTokenType) -> u32 {
     SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
+        SemanticToken {
+            delta_line: t.0,
+            delta_start: t.1,
+            length: t.2,
+            token_type: t.3,
+            token_modifiers_bitset: t.4,
+        }
+    }
+
+    #[test]
+    fn test_diff_insert_at_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 10,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_at_beginning() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 0,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_in_middle() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 5,
+                delete_count: 0,
+                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_remove_from_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_beginning() {
+        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_middle() {
+        let before = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
+    }
+}
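
The factor of 5 in diff_tokens exists because SemanticTokensEdit offsets index into the flattened data array of u32s (five per token), not into the token list. A self-contained sketch of how a client would splice such an edit into its flat array, mirroring test_diff_insert_in_middle above (apply_edit is illustrative, not part of rust-analyzer):

    // Splice one SemanticTokensEdit-style edit into a flattened token array.
    fn apply_edit(data: &mut Vec<u32>, start: u32, delete_count: u32, insert: &[u32]) {
        let start = start as usize;
        data.splice(start..start + delete_count as usize, insert.iter().copied());
    }

    fn main() {
        // The `before` tokens of test_diff_insert_in_middle, flattened.
        let mut data = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        // Apply the single edit that test expects: start 5, delete_count 0,
        // inserting the two new tokens one token (five u32s) in.
        apply_edit(&mut data, 5, 0, &[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]);
        assert_eq!(
            data,
            vec![1, 2, 3, 4, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 6, 7, 8, 9, 10]
        );
    }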
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index fadcc5853..8da883ae4 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -1,5 +1,6 @@
 //! Conversion of rust-analyzer specific types to lsp_types equivalents.
 use std::path::{self, Path};
+use std::time::SystemTime;
 
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -308,7 +309,12 @@ pub(crate) fn semantic_tokens(
     line_index: &LineIndex,
     highlights: Vec<HighlightedRange>,
 ) -> lsp_types::SemanticTokens {
-    let mut builder = semantic_tokens::SemanticTokensBuilder::default();
+    let id = match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
+        Ok(d) => d.as_millis().to_string(),
+        Err(_) => String::new(),
+    };
+
+    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
 
     for highlight_range in highlights {
         let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
@@ -328,6 +334,15 @@ pub(crate) fn semantic_tokens(
     builder.build()
 }
 
+pub(crate) fn semantic_token_edits(
+    previous: &lsp_types::SemanticTokens,
+    current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensEdits {
+    let result_id = current.result_id.clone();
+    let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
+    lsp_types::SemanticTokensEdits { result_id, edits }
+}
+
 fn semantic_token_type_and_modifiers(
     highlight: Highlight,
 ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {