author     bors[bot] <bors[bot]@users.noreply.github.com>   2018-10-20 21:04:06 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>   2018-10-20 21:04:06 +0100
commit     fd336d1134d405d833b762101a25c00076bc7fd2 (patch)
tree       040ab6dc1286ab9fe5da0002d29ae4eb7a37850a /crates
parent     73dd870da2dcc991b0fdcdde8bee91f05cb9e182 (diff)
parent     0102a01f76c855da447e25eb81191047a3ca79b8 (diff)
Merge #147

147: Cancelation r=matklad a=matklad

This series of commits switches the cancellation strategy from `JobToken` (cancellation tokens explicitly controlled by the caller) to salsa's built-in auto-cancellation. "Auto" means that, as soon as we advance the revision, all pending queries are cancelled automatically, and this looks like the semantics we actually want. "Client-side" cancellation is a rare event, and it's ok to just punt on it. Automatic cancellation after the user types something in happens all the time.

Co-authored-by: Aleksey Kladov <[email protected]>
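For orientation before the full diff, here is the pattern the series introduces, condensed from the ra_analysis changes below. This is a sketch, not a standalone module: SyntaxDatabase, FileId and SymbolIndex are the crate's own types and are not repeated here. Queries now return Cancelable<T> and bail out with `?` as soon as salsa reports that the revision they started in is outdated.

use std::sync::Arc;

// Public error/result pair added in crates/ra_analysis/src/lib.rs.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Canceled;

pub type Cancelable<T> = Result<T, Canceled>;

// New helper in crates/ra_analysis/src/db.rs: ask salsa whether the
// revision this query runs against has already been superseded.
pub(crate) fn check_canceled(db: &impl salsa::Database) -> Cancelable<()> {
    if db.salsa_runtime().is_current_revision_canceled() {
        Err(Canceled)
    } else {
        Ok(())
    }
}

// Long-running queries start with the check and let `?` propagate the
// cancellation up to the LSP handlers, e.g. file_symbols in db.rs:
fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
    db::check_canceled(db)?;
    let syntax = db.file_syntax(file_id);
    Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
}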
Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_analysis/Cargo.toml                    1
-rw-r--r--  crates/ra_analysis/src/db.rs                    34
-rw-r--r--  crates/ra_analysis/src/descriptors.rs            5
-rw-r--r--  crates/ra_analysis/src/imp.rs                   78
-rw-r--r--  crates/ra_analysis/src/job.rs                   53
-rw-r--r--  crates/ra_analysis/src/lib.rs                   76
-rw-r--r--  crates/ra_analysis/src/module_map.rs            22
-rw-r--r--  crates/ra_analysis/src/roots.rs                 30
-rw-r--r--  crates/ra_analysis/src/symbol_index.rs          14
-rw-r--r--  crates/ra_analysis/tests/tests.rs               31
-rw-r--r--  crates/ra_lsp_server/src/main_loop/handlers.rs  78
-rw-r--r--  crates/ra_lsp_server/src/main_loop/mod.rs       38
-rw-r--r--  crates/ra_lsp_server/src/path_map.rs             4
-rw-r--r--  crates/ra_lsp_server/src/project_model.rs        4
-rw-r--r--  crates/ra_lsp_server/src/thread_watcher.rs       5
15 files changed, 212 insertions, 261 deletions
diff --git a/crates/ra_analysis/Cargo.toml b/crates/ra_analysis/Cargo.toml
index dd4ec8375..d7ac69fe8 100644
--- a/crates/ra_analysis/Cargo.toml
+++ b/crates/ra_analysis/Cargo.toml
@@ -7,7 +7,6 @@ authors = ["Aleksey Kladov <[email protected]>"]
 [dependencies]
 relative-path = "0.3.7"
 log = "0.4.2"
-crossbeam-channel = "0.2.4"
 parking_lot = "0.6.3"
 once_cell = "0.1.5"
 rayon = "1.0.2"
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
index cce959669..09d74b9e7 100644
--- a/crates/ra_analysis/src/db.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -1,17 +1,20 @@
-use crate::{
-    module_map::{ModuleDescriptorQuery, ModuleTreeQuery, ModulesDatabase},
-    symbol_index::SymbolIndex,
-    FileId, FileResolverImp,
+use std::{
+    fmt,
+    hash::{Hash, Hasher},
+    sync::Arc,
 };
+
 use ra_editor::LineIndex;
 use ra_syntax::File;
 use rustc_hash::FxHashSet;
 use salsa;

-use std::{
-    fmt,
-    hash::{Hash, Hasher},
-    sync::Arc,
+use crate::{
+    db,
+    Cancelable, Canceled,
+    module_map::{ModuleDescriptorQuery, ModuleTreeQuery, ModulesDatabase},
+    symbol_index::SymbolIndex,
+    FileId, FileResolverImp,
 };

 #[derive(Default)]
@@ -31,6 +34,14 @@ impl salsa::Database for RootDatabase {
     }
 }

+pub(crate) fn check_canceled(db: &impl salsa::Database) -> Cancelable<()> {
+    if db.salsa_runtime().is_current_revision_canceled() {
+        Err(Canceled)
+    } else {
+        Ok(())
+    }
+}
+
 impl salsa::ParallelDatabase for RootDatabase {
     fn fork(&self) -> Self {
         RootDatabase {
@@ -98,7 +109,7 @@ salsa::query_group! {
         fn file_lines(file_id: FileId) -> Arc<LineIndex> {
             type FileLinesQuery;
         }
-        fn file_symbols(file_id: FileId) -> Arc<SymbolIndex> {
+        fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
            type FileSymbolsQuery;
        }
    }
@@ -112,7 +123,8 @@ fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
     let text = db.file_text(file_id);
     Arc::new(LineIndex::new(&*text))
 }
-fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
+fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
+    db::check_canceled(db)?;
     let syntax = db.file_syntax(file_id);
-    Arc::new(SymbolIndex::for_file(file_id, syntax))
+    Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
 }
diff --git a/crates/ra_analysis/src/descriptors.rs b/crates/ra_analysis/src/descriptors.rs
index 6f26f9935..310bf1585 100644
--- a/crates/ra_analysis/src/descriptors.rs
+++ b/crates/ra_analysis/src/descriptors.rs
@@ -1,4 +1,5 @@
-use crate::{imp::FileResolverImp, FileId};
+use std::collections::BTreeMap;
+
 use ra_syntax::{
     ast::{self, AstNode, NameOwner},
     text_utils::is_subrange,
@@ -6,7 +7,7 @@ use ra_syntax::{
 };
 use relative_path::RelativePathBuf;

-use std::collections::BTreeMap;
+use crate::{imp::FileResolverImp, FileId};

 #[derive(Debug, PartialEq, Eq, Hash)]
 pub struct ModuleDescriptor {
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index a67b1717a..32e9bb6d7 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -19,8 +19,8 @@ use rustc_hash::FxHashSet;
 use crate::{
     descriptors::{FnDescriptor, ModuleTreeDescriptor, Problem},
     roots::{ReadonlySourceRoot, SourceRoot, WritableSourceRoot},
-    CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, JobToken, Position,
-    Query, SourceChange, SourceFileEdit,
+    CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, Position,
+    Query, SourceChange, SourceFileEdit, Cancelable,
 };

 #[derive(Clone, Debug)]
@@ -148,19 +148,21 @@ impl AnalysisImpl {
     pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
         self.root(file_id).lines(file_id)
     }
-    pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
+    pub fn world_symbols(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         let mut buf = Vec::new();
         if query.libs {
-            self.data.libs.iter().for_each(|it| it.symbols(&mut buf));
+            for lib in self.data.libs.iter() {
+                lib.symbols(&mut buf)?;
+            }
         } else {
-            self.data.root.symbols(&mut buf);
+            self.data.root.symbols(&mut buf)?;
         }
-        query.search(&buf, token)
+        Ok(query.search(&buf))
     }
-    pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
+    pub fn parent_module(&self, file_id: FileId) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         let root = self.root(file_id);
-        let module_tree = root.module_tree();
-        module_tree
+        let module_tree = root.module_tree()?;
+        let res = module_tree
             .parent_modules(file_id)
             .iter()
             .map(|link| {
@@ -174,10 +176,11 @@ impl AnalysisImpl {
                 };
                 (file_id, sym)
             })
-            .collect()
+            .collect();
+        Ok(res)
     }
-    pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
-        let module_tree = self.root(file_id).module_tree();
+    pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
+        let module_tree = self.root(file_id).module_tree()?;
         let crate_graph = &self.data.crate_graph;
         let mut res = Vec::new();
         let mut work = VecDeque::new();
@@ -195,7 +198,7 @@ impl AnalysisImpl {
                 .filter(|&id| visited.insert(id));
             work.extend(parents);
         }
-        res
+        Ok(res)
     }
     pub fn crate_root(&self, crate_id: CrateId) -> FileId {
         self.data.crate_graph.crate_roots[&crate_id]
@@ -204,15 +207,14 @@ impl AnalysisImpl {
         &self,
         file_id: FileId,
         offset: TextUnit,
-        token: &JobToken,
-    ) -> Vec<(FileId, FileSymbol)> {
+    ) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         let root = self.root(file_id);
-        let module_tree = root.module_tree();
+        let module_tree = root.module_tree()?;
         let file = root.syntax(file_id);
         let syntax = file.syntax();
         if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
             // First try to resolve the symbol locally
-            if let Some((name, range)) = resolve_local_name(&file, offset, name_ref) {
+            return if let Some((name, range)) = resolve_local_name(&file, offset, name_ref) {
                 let mut vec = vec![];
                 vec.push((
                     file_id,
@@ -222,12 +224,11 @@ impl AnalysisImpl {
                         kind: NAME,
                     },
                 ));
-
-                return vec;
+                Ok(vec)
             } else {
                 // If that fails try the index based approach.
-                return self.index_resolve(name_ref, token);
-            }
+                self.index_resolve(name_ref)
+            };
         }
         if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
             if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
@@ -250,14 +251,14 @@ impl AnalysisImpl {
                         })
                         .collect();

-                    return res;
+                    return Ok(res);
                 }
             }
         }
-        vec![]
+        Ok(vec![])
     }

-    pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, _token: &JobToken) -> Vec<(FileId, TextRange)> {
+    pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit) -> Vec<(FileId, TextRange)> {
         let root = self.root(file_id);
         let file = root.syntax(file_id);
         let syntax = file.syntax();
@@ -289,9 +290,9 @@ impl AnalysisImpl {
         ret
     }

-    pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
+    pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
         let root = self.root(file_id);
-        let module_tree = root.module_tree();
+        let module_tree = root.module_tree()?;
         let syntax = root.syntax(file_id);

         let mut res = ra_editor::diagnostics(&syntax)
@@ -346,7 +347,7 @@ impl AnalysisImpl {
             };
             res.push(diag)
         }
-        res
+        Ok(res)
     }

     pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
@@ -379,18 +380,23 @@ impl AnalysisImpl {
         &self,
         file_id: FileId,
         offset: TextUnit,
-        token: &JobToken,
-    ) -> Option<(FnDescriptor, Option<usize>)> {
+    ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> {
         let root = self.root(file_id);
         let file = root.syntax(file_id);
         let syntax = file.syntax();

         // Find the calling expression and it's NameRef
-        let calling_node = FnCallNode::with_node(syntax, offset)?;
-        let name_ref = calling_node.name_ref()?;
+        let calling_node = match FnCallNode::with_node(syntax, offset) {
+            Some(node) => node,
+            None => return Ok(None),
+        };
+        let name_ref = match calling_node.name_ref() {
+            Some(name) => name,
+            None => return Ok(None),
+        };

         // Resolve the function's NameRef (NOTE: this isn't entirely accurate).
-        let file_symbols = self.index_resolve(name_ref, token);
+        let file_symbols = self.index_resolve(name_ref)?;
         for (_, fs) in file_symbols {
             if fs.kind == FN_DEF {
                 if let Some(fn_def) = find_node_at_offset(syntax, fs.node_range.start()) {
@@ -432,21 +438,21 @@ impl AnalysisImpl {
                             }
                         }

-                        return Some((descriptor, current_parameter));
+                        return Ok(Some((descriptor, current_parameter)));
                     }
                 }
             }
         }

-        None
+        Ok(None)
     }

-    fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
+    fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         let name = name_ref.text();
         let mut query = Query::new(name.to_string());
         query.exact();
         query.limit(4);
-        self.world_symbols(query, token)
+        self.world_symbols(query)
     }

     fn resolve_module(
diff --git a/crates/ra_analysis/src/job.rs b/crates/ra_analysis/src/job.rs
deleted file mode 100644
index 2871f9839..000000000
--- a/crates/ra_analysis/src/job.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use crossbeam_channel::{bounded, Receiver, Sender};
-
-pub struct JobHandle {
-    job_alive: Receiver<Never>,
-    _job_canceled: Sender<Never>,
-}
-
-pub struct JobToken {
-    _job_alive: Sender<Never>,
-    job_canceled: Receiver<Never>,
-}
-
-impl JobHandle {
-    pub fn new() -> (JobHandle, JobToken) {
-        let (sender_alive, receiver_alive) = bounded(0);
-        let (sender_canceled, receiver_canceled) = bounded(0);
-        let token = JobToken {
-            _job_alive: sender_alive,
-            job_canceled: receiver_canceled,
-        };
-        let handle = JobHandle {
-            job_alive: receiver_alive,
-            _job_canceled: sender_canceled,
-        };
-        (handle, token)
-    }
-    pub fn has_completed(&self) -> bool {
-        is_closed(&self.job_alive)
-    }
-    pub fn cancel(self) {}
-}
-
-impl JobToken {
-    pub fn is_canceled(&self) -> bool {
-        is_closed(&self.job_canceled)
-    }
-}
-
-// We don't actually send messages through the channels,
-// and instead just check if the channel is closed,
-// so we use uninhabited enum as a message type
-enum Never {}
-
-/// Nonblocking
-fn is_closed(chan: &Receiver<Never>) -> bool {
-    select! {
-        recv(chan, msg) => match msg {
-            None => true,
-            Some(never) => match never {}
-        }
-        default => false,
-    }
-}
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 46cc0722b..28e0a12b2 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -7,8 +7,6 @@ extern crate ra_editor;
 extern crate ra_syntax;
 extern crate rayon;
 extern crate relative_path;
-#[macro_use]
-extern crate crossbeam_channel;
 extern crate im;
 extern crate rustc_hash;
 extern crate salsa;
@@ -16,27 +14,40 @@ extern crate salsa;
 mod db;
 mod descriptors;
 mod imp;
-mod job;
 mod module_map;
 mod roots;
 mod symbol_index;

 use std::{fmt::Debug, sync::Arc};

-use crate::imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp};
 use ra_syntax::{AtomEdit, File, TextRange, TextUnit};
 use relative_path::{RelativePath, RelativePathBuf};
 use rustc_hash::FxHashMap;

+use crate::imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp};
+
 pub use crate::{
     descriptors::FnDescriptor,
-    job::{JobHandle, JobToken},
 };
 pub use ra_editor::{
     CompletionItem, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
     RunnableKind, StructureNode,
 };

+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Canceled;
+
+pub type Cancelable<T> = Result<T, Canceled>;
+
+impl std::fmt::Display for Canceled {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        fmt.write_str("Canceled")
+    }
+}
+
+impl std::error::Error for Canceled {
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct FileId(pub u32);

@@ -205,60 +216,57 @@ impl Analysis {
         let file = self.imp.file_syntax(file_id);
         ra_editor::file_structure(&file)
     }
-    pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
-        self.imp.world_symbols(query, token)
+    pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
+        let file = self.imp.file_syntax(file_id);
+        ra_editor::folding_ranges(&file)
+    }
+    pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> {
+        self.imp.world_symbols(query)
     }
     pub fn approximately_resolve_symbol(
         &self,
         file_id: FileId,
-        offset: TextUnit,
-        token: &JobToken,
-    ) -> Vec<(FileId, FileSymbol)> {
+        offset: TextUnit
+    ) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         self.imp
-            .approximately_resolve_symbol(file_id, offset, token)
+            .approximately_resolve_symbol(file_id, offset)
     }
-    pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, TextRange)> {
-        self.imp.find_all_refs(file_id, offset, token)
+    pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, ) -> Cancelable<Vec<(FileId, TextRange)>> {
+        Ok(self.imp.find_all_refs(file_id, offset))
     }
-    pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
+    pub fn parent_module(&self, file_id: FileId) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         self.imp.parent_module(file_id)
     }
-    pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
+    pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
         self.imp.crate_for(file_id)
     }
-    pub fn crate_root(&self, crate_id: CrateId) -> FileId {
-        self.imp.crate_root(crate_id)
+    pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> {
+        Ok(self.imp.crate_root(crate_id))
     }
-    pub fn runnables(&self, file_id: FileId) -> Vec<Runnable> {
+    pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> {
         let file = self.imp.file_syntax(file_id);
-        ra_editor::runnables(&file)
+        Ok(ra_editor::runnables(&file))
     }
-    pub fn highlight(&self, file_id: FileId) -> Vec<HighlightedRange> {
+    pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
         let file = self.imp.file_syntax(file_id);
-        ra_editor::highlight(&file)
+        Ok(ra_editor::highlight(&file))
     }
-    pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Option<Vec<CompletionItem>> {
+    pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Cancelable<Option<Vec<CompletionItem>>> {
         let file = self.imp.file_syntax(file_id);
-        ra_editor::scope_completion(&file, offset)
+        Ok(ra_editor::scope_completion(&file, offset))
     }
-    pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
-        self.imp.assists(file_id, range)
+    pub fn assists(&self, file_id: FileId, range: TextRange) -> Cancelable<Vec<SourceChange>> {
+        Ok(self.imp.assists(file_id, range))
     }
-    pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
+    pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
         self.imp.diagnostics(file_id)
     }
-    pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
-        let file = self.imp.file_syntax(file_id);
-        ra_editor::folding_ranges(&file)
-    }
-
     pub fn resolve_callable(
         &self,
         file_id: FileId,
         offset: TextUnit,
-        token: &JobToken,
-    ) -> Option<(FnDescriptor, Option<usize>)> {
-        self.imp.resolve_callable(file_id, offset, token)
+    ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> {
+        self.imp.resolve_callable(file_id, offset)
     }
 }

diff --git a/crates/ra_analysis/src/module_map.rs b/crates/ra_analysis/src/module_map.rs
index b15432498..3c800265a 100644
--- a/crates/ra_analysis/src/module_map.rs
+++ b/crates/ra_analysis/src/module_map.rs
@@ -1,37 +1,41 @@
+use std::sync::Arc;
+
 use crate::{
+    db,
+    Cancelable,
     db::SyntaxDatabase,
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
     FileId,
 };

-use std::sync::Arc;
-
 salsa::query_group! {
     pub(crate) trait ModulesDatabase: SyntaxDatabase {
-        fn module_tree() -> Arc<ModuleTreeDescriptor> {
+        fn module_tree() -> Cancelable<Arc<ModuleTreeDescriptor>> {
             type ModuleTreeQuery;
         }
-        fn module_descriptor(file_id: FileId) -> Arc<ModuleDescriptor> {
+        fn module_descriptor(file_id: FileId) -> Cancelable<Arc<ModuleDescriptor>> {
             type ModuleDescriptorQuery;
         }
     }
 }

-fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Arc<ModuleDescriptor> {
+fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Cancelable<Arc<ModuleDescriptor>> {
+    db::check_canceled(db)?;
     let file = db.file_syntax(file_id);
-    Arc::new(ModuleDescriptor::new(file.ast()))
+    Ok(Arc::new(ModuleDescriptor::new(file.ast())))
 }

-fn module_tree(db: &impl ModulesDatabase) -> Arc<ModuleTreeDescriptor> {
+fn module_tree(db: &impl ModulesDatabase) -> Cancelable<Arc<ModuleTreeDescriptor>> {
+    db::check_canceled(db)?;
     let file_set = db.file_set();
     let mut files = Vec::new();
     for &file_id in file_set.files.iter() {
-        let module_descr = db.module_descriptor(file_id);
+        let module_descr = db.module_descriptor(file_id)?;
         files.push((file_id, module_descr));
     }
     let res = ModuleTreeDescriptor::new(
         files.iter().map(|(file_id, descr)| (*file_id, &**descr)),
         &file_set.resolver,
     );
-    Arc::new(res)
+    Ok(Arc::new(res))
 }
diff --git a/crates/ra_analysis/src/roots.rs b/crates/ra_analysis/src/roots.rs
index 19c84df65..123c4acfa 100644
--- a/crates/ra_analysis/src/roots.rs
+++ b/crates/ra_analysis/src/roots.rs
@@ -8,6 +8,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use salsa::Database;

 use crate::{
+    Cancelable,
     db::{self, FilesDatabase, SyntaxDatabase},
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
     imp::FileResolverImp,
@@ -18,10 +19,10 @@ use crate::{

 pub(crate) trait SourceRoot {
     fn contains(&self, file_id: FileId) -> bool;
-    fn module_tree(&self) -> Arc<ModuleTreeDescriptor>;
+    fn module_tree(&self) -> Cancelable<Arc<ModuleTreeDescriptor>>;
     fn lines(&self, file_id: FileId) -> Arc<LineIndex>;
     fn syntax(&self, file_id: FileId) -> File;
-    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
+    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()>;
 }

 #[derive(Default, Debug, Clone)]
@@ -64,7 +65,7 @@ impl WritableSourceRoot {
 }

 impl SourceRoot for WritableSourceRoot {
-    fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
+    fn module_tree(&self) -> Cancelable<Arc<ModuleTreeDescriptor>> {
         self.db.module_tree()
     }
     fn contains(&self, file_id: FileId) -> bool {
@@ -76,14 +77,12 @@ impl SourceRoot for WritableSourceRoot {
     fn syntax(&self, file_id: FileId) -> File {
         self.db.file_syntax(file_id)
     }
-    fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
-        let db = &self.db;
-        let symbols = db.file_set();
-        let symbols = symbols
-            .files
-            .iter()
-            .map(|&file_id| db.file_symbols(file_id));
-        acc.extend(symbols);
+    fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()> {
+        for &file_id in self.db.file_set().files.iter() {
+            let symbols = self.db.file_symbols(file_id)?;
+            acc.push(symbols)
+        }
+        Ok(())
     }
 }

@@ -167,8 +166,8 @@ impl ReadonlySourceRoot {
 }

 impl SourceRoot for ReadonlySourceRoot {
-    fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
-        Arc::clone(&self.module_tree)
+    fn module_tree(&self) -> Cancelable<Arc<ModuleTreeDescriptor>> {
+        Ok(Arc::clone(&self.module_tree))
     }
     fn contains(&self, file_id: FileId) -> bool {
         self.file_map.contains_key(&file_id)
@@ -179,7 +178,8 @@ impl SourceRoot for ReadonlySourceRoot {
     fn syntax(&self, file_id: FileId) -> File {
         self.data(file_id).syntax().clone()
     }
-    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) {
-        acc.push(Arc::clone(&self.symbol_index))
+    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()> {
+        acc.push(Arc::clone(&self.symbol_index));
+        Ok(())
     }
 }
diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs
index 51eef8170..a0f3c0437 100644
--- a/crates/ra_analysis/src/symbol_index.rs
+++ b/crates/ra_analysis/src/symbol_index.rs
@@ -1,4 +1,8 @@
-use crate::{FileId, JobToken, Query};
+use std::{
+    hash::{Hash, Hasher},
+    sync::Arc,
+};
+
 use fst::{self, Streamer};
 use ra_editor::{file_symbols, FileSymbol};
 use ra_syntax::{
@@ -7,10 +11,7 @@ use ra_syntax::{
 };
 use rayon::prelude::*;

-use std::{
-    hash::{Hash, Hasher},
-    sync::Arc,
-};
+use crate::{FileId, Query};

 #[derive(Debug)]
 pub(crate) struct SymbolIndex {
@@ -59,7 +60,6 @@ impl Query {
     pub(crate) fn search(
         self,
         indices: &[Arc<SymbolIndex>],
-        token: &JobToken,
     ) -> Vec<(FileId, FileSymbol)> {
         let mut op = fst::map::OpBuilder::new();
         for file_symbols in indices.iter() {
@@ -69,7 +69,7 @@ impl Query {
         let mut stream = op.union();
         let mut res = Vec::new();
         while let Some((_, indexed_values)) = stream.next() {
-            if res.len() >= self.limit || token.is_canceled() {
+            if res.len() >= self.limit {
                 break;
             }
             for indexed_value in indexed_values {
diff --git a/crates/ra_analysis/tests/tests.rs b/crates/ra_analysis/tests/tests.rs
index 0c2c69ea0..7ae3d0eeb 100644
--- a/crates/ra_analysis/tests/tests.rs
+++ b/crates/ra_analysis/tests/tests.rs
@@ -7,15 +7,15 @@ extern crate test_utils;

 use std::sync::Arc;

-use ra_analysis::{
-    Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, FnDescriptor, JobHandle,
-};
 use ra_syntax::TextRange;
-
 use relative_path::{RelativePath, RelativePathBuf};
 use rustc_hash::FxHashMap;
 use test_utils::{assert_eq_dbg, extract_offset};

+use ra_analysis::{
+    Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, FnDescriptor,
+};
+
 #[derive(Debug)]
 struct FileMap(Vec<(FileId, RelativePathBuf)>);

@@ -64,24 +64,22 @@ fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) {
     let (offset, code) = extract_offset(text);
     let code = code.as_str();

-    let (_handle, token) = JobHandle::new();
     let snap = analysis(&[("/lib.rs", code)]);

-    snap.resolve_callable(FileId(1), offset, &token).unwrap()
+    snap.resolve_callable(FileId(1), offset).unwrap().unwrap()
 }

 #[test]
 fn test_resolve_module() {
     let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
-    let (_handle, token) = JobHandle::new();
-    let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into(), &token);
+    let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into()).unwrap();
     assert_eq_dbg(
         r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#,
         &symbols,
     );

     let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo/mod.rs", "")]);
-    let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into(), &token);
+    let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into()).unwrap();
     assert_eq_dbg(
         r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#,
         &symbols,
@@ -91,7 +89,7 @@ fn test_resolve_module() {
 #[test]
 fn test_unresolved_module_diagnostic() {
     let snap = analysis(&[("/lib.rs", "mod foo;")]);
-    let diagnostics = snap.diagnostics(FileId(1));
+    let diagnostics = snap.diagnostics(FileId(1)).unwrap();
     assert_eq_dbg(
         r#"[Diagnostic {
             message: "unresolved module",
@@ -108,14 +106,14 @@ fn test_unresolved_module_diagnostic() {
 #[test]
 fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() {
     let snap = analysis(&[("/lib.rs", "mod foo {}")]);
-    let diagnostics = snap.diagnostics(FileId(1));
+    let diagnostics = snap.diagnostics(FileId(1)).unwrap();
     assert_eq_dbg(r#"[]"#, &diagnostics);
 }

 #[test]
 fn test_resolve_parent_module() {
     let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
-    let symbols = snap.parent_module(FileId(2));
+    let symbols = snap.parent_module(FileId(2)).unwrap();
     assert_eq_dbg(
         r#"[(FileId(1), FileSymbol { name: "foo", node_range: [0; 8), kind: MODULE })]"#,
         &symbols,
@@ -126,7 +124,7 @@ fn test_resolve_parent_module() {
 fn test_resolve_crate_root() {
     let mut host = analysis_host(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
     let snap = host.analysis();
-    assert!(snap.crate_for(FileId(2)).is_empty());
+    assert!(snap.crate_for(FileId(2)).unwrap().is_empty());

     let crate_graph = CrateGraph {
         crate_roots: {
@@ -138,7 +136,7 @@ fn test_resolve_crate_root() {
     host.set_crate_graph(crate_graph);
     let snap = host.analysis();

-    assert_eq!(snap.crate_for(FileId(2)), vec![CrateId(1)],);
+    assert_eq!(snap.crate_for(FileId(2)).unwrap(), vec![CrateId(1)],);
 }

 #[test]
@@ -232,10 +230,9 @@ fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> {
     let (offset, code) = extract_offset(text);
     let code = code.as_str();

-    let (_handle, token) = JobHandle::new();
     let snap = analysis(&[("/lib.rs", code)]);

-    snap.find_all_refs(FileId(1), offset, &token)
+    snap.find_all_refs(FileId(1), offset).unwrap()
 }

 #[test]
@@ -266,4 +263,4 @@ fn test_find_all_refs_for_param_inside() {

     let refs = get_all_refs(code);
     assert_eq!(refs.len(), 2);
-}
\ No newline at end of file
+}
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 639fe4553..f5dff4c80 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -1,12 +1,13 @@
1use rustc_hash::FxHashMap; 1use std::collections::HashMap;
2 2
3use rustc_hash::FxHashMap;
3use languageserver_types::{ 4use languageserver_types::{
4 CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic, 5 CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic,
5 DiagnosticSeverity, DocumentSymbol, FoldingRange, FoldingRangeKind, FoldingRangeParams, 6 DiagnosticSeverity, DocumentSymbol, FoldingRange, FoldingRangeKind, FoldingRangeParams,
6 InsertTextFormat, Location, Position, SymbolInformation, TextDocumentIdentifier, TextEdit, 7 InsertTextFormat, Location, Position, SymbolInformation, TextDocumentIdentifier, TextEdit,
7 RenameParams, WorkspaceEdit, PrepareRenameResponse 8 RenameParams, WorkspaceEdit, PrepareRenameResponse
8}; 9};
9use ra_analysis::{FileId, FoldKind, JobToken, Query, RunnableKind}; 10use ra_analysis::{FileId, FoldKind, Query, RunnableKind};
10use ra_syntax::text_utils::contains_offset_nonstrict; 11use ra_syntax::text_utils::contains_offset_nonstrict;
11use serde_json::to_value; 12use serde_json::to_value;
12 13
@@ -18,12 +19,9 @@ use crate::{
18 Result, 19 Result,
19}; 20};
20 21
21use std::collections::HashMap;
22
23pub fn handle_syntax_tree( 22pub fn handle_syntax_tree(
24 world: ServerWorld, 23 world: ServerWorld,
25 params: req::SyntaxTreeParams, 24 params: req::SyntaxTreeParams,
26 _token: JobToken,
27) -> Result<String> { 25) -> Result<String> {
28 let id = params.text_document.try_conv_with(&world)?; 26 let id = params.text_document.try_conv_with(&world)?;
29 let res = world.analysis().syntax_tree(id); 27 let res = world.analysis().syntax_tree(id);
@@ -33,7 +31,6 @@ pub fn handle_syntax_tree(
33pub fn handle_extend_selection( 31pub fn handle_extend_selection(
34 world: ServerWorld, 32 world: ServerWorld,
35 params: req::ExtendSelectionParams, 33 params: req::ExtendSelectionParams,
36 _token: JobToken,
37) -> Result<req::ExtendSelectionResult> { 34) -> Result<req::ExtendSelectionResult> {
38 let file_id = params.text_document.try_conv_with(&world)?; 35 let file_id = params.text_document.try_conv_with(&world)?;
39 let file = world.analysis().file_syntax(file_id); 36 let file = world.analysis().file_syntax(file_id);
@@ -51,7 +48,6 @@ pub fn handle_extend_selection(
51pub fn handle_find_matching_brace( 48pub fn handle_find_matching_brace(
52 world: ServerWorld, 49 world: ServerWorld,
53 params: req::FindMatchingBraceParams, 50 params: req::FindMatchingBraceParams,
54 _token: JobToken,
55) -> Result<Vec<Position>> { 51) -> Result<Vec<Position>> {
56 let file_id = params.text_document.try_conv_with(&world)?; 52 let file_id = params.text_document.try_conv_with(&world)?;
57 let file = world.analysis().file_syntax(file_id); 53 let file = world.analysis().file_syntax(file_id);
@@ -74,7 +70,6 @@ pub fn handle_find_matching_brace(
74pub fn handle_join_lines( 70pub fn handle_join_lines(
75 world: ServerWorld, 71 world: ServerWorld,
76 params: req::JoinLinesParams, 72 params: req::JoinLinesParams,
77 _token: JobToken,
78) -> Result<req::SourceChange> { 73) -> Result<req::SourceChange> {
79 let file_id = params.text_document.try_conv_with(&world)?; 74 let file_id = params.text_document.try_conv_with(&world)?;
80 let line_index = world.analysis().file_line_index(file_id); 75 let line_index = world.analysis().file_line_index(file_id);
@@ -88,7 +83,6 @@ pub fn handle_join_lines(
88pub fn handle_on_enter( 83pub fn handle_on_enter(
89 world: ServerWorld, 84 world: ServerWorld,
90 params: req::TextDocumentPositionParams, 85 params: req::TextDocumentPositionParams,
91 _token: JobToken,
92) -> Result<Option<req::SourceChange>> { 86) -> Result<Option<req::SourceChange>> {
93 let file_id = params.text_document.try_conv_with(&world)?; 87 let file_id = params.text_document.try_conv_with(&world)?;
94 let line_index = world.analysis().file_line_index(file_id); 88 let line_index = world.analysis().file_line_index(file_id);
@@ -102,7 +96,6 @@ pub fn handle_on_enter(
102pub fn handle_on_type_formatting( 96pub fn handle_on_type_formatting(
103 world: ServerWorld, 97 world: ServerWorld,
104 params: req::DocumentOnTypeFormattingParams, 98 params: req::DocumentOnTypeFormattingParams,
105 _token: JobToken,
106) -> Result<Option<Vec<TextEdit>>> { 99) -> Result<Option<Vec<TextEdit>>> {
107 if params.ch != "=" { 100 if params.ch != "=" {
108 return Ok(None); 101 return Ok(None);
@@ -122,7 +115,6 @@ pub fn handle_on_type_formatting(
122pub fn handle_document_symbol( 115pub fn handle_document_symbol(
123 world: ServerWorld, 116 world: ServerWorld,
124 params: req::DocumentSymbolParams, 117 params: req::DocumentSymbolParams,
125 _token: JobToken,
126) -> Result<Option<req::DocumentSymbolResponse>> { 118) -> Result<Option<req::DocumentSymbolResponse>> {
127 let file_id = params.text_document.try_conv_with(&world)?; 119 let file_id = params.text_document.try_conv_with(&world)?;
128 let line_index = world.analysis().file_line_index(file_id); 120 let line_index = world.analysis().file_line_index(file_id);
@@ -161,7 +153,6 @@ pub fn handle_document_symbol(
161pub fn handle_workspace_symbol( 153pub fn handle_workspace_symbol(
162 world: ServerWorld, 154 world: ServerWorld,
163 params: req::WorkspaceSymbolParams, 155 params: req::WorkspaceSymbolParams,
164 token: JobToken,
165) -> Result<Option<Vec<SymbolInformation>>> { 156) -> Result<Option<Vec<SymbolInformation>>> {
166 let all_symbols = params.query.contains("#"); 157 let all_symbols = params.query.contains("#");
167 let libs = params.query.contains("*"); 158 let libs = params.query.contains("*");
@@ -181,11 +172,11 @@ pub fn handle_workspace_symbol(
181 q.limit(128); 172 q.limit(128);
182 q 173 q
183 }; 174 };
184 let mut res = exec_query(&world, query, &token)?; 175 let mut res = exec_query(&world, query)?;
185 if res.is_empty() && !all_symbols { 176 if res.is_empty() && !all_symbols {
186 let mut query = Query::new(params.query); 177 let mut query = Query::new(params.query);
187 query.limit(128); 178 query.limit(128);
188 res = exec_query(&world, query, &token)?; 179 res = exec_query(&world, query)?;
189 } 180 }
190 181
191 return Ok(Some(res)); 182 return Ok(Some(res));
@@ -193,10 +184,9 @@ pub fn handle_workspace_symbol(
193 fn exec_query( 184 fn exec_query(
194 world: &ServerWorld, 185 world: &ServerWorld,
195 query: Query, 186 query: Query,
196 token: &JobToken,
197 ) -> Result<Vec<SymbolInformation>> { 187 ) -> Result<Vec<SymbolInformation>> {
198 let mut res = Vec::new(); 188 let mut res = Vec::new();
199 for (file_id, symbol) in world.analysis().symbol_search(query, token) { 189 for (file_id, symbol) in world.analysis().symbol_search(query)? {
200 let line_index = world.analysis().file_line_index(file_id); 190 let line_index = world.analysis().file_line_index(file_id);
201 let info = SymbolInformation { 191 let info = SymbolInformation {
202 name: symbol.name.to_string(), 192 name: symbol.name.to_string(),
@@ -214,7 +204,6 @@ pub fn handle_workspace_symbol(
214pub fn handle_goto_definition( 204pub fn handle_goto_definition(
215 world: ServerWorld, 205 world: ServerWorld,
216 params: req::TextDocumentPositionParams, 206 params: req::TextDocumentPositionParams,
217 token: JobToken,
218) -> Result<Option<req::GotoDefinitionResponse>> { 207) -> Result<Option<req::GotoDefinitionResponse>> {
219 let file_id = params.text_document.try_conv_with(&world)?; 208 let file_id = params.text_document.try_conv_with(&world)?;
220 let line_index = world.analysis().file_line_index(file_id); 209 let line_index = world.analysis().file_line_index(file_id);
@@ -222,7 +211,7 @@ pub fn handle_goto_definition(
222 let mut res = Vec::new(); 211 let mut res = Vec::new();
223 for (file_id, symbol) in world 212 for (file_id, symbol) in world
224 .analysis() 213 .analysis()
225 .approximately_resolve_symbol(file_id, offset, &token) 214 .approximately_resolve_symbol(file_id, offset)?
226 { 215 {
227 let line_index = world.analysis().file_line_index(file_id); 216 let line_index = world.analysis().file_line_index(file_id);
228 let location = to_location(file_id, symbol.node_range, &world, &line_index)?; 217 let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
@@ -234,11 +223,10 @@ pub fn handle_goto_definition(
234pub fn handle_parent_module( 223pub fn handle_parent_module(
235 world: ServerWorld, 224 world: ServerWorld,
236 params: TextDocumentIdentifier, 225 params: TextDocumentIdentifier,
237 _token: JobToken,
238) -> Result<Vec<Location>> { 226) -> Result<Vec<Location>> {
239 let file_id = params.try_conv_with(&world)?; 227 let file_id = params.try_conv_with(&world)?;
240 let mut res = Vec::new(); 228 let mut res = Vec::new();
241 for (file_id, symbol) in world.analysis().parent_module(file_id) { 229 for (file_id, symbol) in world.analysis().parent_module(file_id)? {
242 let line_index = world.analysis().file_line_index(file_id); 230 let line_index = world.analysis().file_line_index(file_id);
243 let location = to_location(file_id, symbol.node_range, &world, &line_index)?; 231 let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
244 res.push(location); 232 res.push(location);
@@ -249,20 +237,19 @@ pub fn handle_parent_module(
249pub fn handle_runnables( 237pub fn handle_runnables(
250 world: ServerWorld, 238 world: ServerWorld,
251 params: req::RunnablesParams, 239 params: req::RunnablesParams,
252 _token: JobToken,
253) -> Result<Vec<req::Runnable>> { 240) -> Result<Vec<req::Runnable>> {
254 let file_id = params.text_document.try_conv_with(&world)?; 241 let file_id = params.text_document.try_conv_with(&world)?;
255 let line_index = world.analysis().file_line_index(file_id); 242 let line_index = world.analysis().file_line_index(file_id);
256 let offset = params.position.map(|it| it.conv_with(&line_index)); 243 let offset = params.position.map(|it| it.conv_with(&line_index));
257 let mut res = Vec::new(); 244 let mut res = Vec::new();
258 for runnable in world.analysis().runnables(file_id) { 245 for runnable in world.analysis().runnables(file_id)? {
259 if let Some(offset) = offset { 246 if let Some(offset) = offset {
260 if !contains_offset_nonstrict(runnable.range, offset) { 247 if !contains_offset_nonstrict(runnable.range, offset) {
261 continue; 248 continue;
262 } 249 }
263 } 250 }
264 251
265 let args = runnable_args(&world, file_id, &runnable.kind); 252 let args = runnable_args(&world, file_id, &runnable.kind)?;
266 253
267 let r = req::Runnable { 254 let r = req::Runnable {
268 range: runnable.range.conv_with(&line_index), 255 range: runnable.range.conv_with(&line_index),
@@ -282,9 +269,9 @@ pub fn handle_runnables(
282 } 269 }
283 return Ok(res); 270 return Ok(res);
284 271
285 fn runnable_args(world: &ServerWorld, file_id: FileId, kind: &RunnableKind) -> Vec<String> { 272 fn runnable_args(world: &ServerWorld, file_id: FileId, kind: &RunnableKind) -> Result<Vec<String>> {
286 let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id).first() { 273 let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id)?.first() {
287 let file_id = world.analysis().crate_root(crate_id); 274 let file_id = world.analysis().crate_root(crate_id)?;
288 let path = world.path_map.get_path(file_id); 275 let path = world.path_map.get_path(file_id);
289 world 276 world
290 .workspaces 277 .workspaces
@@ -319,7 +306,7 @@ pub fn handle_runnables(
319 } 306 }
320 } 307 }
321 } 308 }
322 res 309 Ok(res)
323 } 310 }
324 311
325 fn spec_args(pkg_name: &str, tgt_name: &str, tgt_kind: TargetKind, buf: &mut Vec<String>) { 312 fn spec_args(pkg_name: &str, tgt_name: &str, tgt_kind: TargetKind, buf: &mut Vec<String>) {
@@ -353,21 +340,19 @@ pub fn handle_runnables(
353pub fn handle_decorations( 340pub fn handle_decorations(
354 world: ServerWorld, 341 world: ServerWorld,
355 params: TextDocumentIdentifier, 342 params: TextDocumentIdentifier,
356 _token: JobToken,
357) -> Result<Vec<Decoration>> { 343) -> Result<Vec<Decoration>> {
358 let file_id = params.try_conv_with(&world)?; 344 let file_id = params.try_conv_with(&world)?;
359 Ok(highlight(&world, file_id)) 345 highlight(&world, file_id)
360} 346}
361 347
362pub fn handle_completion( 348pub fn handle_completion(
363 world: ServerWorld, 349 world: ServerWorld,
364 params: req::CompletionParams, 350 params: req::CompletionParams,
365 _token: JobToken,
366) -> Result<Option<req::CompletionResponse>> { 351) -> Result<Option<req::CompletionResponse>> {
367 let file_id = params.text_document.try_conv_with(&world)?; 352 let file_id = params.text_document.try_conv_with(&world)?;
368 let line_index = world.analysis().file_line_index(file_id); 353 let line_index = world.analysis().file_line_index(file_id);
369 let offset = params.position.conv_with(&line_index); 354 let offset = params.position.conv_with(&line_index);
370 let items = match world.analysis().completions(file_id, offset) { 355 let items = match world.analysis().completions(file_id, offset)? {
371 None => return Ok(None), 356 None => return Ok(None),
372 Some(items) => items, 357 Some(items) => items,
373 }; 358 };
@@ -394,7 +379,6 @@ pub fn handle_completion(
394pub fn handle_folding_range( 379pub fn handle_folding_range(
395 world: ServerWorld, 380 world: ServerWorld,
396 params: FoldingRangeParams, 381 params: FoldingRangeParams,
397 _token: JobToken,
398) -> Result<Option<Vec<FoldingRange>>> { 382) -> Result<Option<Vec<FoldingRange>>> {
399 let file_id = params.text_document.try_conv_with(&world)?; 383 let file_id = params.text_document.try_conv_with(&world)?;
400 let line_index = world.analysis().file_line_index(file_id); 384 let line_index = world.analysis().file_line_index(file_id);
@@ -427,7 +411,6 @@ pub fn handle_folding_range(
427pub fn handle_signature_help( 411pub fn handle_signature_help(
428 world: ServerWorld, 412 world: ServerWorld,
429 params: req::TextDocumentPositionParams, 413 params: req::TextDocumentPositionParams,
430 token: JobToken,
431) -> Result<Option<req::SignatureHelp>> { 414) -> Result<Option<req::SignatureHelp>> {
432 use languageserver_types::{ParameterInformation, SignatureInformation}; 415 use languageserver_types::{ParameterInformation, SignatureInformation};
433 416
@@ -436,7 +419,7 @@ pub fn handle_signature_help(
436 let offset = params.position.conv_with(&line_index); 419 let offset = params.position.conv_with(&line_index);
437 420
438 if let Some((descriptor, active_param)) = 421 if let Some((descriptor, active_param)) =
439 world.analysis().resolve_callable(file_id, offset, &token) 422 world.analysis().resolve_callable(file_id, offset)?
440 { 423 {
441 let parameters: Vec<ParameterInformation> = descriptor 424 let parameters: Vec<ParameterInformation> = descriptor
442 .params 425 .params
@@ -466,7 +449,6 @@ pub fn handle_signature_help(
466pub fn handle_prepare_rename( 449pub fn handle_prepare_rename(
467 world: ServerWorld, 450 world: ServerWorld,
468 params: req::TextDocumentPositionParams, 451 params: req::TextDocumentPositionParams,
469 token: JobToken,
470) -> Result<Option<PrepareRenameResponse>> { 452) -> Result<Option<PrepareRenameResponse>> {
471 let file_id = params.text_document.try_conv_with(&world)?; 453 let file_id = params.text_document.try_conv_with(&world)?;
472 let line_index = world.analysis().file_line_index(file_id); 454 let line_index = world.analysis().file_line_index(file_id);
@@ -474,7 +456,7 @@ pub fn handle_prepare_rename(
474 456
475 // We support renaming references like handle_rename does. 457 // We support renaming references like handle_rename does.
476 // In the future we may want to reject the renaming of things like keywords here too. 458 // In the future we may want to reject the renaming of things like keywords here too.
477 let refs = world.analysis().find_all_refs(file_id, offset, &token); 459 let refs = world.analysis().find_all_refs(file_id, offset)?;
478 if refs.is_empty() { 460 if refs.is_empty() {
479 return Ok(None); 461 return Ok(None);
480 } 462 }
@@ -488,7 +470,6 @@ pub fn handle_prepare_rename(
488pub fn handle_rename( 470pub fn handle_rename(
489 world: ServerWorld, 471 world: ServerWorld,
490 params: RenameParams, 472 params: RenameParams,
491 token: JobToken,
492) -> Result<Option<WorkspaceEdit>> { 473) -> Result<Option<WorkspaceEdit>> {
493 let file_id = params.text_document.try_conv_with(&world)?; 474 let file_id = params.text_document.try_conv_with(&world)?;
494 let line_index = world.analysis().file_line_index(file_id); 475 let line_index = world.analysis().file_line_index(file_id);
@@ -498,7 +479,7 @@ pub fn handle_rename(
498 return Ok(None); 479 return Ok(None);
499 } 480 }
500 481
501 let refs = world.analysis().find_all_refs(file_id, offset, &token); 482 let refs = world.analysis().find_all_refs(file_id, offset)?;
502 if refs.is_empty() { 483 if refs.is_empty() {
503 return Ok(None); 484 return Ok(None);
504 } 485 }
@@ -525,13 +506,12 @@ pub fn handle_rename(
525pub fn handle_references( 506pub fn handle_references(
526 world: ServerWorld, 507 world: ServerWorld,
527 params: req::ReferenceParams, 508 params: req::ReferenceParams,
528 token: JobToken,
529) -> Result<Option<Vec<Location>>> { 509) -> Result<Option<Vec<Location>>> {
530 let file_id = params.text_document.try_conv_with(&world)?; 510 let file_id = params.text_document.try_conv_with(&world)?;
531 let line_index = world.analysis().file_line_index(file_id); 511 let line_index = world.analysis().file_line_index(file_id);
532 let offset = params.position.conv_with(&line_index); 512 let offset = params.position.conv_with(&line_index);
533 513
534 let refs = world.analysis().find_all_refs(file_id, offset, &token); 514 let refs = world.analysis().find_all_refs(file_id, offset)?;
535 515
536 Ok(Some(refs.into_iter() 516 Ok(Some(refs.into_iter()
537 .filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok()) 517 .filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok())
@@ -541,16 +521,15 @@ pub fn handle_references(
541pub fn handle_code_action( 521pub fn handle_code_action(
542 world: ServerWorld, 522 world: ServerWorld,
543 params: req::CodeActionParams, 523 params: req::CodeActionParams,
544 _token: JobToken,
545) -> Result<Option<CodeActionResponse>> { 524) -> Result<Option<CodeActionResponse>> {
546 let file_id = params.text_document.try_conv_with(&world)?; 525 let file_id = params.text_document.try_conv_with(&world)?;
547 let line_index = world.analysis().file_line_index(file_id); 526 let line_index = world.analysis().file_line_index(file_id);
548 let range = params.range.conv_with(&line_index); 527 let range = params.range.conv_with(&line_index);
549 528
550 let assists = world.analysis().assists(file_id, range).into_iter(); 529 let assists = world.analysis().assists(file_id, range)?.into_iter();
551 let fixes = world 530 let fixes = world
552 .analysis() 531 .analysis()
553 .diagnostics(file_id) 532 .diagnostics(file_id)?
554 .into_iter() 533 .into_iter()
555 .filter_map(|d| Some((d.range, d.fix?))) 534 .filter_map(|d| Some((d.range, d.fix?)))
556 .filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start())) 535 .filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start()))
@@ -579,7 +558,7 @@ pub fn publish_diagnostics(
579 let line_index = world.analysis().file_line_index(file_id); 558 let line_index = world.analysis().file_line_index(file_id);
580 let diagnostics = world 559 let diagnostics = world
581 .analysis() 560 .analysis()
582 .diagnostics(file_id) 561 .diagnostics(file_id)?
583 .into_iter() 562 .into_iter()
584 .map(|d| Diagnostic { 563 .map(|d| Diagnostic {
585 range: d.range.conv_with(&line_index), 564 range: d.range.conv_with(&line_index),
@@ -600,19 +579,20 @@ pub fn publish_decorations(
600 let uri = world.file_id_to_uri(file_id)?; 579 let uri = world.file_id_to_uri(file_id)?;
601 Ok(req::PublishDecorationsParams { 580 Ok(req::PublishDecorationsParams {
602 uri, 581 uri,
603 decorations: highlight(&world, file_id), 582 decorations: highlight(&world, file_id)?,
604 }) 583 })
605} 584}
606 585
607fn highlight(world: &ServerWorld, file_id: FileId) -> Vec<Decoration> { 586fn highlight(world: &ServerWorld, file_id: FileId) -> Result<Vec<Decoration>> {
608 let line_index = world.analysis().file_line_index(file_id); 587 let line_index = world.analysis().file_line_index(file_id);
609 world 588 let res = world
610 .analysis() 589 .analysis()
611 .highlight(file_id) 590 .highlight(file_id)?
612 .into_iter() 591 .into_iter()
613 .map(|h| Decoration { 592 .map(|h| Decoration {
614 range: h.range.conv_with(&line_index), 593 range: h.range.conv_with(&line_index),
615 tag: h.tag, 594 tag: h.tag,
616 }) 595 })
617 .collect() 596 .collect();
597 Ok(res)
618} 598}
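`highlight` gets the same treatment applied to a helper: the body still maps highlight ranges into `Decoration`s, but the now-fallible query forces a `Result` return and an explicit `Ok(res)`. As a general pattern, with stand-in types rather than the crate's:

    #[derive(Debug)]
    struct Canceled;
    type Cancelable<T> = Result<T, Canceled>;

    struct Highlight { start: u32, end: u32, tag: &'static str }
    struct Decoration { range: (u32, u32), tag: &'static str }

    fn highlight_query() -> Cancelable<Vec<Highlight>> {
        Ok(vec![Highlight { start: 0, end: 2, tag: "keyword" }])
    }

    fn decorations() -> Cancelable<Vec<Decoration>> {
        let res = highlight_query()?          // the only fallible step
            .into_iter()
            .map(|h| Decoration { range: (h.start, h.end), tag: h.tag })
            .collect();
        Ok(res)
    }

Collecting first and wrapping in `Ok` reads more directly here than a fallible `collect`, since only the initial query can fail.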
diff --git a/crates/ra_lsp_server/src/main_loop/mod.rs b/crates/ra_lsp_server/src/main_loop/mod.rs
index 165f2e78f..b35ebd38b 100644
--- a/crates/ra_lsp_server/src/main_loop/mod.rs
+++ b/crates/ra_lsp_server/src/main_loop/mod.rs
@@ -8,9 +8,9 @@ use gen_lsp_server::{
8 handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse, 8 handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse,
9}; 9};
10use languageserver_types::NumberOrString; 10use languageserver_types::NumberOrString;
11use ra_analysis::{FileId, JobHandle, JobToken, LibraryData}; 11use ra_analysis::{FileId, LibraryData};
12use rayon::{self, ThreadPool}; 12use rayon::{self, ThreadPool};
13use rustc_hash::FxHashMap; 13use rustc_hash::FxHashSet;
14use serde::{de::DeserializeOwned, Serialize}; 14use serde::{de::DeserializeOwned, Serialize};
15 15
16use crate::{ 16use crate::{
@@ -47,7 +47,7 @@ pub fn main_loop(
47 info!("server initialized, serving requests"); 47 info!("server initialized, serving requests");
48 let mut state = ServerWorldState::new(); 48 let mut state = ServerWorldState::new();
49 49
50 let mut pending_requests = FxHashMap::default(); 50 let mut pending_requests = FxHashSet::default();
51 let mut subs = Subscriptions::new(); 51 let mut subs = Subscriptions::new();
52 let main_res = main_loop_inner( 52 let main_res = main_loop_inner(
53 internal_mode, 53 internal_mode,
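With no `JobHandle` to store per request, the bookkeeping shrinks from a map to a set of request ids. A self-contained sketch using the same `rustc_hash` types these crates already depend on:

    use rustc_hash::FxHashSet;

    fn main() {
        // Before: FxHashMap<u64, JobHandle>, where the handle carried the cancel lever.
        // After: only the id is tracked; it records that a response is still owed.
        let mut pending_requests: FxHashSet<u64> = FxHashSet::default();
        pending_requests.insert(1);
        assert!(pending_requests.contains(&1));
        pending_requests.remove(&1); // response sent, or request canceled by the client
    }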
@@ -92,7 +92,7 @@ fn main_loop_inner(
92 fs_worker: Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, 92 fs_worker: Worker<PathBuf, (PathBuf, Vec<FileEvent>)>,
93 ws_worker: Worker<PathBuf, Result<CargoWorkspace>>, 93 ws_worker: Worker<PathBuf, Result<CargoWorkspace>>,
94 state: &mut ServerWorldState, 94 state: &mut ServerWorldState,
95 pending_requests: &mut FxHashMap<u64, JobHandle>, 95 pending_requests: &mut FxHashSet<u64>,
96 subs: &mut Subscriptions, 96 subs: &mut Subscriptions,
97) -> Result<()> { 97) -> Result<()> {
98 let (libdata_sender, libdata_receiver) = unbounded(); 98 let (libdata_sender, libdata_receiver) = unbounded();
@@ -204,14 +204,13 @@ fn main_loop_inner(
204fn on_task( 204fn on_task(
205 task: Task, 205 task: Task,
206 msg_sender: &Sender<RawMessage>, 206 msg_sender: &Sender<RawMessage>,
207 pending_requests: &mut FxHashMap<u64, JobHandle>, 207 pending_requests: &mut FxHashSet<u64>,
208) { 208) {
209 match task { 209 match task {
210 Task::Respond(response) => { 210 Task::Respond(response) => {
211 if let Some(handle) = pending_requests.remove(&response.id) { 211 if pending_requests.remove(&response.id) {
212 assert!(handle.has_completed()); 212 msg_sender.send(RawMessage::Response(response))
213 } 213 }
214 msg_sender.send(RawMessage::Response(response))
215 } 214 }
216 Task::Notify(n) => msg_sender.send(RawMessage::Notification(n)), 215 Task::Notify(n) => msg_sender.send(RawMessage::Notification(n)),
217 } 216 }
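One behavioural detail in the `on_task` hunk is easy to miss: the response is now sent only when the id is still in the set, so a request the client has already canceled gets no reply at all. A condensed stand-in:

    use std::collections::HashSet;

    struct Response { id: u64 }

    fn on_task_sketch(response: Response, pending: &mut HashSet<u64>, out: &mut Vec<u64>) {
        // `remove` doubles as the "is this request still live?" check:
        // true means nobody canceled it, so the response goes out.
        if pending.remove(&response.id) {
            out.push(response.id); // stands in for msg_sender.send(RawMessage::Response(..))
        }
        // Otherwise the result is silently dropped.
    }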
@@ -219,7 +218,7 @@ fn on_task(
219 218
220fn on_request( 219fn on_request(
221 world: &mut ServerWorldState, 220 world: &mut ServerWorldState,
222 pending_requests: &mut FxHashMap<u64, JobHandle>, 221 pending_requests: &mut FxHashSet<u64>,
223 pool: &ThreadPool, 222 pool: &ThreadPool,
224 sender: &Sender<Task>, 223 sender: &Sender<Task>,
225 req: RawRequest, 224 req: RawRequest,
@@ -253,8 +252,8 @@ fn on_request(
253 .on::<req::References>(handlers::handle_references)? 252 .on::<req::References>(handlers::handle_references)?
254 .finish(); 253 .finish();
255 match req { 254 match req {
256 Ok((id, handle)) => { 255 Ok(id) => {
257 let inserted = pending_requests.insert(id, handle).is_none(); 256 let inserted = pending_requests.insert(id);
258 assert!(inserted, "duplicate request: {}", id); 257 assert!(inserted, "duplicate request: {}", id);
259 Ok(None) 258 Ok(None)
260 } 259 }
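The registration side is the mirror image: `insert` on a set returns `false` for a duplicate id, which is what the assert keys off. Sketch:

    use std::collections::HashSet;

    fn register(pending: &mut HashSet<u64>, id: u64) {
        let inserted = pending.insert(id);
        assert!(inserted, "duplicate request: {}", id);
    }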
@@ -265,7 +264,7 @@ fn on_request(
265fn on_notification( 264fn on_notification(
266 msg_sender: &Sender<RawMessage>, 265 msg_sender: &Sender<RawMessage>,
267 state: &mut ServerWorldState, 266 state: &mut ServerWorldState,
268 pending_requests: &mut FxHashMap<u64, JobHandle>, 267 pending_requests: &mut FxHashSet<u64>,
269 subs: &mut Subscriptions, 268 subs: &mut Subscriptions,
270 not: RawNotification, 269 not: RawNotification,
271) -> Result<()> { 270) -> Result<()> {
@@ -277,9 +276,7 @@ fn on_notification(
277 panic!("string id's not supported: {:?}", id); 276 panic!("string id's not supported: {:?}", id);
278 } 277 }
279 }; 278 };
280 if let Some(handle) = pending_requests.remove(&id) { 279 pending_requests.remove(&id);
281 handle.cancel();
282 }
283 return Ok(()); 280 return Ok(());
284 } 281 }
285 Err(not) => not, 282 Err(not) => not,
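Client-side `$/cancelRequest` therefore no longer interrupts anything directly: the id is dropped from `pending_requests`, so the eventual result is discarded in `on_task`, and any long-running analysis work is left to salsa, which cancels still-pending queries when the next change starts a new revision. In miniature:

    use std::collections::HashSet;

    // Handling $/cancelRequest: nothing is aborted here. We only stop tracking
    // the id, so whatever result eventually arrives will not be sent back.
    fn on_cancel_request(pending: &mut HashSet<u64>, id: u64) {
        pending.remove(&id);
    }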
@@ -336,7 +333,7 @@ fn on_notification(
336 333
337struct PoolDispatcher<'a> { 334struct PoolDispatcher<'a> {
338 req: Option<RawRequest>, 335 req: Option<RawRequest>,
339 res: Option<(u64, JobHandle)>, 336 res: Option<u64>,
340 pool: &'a ThreadPool, 337 pool: &'a ThreadPool,
341 world: &'a ServerWorldState, 338 world: &'a ServerWorldState,
342 sender: &'a Sender<Task>, 339 sender: &'a Sender<Task>,
@@ -345,7 +342,7 @@ struct PoolDispatcher<'a> {
345impl<'a> PoolDispatcher<'a> { 342impl<'a> PoolDispatcher<'a> {
346 fn on<'b, R>( 343 fn on<'b, R>(
347 &'b mut self, 344 &'b mut self,
348 f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result>, 345 f: fn(ServerWorld, R::Params) -> Result<R::Result>,
349 ) -> Result<&'b mut Self> 346 ) -> Result<&'b mut Self>
350 where 347 where
351 R: req::Request, 348 R: req::Request,
@@ -358,11 +355,10 @@ impl<'a> PoolDispatcher<'a> {
358 }; 355 };
359 match req.cast::<R>() { 356 match req.cast::<R>() {
360 Ok((id, params)) => { 357 Ok((id, params)) => {
361 let (handle, token) = JobHandle::new();
362 let world = self.world.snapshot(); 358 let world = self.world.snapshot();
363 let sender = self.sender.clone(); 359 let sender = self.sender.clone();
364 self.pool.spawn(move || { 360 self.pool.spawn(move || {
365 let resp = match f(world, params, token) { 361 let resp = match f(world, params) {
366 Ok(resp) => RawResponse::ok::<R>(id, &resp), 362 Ok(resp) => RawResponse::ok::<R>(id, &resp),
367 Err(e) => { 363 Err(e) => {
368 RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string()) 364 RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string())
@@ -371,14 +367,14 @@ impl<'a> PoolDispatcher<'a> {
371 let task = Task::Respond(resp); 367 let task = Task::Respond(resp);
372 sender.send(task); 368 sender.send(task);
373 }); 369 });
374 self.res = Some((id, handle)); 370 self.res = Some(id);
375 } 371 }
376 Err(req) => self.req = Some(req), 372 Err(req) => self.req = Some(req),
377 } 373 }
378 Ok(self) 374 Ok(self)
379 } 375 }
380 376
381 fn finish(&mut self) -> ::std::result::Result<(u64, JobHandle), RawRequest> { 377 fn finish(&mut self) -> ::std::result::Result<u64, RawRequest> {
382 match (self.res.take(), self.req.take()) { 378 match (self.res.take(), self.req.take()) {
383 (Some(res), None) => Ok(res), 379 (Some(res), None) => Ok(res),
384 (None, Some(req)) => Err(req), 380 (None, Some(req)) => Err(req),
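Putting the dispatcher changes together: `on` now accepts a plain `fn(ServerWorld, R::Params) -> Result<R::Result>`, runs it on the pool, and remembers only the request id, which `finish` hands back for registration in `pending_requests`. A much-simplified stand-alone model of that flow, with channels and types that are stand-ins rather than the crate's:

    use std::sync::mpsc::Sender;
    use std::thread;

    struct World;                            // stands in for the ServerWorld snapshot
    struct Task { id: u64, body: String }    // stands in for Task::Respond(RawResponse)

    fn dispatch<P, R>(
        id: u64,
        params: P,
        world: World,
        sender: Sender<Task>,
        f: fn(World, P) -> Result<R, String>,
    ) -> u64
    where
        P: Send + 'static,
        R: ToString + Send + 'static,
    {
        thread::spawn(move || {
            // Ok and Err both become a response; a canceled query surfaces as Err.
            let body = match f(world, params) {
                Ok(r) => r.to_string(),
                Err(e) => format!("error: {}", e),
            };
            sender.send(Task { id, body }).ok();
        });
        id // the caller stores just this in pending_requests
    }

The point of the simplification is visible in the signature: nothing about cancellation is threaded through `f` any more.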
diff --git a/crates/ra_lsp_server/src/path_map.rs b/crates/ra_lsp_server/src/path_map.rs
index 585013acd..d32829382 100644
--- a/crates/ra_lsp_server/src/path_map.rs
+++ b/crates/ra_lsp_server/src/path_map.rs
@@ -1,9 +1,9 @@
1use std::path::{Component, Path, PathBuf};
2
1use im; 3use im;
2use ra_analysis::{FileId, FileResolver}; 4use ra_analysis::{FileId, FileResolver};
3use relative_path::RelativePath; 5use relative_path::RelativePath;
4 6
5use std::path::{Component, Path, PathBuf};
6
7#[derive(Debug, Clone, Copy, PartialEq, Eq)] 7#[derive(Debug, Clone, Copy, PartialEq, Eq)]
8pub enum Root { 8pub enum Root {
9 Workspace, 9 Workspace,
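The hunk above and the two files below change only import order: standard library first, then external crates, then the crate's own items. Spelled out as a toy example (the grouping is inferred from these three hunks, not a documented rule, and the `config` module is invented):

    // 1. standard library
    use std::path::PathBuf;

    // 2. external crates
    use rustc_hash::FxHashMap;

    // 3. the current crate's own modules
    use crate::config::Config;

    mod config {
        pub struct Config;
    }

    fn main() {
        let _path = PathBuf::new();
        let _map: FxHashMap<u32, u32> = FxHashMap::default();
        let _cfg = Config;
    }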
diff --git a/crates/ra_lsp_server/src/project_model.rs b/crates/ra_lsp_server/src/project_model.rs
index d170ceb73..04e2ef9c8 100644
--- a/crates/ra_lsp_server/src/project_model.rs
+++ b/crates/ra_lsp_server/src/project_model.rs
@@ -1,9 +1,9 @@
1use std::path::{Path, PathBuf};
2
1use cargo_metadata::{metadata_run, CargoOpt}; 3use cargo_metadata::{metadata_run, CargoOpt};
2use ra_syntax::SmolStr; 4use ra_syntax::SmolStr;
3use rustc_hash::{FxHashMap, FxHashSet}; 5use rustc_hash::{FxHashMap, FxHashSet};
4 6
5use std::path::{Path, PathBuf};
6
7use crate::{ 7use crate::{
8 thread_watcher::{ThreadWatcher, Worker}, 8 thread_watcher::{ThreadWatcher, Worker},
9 Result, 9 Result,
diff --git a/crates/ra_lsp_server/src/thread_watcher.rs b/crates/ra_lsp_server/src/thread_watcher.rs
index 67952eb74..51b35fa66 100644
--- a/crates/ra_lsp_server/src/thread_watcher.rs
+++ b/crates/ra_lsp_server/src/thread_watcher.rs
@@ -1,8 +1,9 @@
1use crate::Result; 1use std::thread;
2
2use crossbeam_channel::{bounded, unbounded, Receiver, Sender}; 3use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
3use drop_bomb::DropBomb; 4use drop_bomb::DropBomb;
4 5
5use std::thread; 6use crate::Result;
6 7
7pub struct Worker<I, O> { 8pub struct Worker<I, O> {
8 pub inp: Sender<I>, 9 pub inp: Sender<I>,