Diffstat (limited to 'crates')
29 files changed, 1053 insertions, 692 deletions
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
index b8d774eb5..3d0f13f34 100644
--- a/crates/ra_analysis/src/db.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -30,11 +30,11 @@ impl Default for RootDatabase { | |||
30 | runtime: salsa::Runtime::default(), | 30 | runtime: salsa::Runtime::default(), |
31 | id_maps: Default::default(), | 31 | id_maps: Default::default(), |
32 | }; | 32 | }; |
33 | db.query_mut(ra_db::SourceRootQuery) | ||
34 | .set(ra_db::WORKSPACE, Default::default()); | ||
35 | db.query_mut(ra_db::CrateGraphQuery) | 33 | db.query_mut(ra_db::CrateGraphQuery) |
36 | .set((), Default::default()); | 34 | .set((), Default::default()); |
37 | db.query_mut(ra_db::LibrariesQuery) | 35 | db.query_mut(ra_db::LocalRootsQuery) |
36 | .set((), Default::default()); | ||
37 | db.query_mut(ra_db::LibraryRootsQuery) | ||
38 | .set((), Default::default()); | 38 | .set((), Default::default()); |
39 | db | 39 | db |
40 | } | 40 | } |
@@ -61,9 +61,11 @@ salsa::database_storage! { | |||
61 | pub(crate) struct RootDatabaseStorage for RootDatabase { | 61 | pub(crate) struct RootDatabaseStorage for RootDatabase { |
62 | impl ra_db::FilesDatabase { | 62 | impl ra_db::FilesDatabase { |
63 | fn file_text() for ra_db::FileTextQuery; | 63 | fn file_text() for ra_db::FileTextQuery; |
64 | fn file_relative_path() for ra_db::FileRelativePathQuery; | ||
64 | fn file_source_root() for ra_db::FileSourceRootQuery; | 65 | fn file_source_root() for ra_db::FileSourceRootQuery; |
65 | fn source_root() for ra_db::SourceRootQuery; | 66 | fn source_root() for ra_db::SourceRootQuery; |
66 | fn libraries() for ra_db::LibrariesQuery; | 67 | fn local_roots() for ra_db::LocalRootsQuery; |
68 | fn library_roots() for ra_db::LibraryRootsQuery; | ||
67 | fn crate_graph() for ra_db::CrateGraphQuery; | 69 | fn crate_graph() for ra_db::CrateGraphQuery; |
68 | } | 70 | } |
69 | impl ra_db::SyntaxDatabase { | 71 | impl ra_db::SyntaxDatabase { |
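
Taken together, the hunks above swap the single implicit WORKSPACE root for explicit local/library root lists and add a per-file relative-path input. A hand-written sketch of the resulting query surface, with std's HashMap standing in for FxHashMap (illustrative only; the real trait is generated by salsa's query_group!/database_storage! macros):

    use std::{collections::HashMap, sync::Arc};
    use relative_path::RelativePathBuf;

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub struct FileId(pub u32);
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub struct SourceRootId(pub u32);
    #[derive(Default, Debug)]
    pub struct SourceRoot {
        pub files: HashMap<RelativePathBuf, FileId>,
    }
    #[derive(Default, Debug)]
    pub struct CrateGraph;

    pub trait FilesDatabaseSketch {
        fn file_text(&self, file_id: FileId) -> Arc<String>;
        // New: path of a file, relative to the root of its source root.
        fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf;
        fn file_source_root(&self, file_id: FileId) -> SourceRootId;
        fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
        // Replaces the implicit WORKSPACE root.
        fn local_roots(&self) -> Arc<Vec<SourceRootId>>;
        // Renamed from `libraries()`.
        fn library_roots(&self) -> Arc<Vec<SourceRootId>>;
        fn crate_graph(&self) -> Arc<CrateGraph>;
    }
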
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index 0de0e2645..c4291885a 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -10,9 +10,8 @@ use ra_syntax::{ | |||
10 | SyntaxKind::*, | 10 | SyntaxKind::*, |
11 | SyntaxNodeRef, TextRange, TextUnit, | 11 | SyntaxNodeRef, TextRange, TextUnit, |
12 | }; | 12 | }; |
13 | use ra_db::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE, SyntaxDatabase}; | 13 | use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; |
14 | use rayon::prelude::*; | 14 | use rayon::prelude::*; |
15 | use rustc_hash::FxHashSet; | ||
16 | use salsa::{Database, ParallelDatabase}; | 15 | use salsa::{Database, ParallelDatabase}; |
17 | use hir::{ | 16 | use hir::{ |
18 | self, | 17 | self, |
@@ -24,8 +23,8 @@ use hir::{ | |||
24 | use crate::{ | 23 | use crate::{ |
25 | completion::{completions, CompletionItem}, | 24 | completion::{completions, CompletionItem}, |
26 | db, | 25 | db, |
27 | symbol_index::{SymbolIndex, SymbolsDatabase}, | 26 | symbol_index::{SymbolIndex, SymbolsDatabase, LibrarySymbolsQuery}, |
28 | AnalysisChange, Cancelable, CrateId, Diagnostic, FileId, | 27 | AnalysisChange, RootChange, Cancelable, CrateId, Diagnostic, FileId, |
29 | FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit, | 28 | FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit, |
30 | ReferenceResolution, | 29 | ReferenceResolution, |
31 | }; | 30 | }; |
@@ -44,70 +43,41 @@ impl AnalysisHostImpl { | |||
44 | pub fn apply_change(&mut self, change: AnalysisChange) { | 43 | pub fn apply_change(&mut self, change: AnalysisChange) { |
45 | log::info!("apply_change {:?}", change); | 44 | log::info!("apply_change {:?}", change); |
46 | // self.gc_syntax_trees(); | 45 | // self.gc_syntax_trees(); |
47 | 46 | if !change.new_roots.is_empty() { | |
48 | for (file_id, text) in change.files_changed { | 47 | let mut local_roots = Vec::clone(&self.db.local_roots()); |
49 | self.db | 48 | for (root_id, is_local) in change.new_roots { |
50 | .query_mut(ra_db::FileTextQuery) | ||
51 | .set(file_id, Arc::new(text)) | ||
52 | } | ||
53 | if !(change.files_added.is_empty() && change.files_removed.is_empty()) { | ||
54 | let file_resolver = change | ||
55 | .file_resolver | ||
56 | .expect("change resolver when changing set of files"); | ||
57 | let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE)); | ||
58 | for (file_id, text) in change.files_added { | ||
59 | self.db | 49 | self.db |
60 | .query_mut(ra_db::FileTextQuery) | 50 | .query_mut(ra_db::SourceRootQuery) |
61 | .set(file_id, Arc::new(text)); | 51 | .set(root_id, Default::default()); |
62 | self.db | 52 | if is_local { |
63 | .query_mut(ra_db::FileSourceRootQuery) | 53 | local_roots.push(root_id); |
64 | .set(file_id, ra_db::WORKSPACE); | 54 | } |
65 | source_root.files.insert(file_id); | ||
66 | } | ||
67 | for file_id in change.files_removed { | ||
68 | self.db | ||
69 | .query_mut(ra_db::FileTextQuery) | ||
70 | .set(file_id, Arc::new(String::new())); | ||
71 | source_root.files.remove(&file_id); | ||
72 | } | 55 | } |
73 | source_root.file_resolver = file_resolver; | ||
74 | self.db | 56 | self.db |
75 | .query_mut(ra_db::SourceRootQuery) | 57 | .query_mut(ra_db::LocalRootsQuery) |
76 | .set(WORKSPACE, Arc::new(source_root)) | 58 | .set((), Arc::new(local_roots)); |
59 | } | ||
60 | |||
61 | for (root_id, root_change) in change.roots_changed { | ||
62 | self.apply_root_change(root_id, root_change); | ||
63 | } | ||
64 | for (file_id, text) in change.files_changed { | ||
65 | self.db.query_mut(ra_db::FileTextQuery).set(file_id, text) | ||
77 | } | 66 | } |
78 | if !change.libraries_added.is_empty() { | 67 | if !change.libraries_added.is_empty() { |
79 | let mut libraries = Vec::clone(&self.db.libraries()); | 68 | let mut libraries = Vec::clone(&self.db.library_roots()); |
80 | for library in change.libraries_added { | 69 | for library in change.libraries_added { |
81 | let source_root_id = SourceRootId(1 + libraries.len() as u32); | 70 | libraries.push(library.root_id); |
82 | libraries.push(source_root_id); | ||
83 | let mut files = FxHashSet::default(); | ||
84 | for (file_id, text) in library.files { | ||
85 | files.insert(file_id); | ||
86 | log::debug!( | ||
87 | "library file: {:?} {:?}", | ||
88 | file_id, | ||
89 | library.file_resolver.debug_path(file_id) | ||
90 | ); | ||
91 | self.db | ||
92 | .query_mut(ra_db::FileSourceRootQuery) | ||
93 | .set_constant(file_id, source_root_id); | ||
94 | self.db | ||
95 | .query_mut(ra_db::FileTextQuery) | ||
96 | .set_constant(file_id, Arc::new(text)); | ||
97 | } | ||
98 | let source_root = SourceRoot { | ||
99 | files, | ||
100 | file_resolver: library.file_resolver, | ||
101 | }; | ||
102 | self.db | 71 | self.db |
103 | .query_mut(ra_db::SourceRootQuery) | 72 | .query_mut(ra_db::SourceRootQuery) |
104 | .set(source_root_id, Arc::new(source_root)); | 73 | .set(library.root_id, Default::default()); |
105 | self.db | 74 | self.db |
106 | .query_mut(crate::symbol_index::LibrarySymbolsQuery) | 75 | .query_mut(LibrarySymbolsQuery) |
107 | .set(source_root_id, Arc::new(library.symbol_index)); | 76 | .set_constant(library.root_id, Arc::new(library.symbol_index)); |
77 | self.apply_root_change(library.root_id, library.root_change); | ||
108 | } | 78 | } |
109 | self.db | 79 | self.db |
110 | .query_mut(ra_db::LibrariesQuery) | 80 | .query_mut(ra_db::LibraryRootsQuery) |
111 | .set((), Arc::new(libraries)); | 81 | .set((), Arc::new(libraries)); |
112 | } | 82 | } |
113 | if let Some(crate_graph) = change.crate_graph { | 83 | if let Some(crate_graph) = change.crate_graph { |
@@ -117,6 +87,34 @@ impl AnalysisHostImpl { | |||
117 | } | 87 | } |
118 | } | 88 | } |
119 | 89 | ||
90 | fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { | ||
91 | let mut source_root = SourceRoot::clone(&self.db.source_root(root_id)); | ||
92 | for add_file in root_change.added { | ||
93 | self.db | ||
94 | .query_mut(ra_db::FileTextQuery) | ||
95 | .set(add_file.file_id, add_file.text); | ||
96 | self.db | ||
97 | .query_mut(ra_db::FileRelativePathQuery) | ||
98 | .set(add_file.file_id, add_file.path.clone()); | ||
99 | self.db | ||
100 | .query_mut(ra_db::FileSourceRootQuery) | ||
101 | .set(add_file.file_id, root_id); | ||
102 | source_root.files.insert(add_file.path, add_file.file_id); | ||
103 | } | ||
104 | for remove_file in root_change.removed { | ||
105 | self.db | ||
106 | .query_mut(ra_db::FileTextQuery) | ||
107 | .set(remove_file.file_id, Default::default()); | ||
108 | self.db | ||
109 | .query_mut(ra_db::FileRelativePathQuery) | ||
110 | .set(remove_file.file_id, Default::default()); | ||
111 | source_root.files.remove(&remove_file.path); | ||
112 | } | ||
113 | self.db | ||
114 | .query_mut(ra_db::SourceRootQuery) | ||
115 | .set(root_id, Arc::new(source_root)); | ||
116 | } | ||
117 | |||
120 | #[allow(unused)] | 118 | #[allow(unused)] |
121 | /// Ideally, we should call this function from time to time to collect heavy | 119 | /// Ideally, we should call this function from time to time to collect heavy |
122 | /// syntax trees. However, if we actually do that, everything is recomputed | 120 | /// syntax trees. However, if we actually do that, everything is recomputed |
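
The new `apply_root_change` above funnels every file addition or removal through three per-file inputs plus the owning root's path map. A condensed sketch of that data flow, with plain std maps standing in for the salsa inputs (names here are stand-ins, not the real API):

    use std::{collections::HashMap, sync::Arc};
    use relative_path::RelativePathBuf;

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct FileId(u32);

    struct AddFile { file_id: FileId, path: RelativePathBuf, text: Arc<String> }
    struct RemoveFile { file_id: FileId, path: RelativePathBuf }

    // Stand-ins for the inputs touched by `apply_root_change`.
    #[derive(Default)]
    struct Inputs {
        file_text: HashMap<FileId, Arc<String>>,
        file_relative_path: HashMap<FileId, RelativePathBuf>,
        // `SourceRoot::files` after this change: relative path -> FileId.
        root_files: HashMap<RelativePathBuf, FileId>,
    }

    fn apply_root_change(db: &mut Inputs, added: Vec<AddFile>, removed: Vec<RemoveFile>) {
        for f in added {
            db.file_text.insert(f.file_id, f.text);
            db.file_relative_path.insert(f.file_id, f.path.clone());
            db.root_files.insert(f.path, f.file_id);
        }
        for f in removed {
            // Inputs are reset to defaults rather than deleted, mirroring the diff.
            db.file_text.insert(f.file_id, Default::default());
            db.file_relative_path.insert(f.file_id, Default::default());
            db.root_files.remove(&f.path);
        }
    }
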
@@ -156,21 +154,26 @@ impl AnalysisImpl { | |||
156 | self.db.file_lines(file_id) | 154 | self.db.file_lines(file_id) |
157 | } | 155 | } |
158 | pub fn world_symbols(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 156 | pub fn world_symbols(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> { |
157 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | ||
158 | struct Snap(salsa::Snapshot<db::RootDatabase>); | ||
159 | impl Clone for Snap { | ||
160 | fn clone(&self) -> Snap { | ||
161 | Snap(self.0.snapshot()) | ||
162 | } | ||
163 | } | ||
164 | |||
159 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { | 165 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { |
166 | let snap = Snap(self.db.snapshot()); | ||
160 | self.db | 167 | self.db |
161 | .libraries() | 168 | .library_roots() |
162 | .iter() | 169 | .par_iter() |
163 | .map(|&lib_id| self.db.library_symbols(lib_id)) | 170 | .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) |
164 | .collect() | 171 | .collect() |
165 | } else { | 172 | } else { |
166 | let files = &self.db.source_root(WORKSPACE).files; | 173 | let mut files = Vec::new(); |
167 | 174 | for &root in self.db.local_roots().iter() { | |
168 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | 175 | let sr = self.db.source_root(root); |
169 | struct Snap(salsa::Snapshot<db::RootDatabase>); | 176 | files.extend(sr.files.values().map(|&it| it)) |
170 | impl Clone for Snap { | ||
171 | fn clone(&self) -> Snap { | ||
172 | Snap(self.0.snapshot()) | ||
173 | } | ||
174 | } | 177 | } |
175 | 178 | ||
176 | let snap = Snap(self.db.snapshot()); | 179 | let snap = Snap(self.db.snapshot()); |
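
The hoisted `Snap` wrapper exists because rayon's `map_with` requires a `Clone` initial value while `salsa::Snapshot` is not `Clone`: cloning `Snap` takes a fresh snapshot, so each worker thread queries through its own. A self-contained sketch of the pattern, with a dummy database type standing in for the real salsa types (assumes rayon is available):

    use rayon::prelude::*;

    struct Db; // stand-in for db::RootDatabase
    impl Db {
        fn snapshot(&self) -> Db {
            Db // pretend this forks a salsa snapshot
        }
        fn library_symbols(&self, lib_id: u32) -> u32 {
            lib_id // pretend query
        }
    }

    struct Snap(Db);
    impl Clone for Snap {
        fn clone(&self) -> Snap {
            Snap(self.0.snapshot()) // every rayon worker gets its own snapshot
        }
    }

    fn collect_symbols(db: &Db, libs: &[u32]) -> Vec<u32> {
        let snap = Snap(db.snapshot());
        libs.par_iter()
            .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id))
            .collect()
    }
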
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 22fff71ab..a1d462528 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -18,9 +18,9 @@ pub mod mock_analysis; | |||
18 | 18 | ||
19 | use std::{fmt, sync::Arc}; | 19 | use std::{fmt, sync::Arc}; |
20 | 20 | ||
21 | use rustc_hash::FxHashMap; | ||
21 | use ra_syntax::{SourceFileNode, TextRange, TextUnit}; | 22 | use ra_syntax::{SourceFileNode, TextRange, TextUnit}; |
22 | use ra_text_edit::AtomTextEdit; | 23 | use ra_text_edit::AtomTextEdit; |
23 | use ra_db::FileResolverImp; | ||
24 | use rayon::prelude::*; | 24 | use rayon::prelude::*; |
25 | use relative_path::RelativePathBuf; | 25 | use relative_path::RelativePathBuf; |
26 | 26 | ||
@@ -39,28 +39,54 @@ pub use hir::FnSignatureInfo; | |||
39 | 39 | ||
40 | pub use ra_db::{ | 40 | pub use ra_db::{ |
41 | Canceled, Cancelable, FilePosition, | 41 | Canceled, Cancelable, FilePosition, |
42 | CrateGraph, CrateId, FileId, FileResolver | 42 | CrateGraph, CrateId, SourceRootId, FileId |
43 | }; | 43 | }; |
44 | 44 | ||
45 | #[derive(Default)] | 45 | #[derive(Default)] |
46 | pub struct AnalysisChange { | 46 | pub struct AnalysisChange { |
47 | files_added: Vec<(FileId, String)>, | 47 | new_roots: Vec<(SourceRootId, bool)>, |
48 | files_changed: Vec<(FileId, String)>, | 48 | roots_changed: FxHashMap<SourceRootId, RootChange>, |
49 | files_removed: Vec<(FileId)>, | 49 | files_changed: Vec<(FileId, Arc<String>)>, |
50 | libraries_added: Vec<LibraryData>, | 50 | libraries_added: Vec<LibraryData>, |
51 | crate_graph: Option<CrateGraph>, | 51 | crate_graph: Option<CrateGraph>, |
52 | file_resolver: Option<FileResolverImp>, | 52 | } |
53 | |||
54 | #[derive(Default)] | ||
55 | struct RootChange { | ||
56 | added: Vec<AddFile>, | ||
57 | removed: Vec<RemoveFile>, | ||
58 | } | ||
59 | |||
60 | #[derive(Debug)] | ||
61 | struct AddFile { | ||
62 | file_id: FileId, | ||
63 | path: RelativePathBuf, | ||
64 | text: Arc<String>, | ||
65 | } | ||
66 | |||
67 | #[derive(Debug)] | ||
68 | struct RemoveFile { | ||
69 | file_id: FileId, | ||
70 | path: RelativePathBuf, | ||
53 | } | 71 | } |
54 | 72 | ||
55 | impl fmt::Debug for AnalysisChange { | 73 | impl fmt::Debug for AnalysisChange { |
56 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 74 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { |
57 | fmt.debug_struct("AnalysisChange") | 75 | fmt.debug_struct("AnalysisChange") |
58 | .field("files_added", &self.files_added.len()) | 76 | .field("new_roots", &self.new_roots) |
77 | .field("roots_changed", &self.roots_changed) | ||
59 | .field("files_changed", &self.files_changed.len()) | 78 | .field("files_changed", &self.files_changed.len()) |
60 | .field("files_removed", &self.files_removed.len()) | ||
61 | .field("libraries_added", &self.libraries_added.len()) | 79 | .field("libraries_added", &self.libraries_added.len()) |
62 | .field("crate_graph", &self.crate_graph) | 80 | .field("crate_graph", &self.crate_graph) |
63 | .field("file_resolver", &self.file_resolver) | 81 | .finish() |
82 | } | ||
83 | } | ||
84 | |||
85 | impl fmt::Debug for RootChange { | ||
86 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
87 | fmt.debug_struct("AnalysisChange") | ||
88 | .field("added", &self.added.len()) | ||
89 | .field("removed", &self.removed.len()) | ||
64 | .finish() | 90 | .finish() |
65 | } | 91 | } |
66 | } | 92 | } |
@@ -69,14 +95,37 @@ impl AnalysisChange { | |||
69 | pub fn new() -> AnalysisChange { | 95 | pub fn new() -> AnalysisChange { |
70 | AnalysisChange::default() | 96 | AnalysisChange::default() |
71 | } | 97 | } |
72 | pub fn add_file(&mut self, file_id: FileId, text: String) { | 98 | pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { |
73 | self.files_added.push((file_id, text)) | 99 | self.new_roots.push((root_id, is_local)); |
74 | } | 100 | } |
75 | pub fn change_file(&mut self, file_id: FileId, new_text: String) { | 101 | pub fn add_file( |
102 | &mut self, | ||
103 | root_id: SourceRootId, | ||
104 | file_id: FileId, | ||
105 | path: RelativePathBuf, | ||
106 | text: Arc<String>, | ||
107 | ) { | ||
108 | let file = AddFile { | ||
109 | file_id, | ||
110 | path, | ||
111 | text, | ||
112 | }; | ||
113 | self.roots_changed | ||
114 | .entry(root_id) | ||
115 | .or_default() | ||
116 | .added | ||
117 | .push(file); | ||
118 | } | ||
119 | pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) { | ||
76 | self.files_changed.push((file_id, new_text)) | 120 | self.files_changed.push((file_id, new_text)) |
77 | } | 121 | } |
78 | pub fn remove_file(&mut self, file_id: FileId) { | 122 | pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { |
79 | self.files_removed.push(file_id) | 123 | let file = RemoveFile { file_id, path }; |
124 | self.roots_changed | ||
125 | .entry(root_id) | ||
126 | .or_default() | ||
127 | .removed | ||
128 | .push(file); | ||
80 | } | 129 | } |
81 | pub fn add_library(&mut self, data: LibraryData) { | 130 | pub fn add_library(&mut self, data: LibraryData) { |
82 | self.libraries_added.push(data) | 131 | self.libraries_added.push(data) |
@@ -84,9 +133,6 @@ impl AnalysisChange { | |||
84 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | 133 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { |
85 | self.crate_graph = Some(graph); | 134 | self.crate_graph = Some(graph); |
86 | } | 135 | } |
87 | pub fn set_file_resolver(&mut self, file_resolver: Arc<FileResolver>) { | ||
88 | self.file_resolver = Some(FileResolverImp::new(file_resolver)); | ||
89 | } | ||
90 | } | 136 | } |
91 | 137 | ||
92 | /// `AnalysisHost` stores the current state of the world. | 138 | /// `AnalysisHost` stores the current state of the world. |
@@ -313,20 +359,32 @@ impl Analysis { | |||
313 | 359 | ||
314 | #[derive(Debug)] | 360 | #[derive(Debug)] |
315 | pub struct LibraryData { | 361 | pub struct LibraryData { |
316 | files: Vec<(FileId, String)>, | 362 | root_id: SourceRootId, |
317 | file_resolver: FileResolverImp, | 363 | root_change: RootChange, |
318 | symbol_index: SymbolIndex, | 364 | symbol_index: SymbolIndex, |
319 | } | 365 | } |
320 | 366 | ||
321 | impl LibraryData { | 367 | impl LibraryData { |
322 | pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData { | 368 | pub fn prepare( |
323 | let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, text)| { | 369 | root_id: SourceRootId, |
370 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | ||
371 | ) -> LibraryData { | ||
372 | let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { | ||
324 | let file = SourceFileNode::parse(text); | 373 | let file = SourceFileNode::parse(text); |
325 | (*file_id, file) | 374 | (*file_id, file) |
326 | })); | 375 | })); |
376 | let mut root_change = RootChange::default(); | ||
377 | root_change.added = files | ||
378 | .into_iter() | ||
379 | .map(|(file_id, path, text)| AddFile { | ||
380 | file_id, | ||
381 | path, | ||
382 | text, | ||
383 | }) | ||
384 | .collect(); | ||
327 | LibraryData { | 385 | LibraryData { |
328 | files, | 386 | root_id, |
329 | file_resolver: FileResolverImp::new(file_resolver), | 387 | root_change, |
330 | symbol_index, | 388 | symbol_index, |
331 | } | 389 | } |
332 | } | 390 | } |
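
Putting the new API together, a client now describes edits in terms of roots and root-relative paths rather than a file resolver. A usage sketch based on the signatures in this file; the concrete ids, paths and texts below are made up for illustration:

    use std::sync::Arc;
    use ra_analysis::{AnalysisChange, AnalysisHost, FileId, LibraryData, SourceRootId};
    use relative_path::RelativePathBuf;

    fn apply_example_change(host: &mut AnalysisHost) {
        let workspace_root = SourceRootId(0);
        let mut change = AnalysisChange::new();
        change.add_root(workspace_root, true); // `true` marks a local (workspace) root
        change.add_file(
            workspace_root,
            FileId(1),
            RelativePathBuf::from_path("lib.rs").unwrap(),
            Arc::new("pub fn foo() {}".to_string()),
        );
        change.change_file(FileId(1), Arc::new("pub fn foo() -> i32 { 92 }".to_string()));

        // Libraries get their own root id and a pre-built symbol index.
        let lib_root = SourceRootId(1);
        let lib_files = vec![(
            FileId(100),
            RelativePathBuf::from_path("lib.rs").unwrap(),
            Arc::new("pub struct Baz;".to_string()),
        )];
        change.add_library(LibraryData::prepare(lib_root, lib_files));

        host.apply_change(change);
    }
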
diff --git a/crates/ra_analysis/src/mock_analysis.rs b/crates/ra_analysis/src/mock_analysis.rs
index 0d9a7a147..7cbdfb953 100644
--- a/crates/ra_analysis/src/mock_analysis.rs
+++ b/crates/ra_analysis/src/mock_analysis.rs
@@ -4,7 +4,7 @@ use relative_path::{RelativePathBuf}; | |||
4 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; | 4 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; |
5 | use ra_db::mock::FileMap; | 5 | use ra_db::mock::FileMap; |
6 | 6 | ||
7 | use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FilePosition}; | 7 | use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FilePosition, SourceRootId}; |
8 | 8 | ||
9 | /// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis | 9 | /// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis |
10 | /// from a set of in-memory files. | 10 | /// from a set of in-memory files. |
@@ -78,14 +78,16 @@ impl MockAnalysis { | |||
78 | pub fn analysis_host(self) -> AnalysisHost { | 78 | pub fn analysis_host(self) -> AnalysisHost { |
79 | let mut host = AnalysisHost::default(); | 79 | let mut host = AnalysisHost::default(); |
80 | let mut file_map = FileMap::default(); | 80 | let mut file_map = FileMap::default(); |
81 | let source_root = SourceRootId(0); | ||
81 | let mut change = AnalysisChange::new(); | 82 | let mut change = AnalysisChange::new(); |
83 | change.add_root(source_root, true); | ||
82 | for (path, contents) in self.files.into_iter() { | 84 | for (path, contents) in self.files.into_iter() { |
83 | assert!(path.starts_with('/')); | 85 | assert!(path.starts_with('/')); |
84 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); | 86 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); |
85 | let file_id = file_map.add(path); | 87 | let file_id = file_map.add(path.clone()); |
86 | change.add_file(file_id, contents); | 88 | change.add_file(source_root, file_id, path, Arc::new(contents)); |
87 | } | 89 | } |
88 | change.set_file_resolver(Arc::new(file_map)); | 90 | // change.set_file_resolver(Arc::new(file_map)); |
89 | host.apply_change(change); | 91 | host.apply_change(change); |
90 | host | 92 | host |
91 | } | 93 | } |
diff --git a/crates/ra_db/src/file_resolver.rs b/crates/ra_db/src/file_resolver.rs
deleted file mode 100644
index f849ac752..000000000
--- a/crates/ra_db/src/file_resolver.rs
+++ /dev/null
@@ -1,76 +0,0 @@ | |||
1 | use std::{ | ||
2 | sync::Arc, | ||
3 | hash::{Hash, Hasher}, | ||
4 | fmt, | ||
5 | }; | ||
6 | |||
7 | use relative_path::RelativePath; | ||
8 | |||
9 | use crate::input::FileId; | ||
10 | |||
11 | pub trait FileResolver: fmt::Debug + Send + Sync + 'static { | ||
12 | fn file_stem(&self, file_id: FileId) -> String; | ||
13 | fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>; | ||
14 | fn debug_path(&self, _file_id: FileId) -> Option<std::path::PathBuf> { | ||
15 | None | ||
16 | } | ||
17 | } | ||
18 | |||
19 | #[derive(Clone, Debug)] | ||
20 | pub struct FileResolverImp { | ||
21 | inner: Arc<FileResolver>, | ||
22 | } | ||
23 | |||
24 | impl PartialEq for FileResolverImp { | ||
25 | fn eq(&self, other: &FileResolverImp) -> bool { | ||
26 | self.inner() == other.inner() | ||
27 | } | ||
28 | } | ||
29 | |||
30 | impl Eq for FileResolverImp {} | ||
31 | |||
32 | impl Hash for FileResolverImp { | ||
33 | fn hash<H: Hasher>(&self, hasher: &mut H) { | ||
34 | self.inner().hash(hasher); | ||
35 | } | ||
36 | } | ||
37 | |||
38 | impl FileResolverImp { | ||
39 | pub fn new(inner: Arc<FileResolver>) -> FileResolverImp { | ||
40 | FileResolverImp { inner } | ||
41 | } | ||
42 | pub fn file_stem(&self, file_id: FileId) -> String { | ||
43 | self.inner.file_stem(file_id) | ||
44 | } | ||
45 | pub fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> { | ||
46 | self.inner.resolve(file_id, path) | ||
47 | } | ||
48 | pub fn debug_path(&self, file_id: FileId) -> Option<std::path::PathBuf> { | ||
49 | self.inner.debug_path(file_id) | ||
50 | } | ||
51 | fn inner(&self) -> *const FileResolver { | ||
52 | &*self.inner | ||
53 | } | ||
54 | } | ||
55 | |||
56 | impl Default for FileResolverImp { | ||
57 | fn default() -> FileResolverImp { | ||
58 | #[derive(Debug)] | ||
59 | struct DummyResolver; | ||
60 | impl FileResolver for DummyResolver { | ||
61 | fn file_stem(&self, _file_: FileId) -> String { | ||
62 | panic!("file resolver not set") | ||
63 | } | ||
64 | fn resolve( | ||
65 | &self, | ||
66 | _file_id: FileId, | ||
67 | _path: &::relative_path::RelativePath, | ||
68 | ) -> Option<FileId> { | ||
69 | panic!("file resolver not set") | ||
70 | } | ||
71 | } | ||
72 | FileResolverImp { | ||
73 | inner: Arc::new(DummyResolver), | ||
74 | } | ||
75 | } | ||
76 | } | ||
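
With this trait gone, "resolve a path relative to a file" becomes a lookup in the owning root's path map. A minimal sketch of the replacement logic, using std's HashMap in place of FxHashMap and simplified stand-in types:

    use std::collections::HashMap;
    use relative_path::{RelativePath, RelativePathBuf};

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct FileId(u32);

    fn resolve(
        files: &HashMap<RelativePathBuf, FileId>, // SourceRoot::files after this change
        base: &RelativePath,                      // path of the referencing file, e.g. "foo/mod.rs"
        rel: &RelativePath,                       // e.g. "../bar.rs"
    ) -> Option<FileId> {
        let path = base.join(rel).normalize();
        files.get(&path).copied()
    }
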
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index ac144b991..cccf37cc2 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -1,10 +1,12 @@ | |||
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use rustc_hash::{FxHashSet, FxHashMap}; | 3 | use rustc_hash::{FxHashMap}; |
4 | use relative_path::RelativePathBuf; | ||
4 | use ra_syntax::SmolStr; | 5 | use ra_syntax::SmolStr; |
5 | use salsa; | 6 | use salsa; |
6 | 7 | ||
7 | use crate::file_resolver::FileResolverImp; | 8 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] |
9 | pub struct SourceRootId(pub u32); | ||
8 | 10 | ||
9 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] | 11 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] |
10 | pub struct FileId(pub u32); | 12 | pub struct FileId(pub u32); |
@@ -85,6 +87,11 @@ salsa::query_group! { | |||
85 | type FileTextQuery; | 87 | type FileTextQuery; |
86 | storage input; | 88 | storage input; |
87 | } | 89 | } |
90 | /// Path to a file, relative to the root of its source root. | ||
91 | fn file_relative_path(file_id: FileId) -> RelativePathBuf { | ||
92 | type FileRelativePathQuery; | ||
93 | storage input; | ||
94 | } | ||
88 | fn file_source_root(file_id: FileId) -> SourceRootId { | 95 | fn file_source_root(file_id: FileId) -> SourceRootId { |
89 | type FileSourceRootQuery; | 96 | type FileSourceRootQuery; |
90 | storage input; | 97 | storage input; |
@@ -93,8 +100,12 @@ salsa::query_group! { | |||
93 | type SourceRootQuery; | 100 | type SourceRootQuery; |
94 | storage input; | 101 | storage input; |
95 | } | 102 | } |
96 | fn libraries() -> Arc<Vec<SourceRootId>> { | 103 | fn local_roots() -> Arc<Vec<SourceRootId>> { |
97 | type LibrariesQuery; | 104 | type LocalRootsQuery; |
105 | storage input; | ||
106 | } | ||
107 | fn library_roots() -> Arc<Vec<SourceRootId>> { | ||
108 | type LibraryRootsQuery; | ||
98 | storage input; | 109 | storage input; |
99 | } | 110 | } |
100 | fn crate_graph() -> Arc<CrateGraph> { | 111 | fn crate_graph() -> Arc<CrateGraph> { |
@@ -104,13 +115,7 @@ salsa::query_group! { | |||
104 | } | 115 | } |
105 | } | 116 | } |
106 | 117 | ||
107 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] | ||
108 | pub struct SourceRootId(pub u32); | ||
109 | |||
110 | #[derive(Default, Clone, Debug, PartialEq, Eq)] | 118 | #[derive(Default, Clone, Debug, PartialEq, Eq)] |
111 | pub struct SourceRoot { | 119 | pub struct SourceRoot { |
112 | pub file_resolver: FileResolverImp, | 120 | pub files: FxHashMap<RelativePathBuf, FileId>, |
113 | pub files: FxHashSet<FileId>, | ||
114 | } | 121 | } |
115 | |||
116 | pub const WORKSPACE: SourceRootId = SourceRootId(0); | ||
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 53805aada..65fa3cbfa 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -1,6 +1,5 @@ | |||
1 | //! ra_db defines basic database traits. Concrete DB is defined by ra_analysis. | 1 | //! ra_db defines basic database traits. Concrete DB is defined by ra_analysis. |
2 | mod syntax_ptr; | 2 | mod syntax_ptr; |
3 | mod file_resolver; | ||
4 | mod input; | 3 | mod input; |
5 | mod loc2id; | 4 | mod loc2id; |
6 | pub mod mock; | 5 | pub mod mock; |
@@ -24,10 +23,10 @@ impl std::error::Error for Canceled {} | |||
24 | 23 | ||
25 | pub use crate::{ | 24 | pub use crate::{ |
26 | syntax_ptr::LocalSyntaxPtr, | 25 | syntax_ptr::LocalSyntaxPtr, |
27 | file_resolver::{FileResolver, FileResolverImp}, | ||
28 | input::{ | 26 | input::{ |
29 | FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, WORKSPACE, | 27 | FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, |
30 | FileTextQuery, FileSourceRootQuery, SourceRootQuery, LibrariesQuery, CrateGraphQuery, | 28 | FileTextQuery, FileSourceRootQuery, SourceRootQuery, LocalRootsQuery, LibraryRootsQuery, CrateGraphQuery, |
29 | FileRelativePathQuery | ||
31 | }, | 30 | }, |
32 | loc2id::{LocationIntener, NumericId}, | 31 | loc2id::{LocationIntener, NumericId}, |
33 | }; | 32 | }; |
diff --git a/crates/ra_db/src/mock.rs b/crates/ra_db/src/mock.rs
index 2f7551597..5e185062b 100644
--- a/crates/ra_db/src/mock.rs
+++ b/crates/ra_db/src/mock.rs
@@ -1,9 +1,7 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use rustc_hash::FxHashSet; | 1 | use rustc_hash::FxHashSet; |
4 | use relative_path::{RelativePath, RelativePathBuf}; | 2 | use relative_path::{RelativePath, RelativePathBuf}; |
5 | 3 | ||
6 | use crate::{FileId, FileResolver, SourceRoot, FileResolverImp}; | 4 | use crate::{FileId}; |
7 | 5 | ||
8 | #[derive(Default, Debug, Clone)] | 6 | #[derive(Default, Debug, Clone)] |
9 | pub struct FileMap(Vec<(FileId, RelativePathBuf)>); | 7 | pub struct FileMap(Vec<(FileId, RelativePathBuf)>); |
@@ -15,15 +13,6 @@ impl FileMap { | |||
15 | file_id | 13 | file_id |
16 | } | 14 | } |
17 | 15 | ||
18 | pub fn into_source_root(self) -> SourceRoot { | ||
19 | let files = self.files(); | ||
20 | let file_resolver = FileResolverImp::new(Arc::new(self)); | ||
21 | SourceRoot { | ||
22 | file_resolver, | ||
23 | files, | ||
24 | } | ||
25 | } | ||
26 | |||
27 | pub fn files(&self) -> FxHashSet<FileId> { | 16 | pub fn files(&self) -> FxHashSet<FileId> { |
28 | self.iter().map(|(id, _)| id).collect() | 17 | self.iter().map(|(id, _)| id).collect() |
29 | } | 18 | } |
@@ -38,19 +27,4 @@ impl FileMap { | |||
38 | .iter() | 27 | .iter() |
39 | .map(|(id, path)| (*id, path.as_relative_path())) | 28 | .map(|(id, path)| (*id, path.as_relative_path())) |
40 | } | 29 | } |
41 | |||
42 | fn path(&self, id: FileId) -> &RelativePath { | ||
43 | self.iter().find(|&(it, _)| it == id).unwrap().1 | ||
44 | } | ||
45 | } | ||
46 | |||
47 | impl FileResolver for FileMap { | ||
48 | fn file_stem(&self, id: FileId) -> String { | ||
49 | self.path(id).file_stem().unwrap().to_string() | ||
50 | } | ||
51 | fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> { | ||
52 | let path = self.path(id).join(rel).normalize(); | ||
53 | let id = self.iter().find(|&(_, p)| path == p)?.0; | ||
54 | Some(id) | ||
55 | } | ||
56 | } | 30 | } |
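
After losing `into_source_root` and its `FileResolver` impl, `FileMap` keeps only its id-allocation role. A simplified stand-in showing roughly what remains (the exact id numbering is an assumption; the real `add` body is not shown in this diff):

    use relative_path::RelativePathBuf;

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct FileId(u32);

    #[derive(Default, Debug)]
    struct FileMap(Vec<(FileId, RelativePathBuf)>);

    impl FileMap {
        fn add(&mut self, path: RelativePathBuf) -> FileId {
            let file_id = FileId(self.0.len() as u32 + 1);
            self.0.push((file_id, path));
            file_id
        }
    }
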
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 1b9e148b2..61650cee9 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -5,6 +5,7 @@ version = "0.1.0" | |||
5 | authors = ["Aleksey Kladov <[email protected]>"] | 5 | authors = ["Aleksey Kladov <[email protected]>"] |
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | arrayvec = "0.4.9" | ||
8 | log = "0.4.5" | 9 | log = "0.4.5" |
9 | relative-path = "0.4.0" | 10 | relative-path = "0.4.0" |
10 | salsa = "0.8.0" | 11 | salsa = "0.8.0" |
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
index b7193c4f3..9423e6571 100644
--- a/crates/ra_hir/src/mock.rs
+++ b/crates/ra_hir/src/mock.rs
@@ -2,12 +2,14 @@ use std::sync::Arc; | |||
2 | 2 | ||
3 | use parking_lot::Mutex; | 3 | use parking_lot::Mutex; |
4 | use salsa::{self, Database}; | 4 | use salsa::{self, Database}; |
5 | use ra_db::{LocationIntener, BaseDatabase, FilePosition, mock::FileMap, FileId, WORKSPACE, CrateGraph}; | 5 | use ra_db::{LocationIntener, BaseDatabase, FilePosition, FileId, CrateGraph, SourceRoot, SourceRootId}; |
6 | use relative_path::RelativePathBuf; | 6 | use relative_path::RelativePathBuf; |
7 | use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset}; | 7 | use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset}; |
8 | 8 | ||
9 | use crate::{db, DefId, DefLoc}; | 9 | use crate::{db, DefId, DefLoc}; |
10 | 10 | ||
11 | const WORKSPACE: SourceRootId = SourceRootId(0); | ||
12 | |||
11 | #[derive(Debug)] | 13 | #[derive(Debug)] |
12 | pub(crate) struct MockDatabase { | 14 | pub(crate) struct MockDatabase { |
13 | events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>, | 15 | events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>, |
@@ -16,10 +18,10 @@ pub(crate) struct MockDatabase { | |||
16 | } | 18 | } |
17 | 19 | ||
18 | impl MockDatabase { | 20 | impl MockDatabase { |
19 | pub(crate) fn with_files(fixture: &str) -> (MockDatabase, FileMap) { | 21 | pub(crate) fn with_files(fixture: &str) -> (MockDatabase, SourceRoot) { |
20 | let (db, file_map, position) = MockDatabase::from_fixture(fixture); | 22 | let (db, source_root, position) = MockDatabase::from_fixture(fixture); |
21 | assert!(position.is_none()); | 23 | assert!(position.is_none()); |
22 | (db, file_map) | 24 | (db, source_root) |
23 | } | 25 | } |
24 | 26 | ||
25 | pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) { | 27 | pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) { |
@@ -33,48 +35,50 @@ impl MockDatabase { | |||
33 | .set((), Arc::new(crate_graph)); | 35 | .set((), Arc::new(crate_graph)); |
34 | } | 36 | } |
35 | 37 | ||
36 | fn from_fixture(fixture: &str) -> (MockDatabase, FileMap, Option<FilePosition>) { | 38 | fn from_fixture(fixture: &str) -> (MockDatabase, SourceRoot, Option<FilePosition>) { |
37 | let mut db = MockDatabase::default(); | 39 | let mut db = MockDatabase::default(); |
38 | 40 | ||
39 | let mut position = None; | 41 | let mut position = None; |
40 | let mut file_map = FileMap::default(); | 42 | let mut source_root = SourceRoot::default(); |
41 | for entry in parse_fixture(fixture) { | 43 | for entry in parse_fixture(fixture) { |
42 | if entry.text.contains(CURSOR_MARKER) { | 44 | if entry.text.contains(CURSOR_MARKER) { |
43 | assert!( | 45 | assert!( |
44 | position.is_none(), | 46 | position.is_none(), |
45 | "only one marker (<|>) per fixture is allowed" | 47 | "only one marker (<|>) per fixture is allowed" |
46 | ); | 48 | ); |
47 | position = Some(db.add_file_with_position(&mut file_map, &entry.meta, &entry.text)); | 49 | position = |
50 | Some(db.add_file_with_position(&mut source_root, &entry.meta, &entry.text)); | ||
48 | } else { | 51 | } else { |
49 | db.add_file(&mut file_map, &entry.meta, &entry.text); | 52 | db.add_file(&mut source_root, &entry.meta, &entry.text); |
50 | } | 53 | } |
51 | } | 54 | } |
52 | let source_root = file_map.clone().into_source_root(); | ||
53 | db.query_mut(ra_db::SourceRootQuery) | 55 | db.query_mut(ra_db::SourceRootQuery) |
54 | .set(WORKSPACE, Arc::new(source_root)); | 56 | .set(WORKSPACE, Arc::new(source_root.clone())); |
55 | (db, file_map, position) | 57 | (db, source_root, position) |
56 | } | 58 | } |
57 | 59 | ||
58 | fn add_file(&mut self, file_map: &mut FileMap, path: &str, text: &str) -> FileId { | 60 | fn add_file(&mut self, source_root: &mut SourceRoot, path: &str, text: &str) -> FileId { |
59 | assert!(path.starts_with('/')); | 61 | assert!(path.starts_with('/')); |
60 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); | 62 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); |
61 | 63 | let file_id = FileId(source_root.files.len() as u32); | |
62 | let file_id = file_map.add(path); | ||
63 | let text = Arc::new(text.to_string()); | 64 | let text = Arc::new(text.to_string()); |
64 | self.query_mut(ra_db::FileTextQuery).set(file_id, text); | 65 | self.query_mut(ra_db::FileTextQuery).set(file_id, text); |
66 | self.query_mut(ra_db::FileRelativePathQuery) | ||
67 | .set(file_id, path.clone()); | ||
65 | self.query_mut(ra_db::FileSourceRootQuery) | 68 | self.query_mut(ra_db::FileSourceRootQuery) |
66 | .set(file_id, WORKSPACE); | 69 | .set(file_id, WORKSPACE); |
70 | source_root.files.insert(path, file_id); | ||
67 | file_id | 71 | file_id |
68 | } | 72 | } |
69 | 73 | ||
70 | fn add_file_with_position( | 74 | fn add_file_with_position( |
71 | &mut self, | 75 | &mut self, |
72 | file_map: &mut FileMap, | 76 | source_root: &mut SourceRoot, |
73 | path: &str, | 77 | path: &str, |
74 | text: &str, | 78 | text: &str, |
75 | ) -> FilePosition { | 79 | ) -> FilePosition { |
76 | let (offset, text) = extract_offset(text); | 80 | let (offset, text) = extract_offset(text); |
77 | let file_id = self.add_file(file_map, path, &text); | 81 | let file_id = self.add_file(source_root, path, &text); |
78 | FilePosition { file_id, offset } | 82 | FilePosition { file_id, offset } |
79 | } | 83 | } |
80 | } | 84 | } |
@@ -104,11 +108,11 @@ impl Default for MockDatabase { | |||
104 | runtime: salsa::Runtime::default(), | 108 | runtime: salsa::Runtime::default(), |
105 | id_maps: Default::default(), | 109 | id_maps: Default::default(), |
106 | }; | 110 | }; |
107 | db.query_mut(ra_db::SourceRootQuery) | ||
108 | .set(ra_db::WORKSPACE, Default::default()); | ||
109 | db.query_mut(ra_db::CrateGraphQuery) | 111 | db.query_mut(ra_db::CrateGraphQuery) |
110 | .set((), Default::default()); | 112 | .set((), Default::default()); |
111 | db.query_mut(ra_db::LibrariesQuery) | 113 | db.query_mut(ra_db::LocalRootsQuery) |
114 | .set((), Default::default()); | ||
115 | db.query_mut(ra_db::LibraryRootsQuery) | ||
112 | .set((), Default::default()); | 116 | .set((), Default::default()); |
113 | db | 117 | db |
114 | } | 118 | } |
@@ -158,9 +162,11 @@ salsa::database_storage! { | |||
158 | pub(crate) struct MockDatabaseStorage for MockDatabase { | 162 | pub(crate) struct MockDatabaseStorage for MockDatabase { |
159 | impl ra_db::FilesDatabase { | 163 | impl ra_db::FilesDatabase { |
160 | fn file_text() for ra_db::FileTextQuery; | 164 | fn file_text() for ra_db::FileTextQuery; |
165 | fn file_relative_path() for ra_db::FileRelativePathQuery; | ||
161 | fn file_source_root() for ra_db::FileSourceRootQuery; | 166 | fn file_source_root() for ra_db::FileSourceRootQuery; |
162 | fn source_root() for ra_db::SourceRootQuery; | 167 | fn source_root() for ra_db::SourceRootQuery; |
163 | fn libraries() for ra_db::LibrariesQuery; | 168 | fn local_roots() for ra_db::LocalRootsQuery; |
169 | fn library_roots() for ra_db::LibraryRootsQuery; | ||
164 | fn crate_graph() for ra_db::CrateGraphQuery; | 170 | fn crate_graph() for ra_db::CrateGraphQuery; |
165 | } | 171 | } |
166 | impl ra_db::SyntaxDatabase { | 172 | impl ra_db::SyntaxDatabase { |
diff --git a/crates/ra_hir/src/module/imp.rs b/crates/ra_hir/src/module/imp.rs
index 4a19842c4..f3a346152 100644
--- a/crates/ra_hir/src/module/imp.rs
+++ b/crates/ra_hir/src/module/imp.rs
@@ -4,9 +4,10 @@ use ra_syntax::{ | |||
4 | ast::{self, NameOwner}, | 4 | ast::{self, NameOwner}, |
5 | SmolStr, | 5 | SmolStr, |
6 | }; | 6 | }; |
7 | use relative_path::RelativePathBuf; | 7 | use relative_path::{RelativePathBuf, RelativePath}; |
8 | use rustc_hash::{FxHashMap, FxHashSet}; | 8 | use rustc_hash::{FxHashMap, FxHashSet}; |
9 | use ra_db::{SourceRoot, SourceRootId, FileResolverImp, Cancelable, FileId,}; | 9 | use arrayvec::ArrayVec; |
10 | use ra_db::{SourceRoot, SourceRootId, Cancelable, FileId}; | ||
10 | 11 | ||
11 | use crate::{ | 12 | use crate::{ |
12 | HirDatabase, | 13 | HirDatabase, |
@@ -65,7 +66,7 @@ fn create_module_tree<'a>( | |||
65 | let mut visited = FxHashSet::default(); | 66 | let mut visited = FxHashSet::default(); |
66 | 67 | ||
67 | let source_root = db.source_root(source_root); | 68 | let source_root = db.source_root(source_root); |
68 | for &file_id in source_root.files.iter() { | 69 | for &file_id in source_root.files.values() { |
69 | let source = ModuleSource::new_file(file_id); | 70 | let source = ModuleSource::new_file(file_id); |
70 | if visited.contains(&source) { | 71 | if visited.contains(&source) { |
71 | continue; // TODO: use explicit crate_roots here | 72 | continue; // TODO: use explicit crate_roots here |
@@ -110,8 +111,7 @@ fn build_subtree( | |||
110 | 111 | ||
111 | let (points_to, problem) = match sub { | 112 | let (points_to, problem) = match sub { |
112 | Submodule::Declaration(name) => { | 113 | Submodule::Declaration(name) => { |
113 | let (points_to, problem) = | 114 | let (points_to, problem) = resolve_submodule(db, source, &name); |
114 | resolve_submodule(source, &name, &source_root.file_resolver); | ||
115 | let points_to = points_to | 115 | let points_to = points_to |
116 | .into_iter() | 116 | .into_iter() |
117 | .map(|file_id| match roots.remove(&file_id) { | 117 | .map(|file_id| match roots.remove(&file_id) { |
@@ -153,34 +153,42 @@ fn build_subtree( | |||
153 | } | 153 | } |
154 | 154 | ||
155 | fn resolve_submodule( | 155 | fn resolve_submodule( |
156 | db: &impl HirDatabase, | ||
156 | source: ModuleSource, | 157 | source: ModuleSource, |
157 | name: &SmolStr, | 158 | name: &SmolStr, |
158 | file_resolver: &FileResolverImp, | ||
159 | ) -> (Vec<FileId>, Option<Problem>) { | 159 | ) -> (Vec<FileId>, Option<Problem>) { |
160 | // TODO: handle submodules of inline modules properly | 160 | // FIXME: handle submodules of inline modules properly |
161 | let file_id = source.file_id(); | 161 | let file_id = source.file_id(); |
162 | let mod_name = file_resolver.file_stem(file_id); | 162 | let source_root_id = db.file_source_root(file_id); |
163 | let path = db.file_relative_path(file_id); | ||
164 | let root = RelativePathBuf::default(); | ||
165 | let dir_path = path.parent().unwrap_or(&root); | ||
166 | let mod_name = path.file_stem().unwrap_or("unknown"); | ||
163 | let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; | 167 | let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; |
164 | 168 | ||
165 | let file_mod = RelativePathBuf::from(format!("../{}.rs", name)); | 169 | let file_mod = dir_path.join(format!("{}.rs", name)); |
166 | let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name)); | 170 | let dir_mod = dir_path.join(format!("{}/mod.rs", name)); |
167 | let file_dir_mod = RelativePathBuf::from(format!("../{}/{}.rs", mod_name, name)); | 171 | let file_dir_mod = dir_path.join(format!("{}/{}.rs", mod_name, name)); |
168 | let tmp1; | 172 | let mut candidates = ArrayVec::<[_; 2]>::new(); |
169 | let tmp2; | 173 | if is_dir_owner { |
170 | let candidates = if is_dir_owner { | 174 | candidates.push(file_mod.clone()); |
171 | tmp1 = [&file_mod, &dir_mod]; | 175 | candidates.push(dir_mod); |
172 | tmp1.iter() | ||
173 | } else { | 176 | } else { |
174 | tmp2 = [&file_dir_mod]; | 177 | candidates.push(file_dir_mod.clone()); |
175 | tmp2.iter() | ||
176 | }; | 178 | }; |
177 | 179 | let sr = db.source_root(source_root_id); | |
178 | let points_to = candidates | 180 | let points_to = candidates |
179 | .filter_map(|path| file_resolver.resolve(file_id, path)) | 181 | .into_iter() |
182 | .filter_map(|path| sr.files.get(&path)) | ||
183 | .map(|&it| it) | ||
180 | .collect::<Vec<_>>(); | 184 | .collect::<Vec<_>>(); |
181 | let problem = if points_to.is_empty() { | 185 | let problem = if points_to.is_empty() { |
182 | Some(Problem::UnresolvedModule { | 186 | Some(Problem::UnresolvedModule { |
183 | candidate: if is_dir_owner { file_mod } else { file_dir_mod }, | 187 | candidate: RelativePath::new("../").join(&if is_dir_owner { |
188 | file_mod | ||
189 | } else { | ||
190 | file_dir_mod | ||
191 | }), | ||
184 | }) | 192 | }) |
185 | } else { | 193 | } else { |
186 | None | 194 | None |
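
The rewritten `resolve_submodule` above derives candidate paths from the declaring file's root-relative path and then looks them up in the root's path map. A self-contained sketch of that candidate computation, with std's HashMap and a stand-in `FileId` in place of the hir/ra_db types:

    use std::collections::HashMap;
    use relative_path::{RelativePath, RelativePathBuf};

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct FileId(u32);

    fn submodule_candidates(decl_file: &RelativePath, name: &str) -> Vec<RelativePathBuf> {
        let root = RelativePathBuf::default();
        let dir_path = decl_file.parent().unwrap_or(&root);
        let mod_name = decl_file.file_stem().unwrap_or("unknown");
        // `mod.rs`, `lib.rs` and `main.rs` own their directory, so `foo;` may live in
        // `foo.rs` or `foo/mod.rs`; other files only get `<self>/<name>.rs`.
        let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main";
        if is_dir_owner {
            vec![
                dir_path.join(format!("{}.rs", name)),
                dir_path.join(format!("{}/mod.rs", name)),
            ]
        } else {
            vec![dir_path.join(format!("{}/{}.rs", mod_name, name))]
        }
    }

    fn resolve(
        files: &HashMap<RelativePathBuf, FileId>, // the source root's path map
        decl_file: &RelativePath,
        name: &str,
    ) -> Vec<FileId> {
        submodule_candidates(decl_file, name)
            .into_iter()
            .filter_map(|p| files.get(&p).copied())
            .collect()
    }
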
diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs
index 5540b827f..f44abc730 100644
--- a/crates/ra_hir/src/module/nameres.rs
+++ b/crates/ra_hir/src/module/nameres.rs
@@ -32,11 +32,12 @@ use crate::{ | |||
32 | SourceItemId, SourceFileItemId, SourceFileItems, | 32 | SourceItemId, SourceFileItemId, SourceFileItems, |
33 | Path, PathKind, | 33 | Path, PathKind, |
34 | HirDatabase, Crate, | 34 | HirDatabase, Crate, |
35 | module::{ModuleId, ModuleTree}, | 35 | module::{Module, ModuleId, ModuleTree}, |
36 | }; | 36 | }; |
37 | 37 | ||
38 | /// Item map is the result of the name resolution. Item map contains, for each | 38 | /// Item map is the result of the name resolution. Item map contains, for each |
39 | /// module, the set of visible items. | 39 | /// module, the set of visible items. |
40 | // FIXME: currenty we compute item map per source-root. We should do it per crate instead. | ||
40 | #[derive(Default, Debug, PartialEq, Eq)] | 41 | #[derive(Default, Debug, PartialEq, Eq)] |
41 | pub struct ItemMap { | 42 | pub struct ItemMap { |
42 | pub per_module: FxHashMap<ModuleId, ModuleScope>, | 43 | pub per_module: FxHashMap<ModuleId, ModuleScope>, |
@@ -252,7 +253,8 @@ where | |||
252 | let krate = Crate::new(crate_id); | 253 | let krate = Crate::new(crate_id); |
253 | for dep in krate.dependencies(self.db) { | 254 | for dep in krate.dependencies(self.db) { |
254 | if let Some(module) = dep.krate.root_module(self.db)? { | 255 | if let Some(module) = dep.krate.root_module(self.db)? { |
255 | self.add_module_item(&mut module_items, dep.name, module.module_id); | 256 | let def_id = module.def_id(self.db); |
257 | self.add_module_item(&mut module_items, dep.name, def_id); | ||
256 | } | 258 | } |
257 | } | 259 | } |
258 | }; | 260 | }; |
@@ -294,21 +296,21 @@ where | |||
294 | 296 | ||
295 | // Populate modules | 297 | // Populate modules |
296 | for (name, module_id) in module_id.children(&self.module_tree) { | 298 | for (name, module_id) in module_id.children(&self.module_tree) { |
297 | self.add_module_item(&mut module_items, name, module_id); | 299 | let def_loc = DefLoc { |
300 | kind: DefKind::Module, | ||
301 | source_root_id: self.source_root, | ||
302 | module_id, | ||
303 | source_item_id: module_id.source(&self.module_tree).0, | ||
304 | }; | ||
305 | let def_id = def_loc.id(self.db); | ||
306 | self.add_module_item(&mut module_items, name, def_id); | ||
298 | } | 307 | } |
299 | 308 | ||
300 | self.result.per_module.insert(module_id, module_items); | 309 | self.result.per_module.insert(module_id, module_items); |
301 | Ok(()) | 310 | Ok(()) |
302 | } | 311 | } |
303 | 312 | ||
304 | fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, module_id: ModuleId) { | 313 | fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, def_id: DefId) { |
305 | let def_loc = DefLoc { | ||
306 | kind: DefKind::Module, | ||
307 | source_root_id: self.source_root, | ||
308 | module_id, | ||
309 | source_item_id: module_id.source(&self.module_tree).0, | ||
310 | }; | ||
311 | let def_id = def_loc.id(self.db); | ||
312 | let resolution = Resolution { | 314 | let resolution = Resolution { |
313 | def_id: Some(def_id), | 315 | def_id: Some(def_id), |
314 | import: None, | 316 | import: None, |
@@ -329,7 +331,7 @@ where | |||
329 | ImportKind::Named(ptr) => ptr, | 331 | ImportKind::Named(ptr) => ptr, |
330 | }; | 332 | }; |
331 | 333 | ||
332 | let mut curr = match import.path.kind { | 334 | let mut curr: ModuleId = match import.path.kind { |
333 | PathKind::Plain | PathKind::Self_ => module_id, | 335 | PathKind::Plain | PathKind::Self_ => module_id, |
334 | PathKind::Super => { | 336 | PathKind::Super => { |
335 | match module_id.parent(&self.module_tree) { | 337 | match module_id.parent(&self.module_tree) { |
@@ -356,9 +358,30 @@ where | |||
356 | curr = match def_id.loc(self.db) { | 358 | curr = match def_id.loc(self.db) { |
357 | DefLoc { | 359 | DefLoc { |
358 | kind: DefKind::Module, | 360 | kind: DefKind::Module, |
359 | module_id, | 361 | module_id: target_module_id, |
362 | source_root_id, | ||
360 | .. | 363 | .. |
361 | } => module_id, | 364 | } => { |
365 | if source_root_id == self.source_root { | ||
366 | target_module_id | ||
367 | } else { | ||
368 | let module = Module::new(self.db, source_root_id, target_module_id)?; | ||
369 | let path = Path { | ||
370 | segments: import.path.segments[i + 1..].iter().cloned().collect(), | ||
371 | kind: PathKind::Crate, | ||
372 | }; | ||
373 | if let Some(def_id) = module.resolve_path(self.db, path)? { | ||
374 | self.update(module_id, |items| { | ||
375 | let res = Resolution { | ||
376 | def_id: Some(def_id), | ||
377 | import: Some(ptr), | ||
378 | }; | ||
379 | items.items.insert(name.clone(), res); | ||
380 | }) | ||
381 | } | ||
382 | return Ok(()); | ||
383 | } | ||
384 | } | ||
362 | _ => return Ok(()), | 385 | _ => return Ok(()), |
363 | } | 386 | } |
364 | } else { | 387 | } else { |
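
The cross-root branch above re-packages the not-yet-resolved tail of an import path as a crate-relative path before resolving it inside the other root's module. A tiny stand-in sketch of just that re-packaging step (`Path`/`PathKind` here are simplified, not the hir types):

    #[derive(Clone, Debug)]
    enum PathKind {
        Plain,
        Crate,
    }

    #[derive(Clone, Debug)]
    struct Path {
        kind: PathKind,
        segments: Vec<String>,
    }

    // `i` is the index of the segment that resolved to the foreign module.
    fn remaining_path(import: &Path, i: usize) -> Path {
        Path {
            kind: PathKind::Crate, // resolved from the target crate's root module
            segments: import.segments[i + 1..].to_vec(),
        }
    }
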
diff --git a/crates/ra_hir/src/module/nameres/tests.rs b/crates/ra_hir/src/module/nameres/tests.rs
index 9ddc32dcd..9fa9146e3 100644
--- a/crates/ra_hir/src/module/nameres/tests.rs
+++ b/crates/ra_hir/src/module/nameres/tests.rs
@@ -3,6 +3,7 @@ use std::sync::Arc; | |||
3 | use salsa::Database; | 3 | use salsa::Database; |
4 | use ra_db::{FilesDatabase, CrateGraph}; | 4 | use ra_db::{FilesDatabase, CrateGraph}; |
5 | use ra_syntax::SmolStr; | 5 | use ra_syntax::SmolStr; |
6 | use relative_path::RelativePath; | ||
6 | 7 | ||
7 | use crate::{ | 8 | use crate::{ |
8 | self as hir, | 9 | self as hir, |
@@ -44,7 +45,7 @@ fn item_map_smoke_test() { | |||
44 | 45 | ||
45 | #[test] | 46 | #[test] |
46 | fn item_map_across_crates() { | 47 | fn item_map_across_crates() { |
47 | let (mut db, files) = MockDatabase::with_files( | 48 | let (mut db, sr) = MockDatabase::with_files( |
48 | " | 49 | " |
49 | //- /main.rs | 50 | //- /main.rs |
50 | use test_crate::Baz; | 51 | use test_crate::Baz; |
@@ -53,8 +54,8 @@ fn item_map_across_crates() { | |||
53 | pub struct Baz; | 54 | pub struct Baz; |
54 | ", | 55 | ", |
55 | ); | 56 | ); |
56 | let main_id = files.file_id("/main.rs"); | 57 | let main_id = sr.files[RelativePath::new("/main.rs")]; |
57 | let lib_id = files.file_id("/lib.rs"); | 58 | let lib_id = sr.files[RelativePath::new("/lib.rs")]; |
58 | 59 | ||
59 | let mut crate_graph = CrateGraph::default(); | 60 | let mut crate_graph = CrateGraph::default(); |
60 | let main_crate = crate_graph.add_crate_root(main_id); | 61 | let main_crate = crate_graph.add_crate_root(main_id); |
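
The updated test looks files up by indexing the returned `SourceRoot`'s map with a `RelativePath`. A minimal sketch of that lookup pattern with std types (it relies on `RelativePathBuf` borrowing as `RelativePath` for map access):

    use std::collections::HashMap;
    use relative_path::{RelativePath, RelativePathBuf};

    fn file_id_by_path(files: &HashMap<RelativePathBuf, u32>, path: &str) -> u32 {
        // Panics if the path is absent, which is fine for a test helper.
        files[RelativePath::new(path)]
    }
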
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml
index ce4f79d46..fc10096e5 100644
--- a/crates/ra_lsp_server/Cargo.toml
+++ b/crates/ra_lsp_server/Cargo.toml
@@ -19,12 +19,13 @@ flexi_logger = "0.10.0" | |||
19 | log = "0.4.3" | 19 | log = "0.4.3" |
20 | url_serde = "0.2.0" | 20 | url_serde = "0.2.0" |
21 | languageserver-types = "0.53.0" | 21 | languageserver-types = "0.53.0" |
22 | walkdir = "2.2.0" | 22 | walkdir = "2.2.7" |
23 | im = "12.0.0" | 23 | im = "12.0.0" |
24 | cargo_metadata = "0.6.0" | 24 | cargo_metadata = "0.6.0" |
25 | text_unit = { version = "0.1.2", features = ["serde"] } | 25 | text_unit = { version = "0.1.2", features = ["serde"] } |
26 | smol_str = { version = "0.1.5", features = ["serde"] } | 26 | smol_str = { version = "0.1.5", features = ["serde"] } |
27 | rustc-hash = "1.0" | 27 | rustc-hash = "1.0" |
28 | parking_lot = "0.7.0" | ||
28 | 29 | ||
29 | thread_worker = { path = "../thread_worker" } | 30 | thread_worker = { path = "../thread_worker" } |
30 | ra_syntax = { path = "../ra_syntax" } | 31 | ra_syntax = { path = "../ra_syntax" } |
@@ -32,6 +33,7 @@ ra_editor = { path = "../ra_editor" } | |||
32 | ra_text_edit = { path = "../ra_text_edit" } | 33 | ra_text_edit = { path = "../ra_text_edit" } |
33 | ra_analysis = { path = "../ra_analysis" } | 34 | ra_analysis = { path = "../ra_analysis" } |
34 | gen_lsp_server = { path = "../gen_lsp_server" } | 35 | gen_lsp_server = { path = "../gen_lsp_server" } |
36 | ra_vfs = { path = "../ra_vfs" } | ||
35 | 37 | ||
36 | [dev-dependencies] | 38 | [dev-dependencies] |
37 | tempdir = "0.3.7" | 39 | tempdir = "0.3.7" |
diff --git a/crates/ra_lsp_server/src/lib.rs b/crates/ra_lsp_server/src/lib.rs
index 1d7258c35..725b1258a 100644
--- a/crates/ra_lsp_server/src/lib.rs
+++ b/crates/ra_lsp_server/src/lib.rs
@@ -1,11 +1,9 @@ | |||
1 | mod caps; | 1 | mod caps; |
2 | mod conv; | 2 | mod conv; |
3 | mod main_loop; | 3 | mod main_loop; |
4 | mod path_map; | ||
5 | mod project_model; | 4 | mod project_model; |
6 | pub mod req; | 5 | pub mod req; |
7 | mod server_world; | 6 | mod server_world; |
8 | mod vfs; | ||
9 | 7 | ||
10 | pub type Result<T> = ::std::result::Result<T, ::failure::Error>; | 8 | pub type Result<T> = ::std::result::Result<T, ::failure::Error>; |
11 | pub use crate::{caps::server_capabilities, main_loop::main_loop, main_loop::LspError}; | 9 | pub use crate::{caps::server_capabilities, main_loop::main_loop, main_loop::LspError}; |
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs
index eab82ee85..d2f16ea97 100644
--- a/crates/ra_lsp_server/src/main_loop.rs
+++ b/crates/ra_lsp_server/src/main_loop.rs
@@ -1,7 +1,10 @@ | |||
1 | mod handlers; | 1 | mod handlers; |
2 | mod subscriptions; | 2 | mod subscriptions; |
3 | 3 | ||
4 | use std::path::PathBuf; | 4 | use std::{ |
5 | path::PathBuf, | ||
6 | sync::Arc, | ||
7 | }; | ||
5 | 8 | ||
6 | use crossbeam_channel::{unbounded, select, Receiver, Sender}; | 9 | use crossbeam_channel::{unbounded, select, Receiver, Sender}; |
7 | use gen_lsp_server::{ | 10 | use gen_lsp_server::{ |
@@ -9,8 +12,8 @@ use gen_lsp_server::{ | |||
9 | }; | 12 | }; |
10 | use languageserver_types::NumberOrString; | 13 | use languageserver_types::NumberOrString; |
11 | use ra_analysis::{Canceled, FileId, LibraryData}; | 14 | use ra_analysis::{Canceled, FileId, LibraryData}; |
15 | use ra_vfs::{VfsTask}; | ||
12 | use rayon; | 16 | use rayon; |
13 | use thread_worker::Worker; | ||
14 | use threadpool::ThreadPool; | 17 | use threadpool::ThreadPool; |
15 | use rustc_hash::FxHashSet; | 18 | use rustc_hash::FxHashSet; |
16 | use serde::{de::DeserializeOwned, Serialize}; | 19 | use serde::{de::DeserializeOwned, Serialize}; |
@@ -19,10 +22,9 @@ use failure_derive::Fail; | |||
19 | 22 | ||
20 | use crate::{ | 23 | use crate::{ |
21 | main_loop::subscriptions::Subscriptions, | 24 | main_loop::subscriptions::Subscriptions, |
22 | project_model::{workspace_loader, CargoWorkspace}, | 25 | project_model::{workspace_loader}, |
23 | req, | 26 | req, |
24 | server_world::{ServerWorld, ServerWorldState}, | 27 | server_world::{ServerWorld, ServerWorldState}, |
25 | vfs::{self, FileEvent}, | ||
26 | Result, | 28 | Result, |
27 | }; | 29 | }; |
28 | 30 | ||
@@ -50,32 +52,42 @@ enum Task { | |||
50 | 52 | ||
51 | pub fn main_loop( | 53 | pub fn main_loop( |
52 | internal_mode: bool, | 54 | internal_mode: bool, |
53 | root: PathBuf, | 55 | ws_root: PathBuf, |
54 | publish_decorations: bool, | 56 | publish_decorations: bool, |
55 | msg_receiver: &Receiver<RawMessage>, | 57 | msg_receiver: &Receiver<RawMessage>, |
56 | msg_sender: &Sender<RawMessage>, | 58 | msg_sender: &Sender<RawMessage>, |
57 | ) -> Result<()> { | 59 | ) -> Result<()> { |
58 | let pool = ThreadPool::new(8); | 60 | let pool = ThreadPool::new(8); |
59 | let (task_sender, task_receiver) = unbounded::<Task>(); | 61 | let (task_sender, task_receiver) = unbounded::<Task>(); |
60 | let (fs_worker, fs_watcher) = vfs::roots_loader(); | ||
61 | let (ws_worker, ws_watcher) = workspace_loader(); | 62 | let (ws_worker, ws_watcher) = workspace_loader(); |
62 | 63 | ||
64 | ws_worker.send(ws_root.clone()); | ||
65 | // FIXME: support dynamic workspace loading. | ||
66 | let workspaces = match ws_worker.recv().unwrap() { | ||
67 | Ok(ws) => vec![ws], | ||
68 | Err(e) => { | ||
69 | log::warn!("loading workspace failed: {}", e); | ||
70 | Vec::new() | ||
71 | } | ||
72 | }; | ||
73 | ws_worker.shutdown(); | ||
74 | ws_watcher | ||
75 | .shutdown() | ||
76 | .map_err(|_| format_err!("ws watcher died"))?; | ||
77 | let mut state = ServerWorldState::new(ws_root.clone(), workspaces); | ||
78 | |||
63 | log::info!("server initialized, serving requests"); | 79 | log::info!("server initialized, serving requests"); |
64 | let mut state = ServerWorldState::default(); | ||
65 | 80 | ||
66 | let mut pending_requests = FxHashSet::default(); | 81 | let mut pending_requests = FxHashSet::default(); |
67 | let mut subs = Subscriptions::new(); | 82 | let mut subs = Subscriptions::new(); |
68 | let main_res = main_loop_inner( | 83 | let main_res = main_loop_inner( |
69 | internal_mode, | 84 | internal_mode, |
70 | publish_decorations, | 85 | publish_decorations, |
71 | root, | ||
72 | &pool, | 86 | &pool, |
73 | msg_sender, | 87 | msg_sender, |
74 | msg_receiver, | 88 | msg_receiver, |
75 | task_sender, | 89 | task_sender, |
76 | task_receiver.clone(), | 90 | task_receiver.clone(), |
77 | fs_worker, | ||
78 | ws_worker, | ||
79 | &mut state, | 91 | &mut state, |
80 | &mut pending_requests, | 92 | &mut pending_requests, |
81 | &mut subs, | 93 | &mut subs, |
@@ -88,12 +100,11 @@ pub fn main_loop( | |||
88 | drop(pool); | 100 | drop(pool); |
89 | log::info!("...threadpool has finished"); | 101 | log::info!("...threadpool has finished"); |
90 | 102 | ||
91 | let fs_res = fs_watcher.stop(); | 103 | let vfs = Arc::try_unwrap(state.vfs).expect("all snapshots should be dead"); |
92 | let ws_res = ws_watcher.stop(); | 104 | let vfs_res = vfs.into_inner().shutdown(); |
93 | 105 | ||
94 | main_res?; | 106 | main_res?; |
95 | fs_res.map_err(|_| format_err!("fs watcher died"))?; | 107 | vfs_res.map_err(|_| format_err!("fs watcher died"))?; |
96 | ws_res.map_err(|_| format_err!("ws watcher died"))?; | ||
97 | 108 | ||
98 | Ok(()) | 109 | Ok(()) |
99 | } | 110 | } |
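
The reworked `main_loop` above now loads the workspace once, synchronously, before entering the loop, and ties the file watcher's lifetime to the `Vfs` stored in the state. A sketch of that load-then-shutdown handshake using std channels in place of the `thread_worker` types (all names and the fake "cargo metadata" step are stand-ins):

    use std::{path::PathBuf, sync::mpsc, thread};

    fn load_workspaces(ws_root: PathBuf) -> Vec<String> {
        let (req_tx, req_rx) = mpsc::channel::<PathBuf>();
        let (res_tx, res_rx) = mpsc::channel::<Result<String, String>>();
        let worker = thread::spawn(move || {
            for root in req_rx {
                // stand-in for the cargo metadata call the real loader performs
                res_tx.send(Ok(format!("workspace at {}", root.display()))).ok();
            }
        });

        req_tx.send(ws_root).unwrap();
        let workspaces = match res_rx.recv().unwrap() {
            Ok(ws) => vec![ws],
            Err(e) => {
                eprintln!("loading workspace failed: {}", e);
                Vec::new()
            }
        };

        drop(req_tx); // closing the request channel lets the worker exit
        worker.join().unwrap();
        workspaces
    }
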
@@ -101,28 +112,22 @@ pub fn main_loop( | |||
101 | fn main_loop_inner( | 112 | fn main_loop_inner( |
102 | internal_mode: bool, | 113 | internal_mode: bool, |
103 | publish_decorations: bool, | 114 | publish_decorations: bool, |
104 | ws_root: PathBuf, | ||
105 | pool: &ThreadPool, | 115 | pool: &ThreadPool, |
106 | msg_sender: &Sender<RawMessage>, | 116 | msg_sender: &Sender<RawMessage>, |
107 | msg_receiver: &Receiver<RawMessage>, | 117 | msg_receiver: &Receiver<RawMessage>, |
108 | task_sender: Sender<Task>, | 118 | task_sender: Sender<Task>, |
109 | task_receiver: Receiver<Task>, | 119 | task_receiver: Receiver<Task>, |
110 | fs_worker: Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, | ||
111 | ws_worker: Worker<PathBuf, Result<CargoWorkspace>>, | ||
112 | state: &mut ServerWorldState, | 120 | state: &mut ServerWorldState, |
113 | pending_requests: &mut FxHashSet<u64>, | 121 | pending_requests: &mut FxHashSet<u64>, |
114 | subs: &mut Subscriptions, | 122 | subs: &mut Subscriptions, |
115 | ) -> Result<()> { | 123 | ) -> Result<()> { |
116 | let (libdata_sender, libdata_receiver) = unbounded(); | 124 | let (libdata_sender, libdata_receiver) = unbounded(); |
117 | ws_worker.send(ws_root.clone()); | ||
118 | fs_worker.send(ws_root.clone()); | ||
119 | loop { | 125 | loop { |
120 | #[derive(Debug)] | 126 | #[derive(Debug)] |
121 | enum Event { | 127 | enum Event { |
122 | Msg(RawMessage), | 128 | Msg(RawMessage), |
123 | Task(Task), | 129 | Task(Task), |
124 | Fs(PathBuf, Vec<FileEvent>), | 130 | Vfs(VfsTask), |
125 | Ws(Result<CargoWorkspace>), | ||
126 | Lib(LibraryData), | 131 | Lib(LibraryData), |
127 | } | 132 | } |
128 | log::trace!("selecting"); | 133 | log::trace!("selecting"); |
@@ -132,77 +137,20 @@ fn main_loop_inner( | |||
132 | None => bail!("client exited without shutdown"), | 137 | None => bail!("client exited without shutdown"), |
133 | }, | 138 | }, |
134 | recv(task_receiver, task) => Event::Task(task.unwrap()), | 139 | recv(task_receiver, task) => Event::Task(task.unwrap()), |
135 | recv(fs_worker.out, events) => match events { | 140 | recv(state.vfs.read().task_receiver(), task) => match task { |
136 | None => bail!("roots watcher died"), | 141 | None => bail!("vfs died"), |
137 | Some((pb, events)) => Event::Fs(pb, events), | 142 | Some(task) => Event::Vfs(task), |
138 | } | ||
139 | recv(ws_worker.out, ws) => match ws { | ||
140 | None => bail!("workspace watcher died"), | ||
141 | Some(ws) => Event::Ws(ws), | ||
142 | } | 143 | } |
143 | recv(libdata_receiver, data) => Event::Lib(data.unwrap()) | 144 | recv(libdata_receiver, data) => Event::Lib(data.unwrap()) |
144 | }; | 145 | }; |
146 | log::info!("{:?}", event); | ||
145 | let mut state_changed = false; | 147 | let mut state_changed = false; |
146 | match event { | 148 | match event { |
147 | Event::Task(task) => on_task(task, msg_sender, pending_requests), | 149 | Event::Task(task) => on_task(task, msg_sender, pending_requests), |
148 | Event::Fs(root, events) => { | 150 | Event::Vfs(task) => { |
149 | log::info!("fs change, {}, {} events", root.display(), events.len()); | 151 | state.vfs.write().handle_task(task); |
150 | if root == ws_root { | ||
151 | state.apply_fs_changes(events); | ||
152 | } else { | ||
153 | let (files, resolver) = state.events_to_files(events); | ||
154 | let sender = libdata_sender.clone(); | ||
155 | pool.execute(move || { | ||
156 | let start = ::std::time::Instant::now(); | ||
157 | log::info!("indexing {} ... ", root.display()); | ||
158 | let data = LibraryData::prepare(files, resolver); | ||
159 | log::info!("indexed {:?} {}", start.elapsed(), root.display()); | ||
160 | sender.send(data); | ||
161 | }); | ||
162 | } | ||
163 | state_changed = true; | 152 | state_changed = true; |
164 | } | 153 | } |
165 | Event::Ws(ws) => match ws { | ||
166 | Ok(ws) => { | ||
167 | let workspaces = vec![ws]; | ||
168 | feedback(internal_mode, "workspace loaded", msg_sender); | ||
169 | for ws in workspaces.iter() { | ||
170 | // Add each library as constant input. If library is | ||
171 | // within the workspace, don't treat it as a library. | ||
172 | // | ||
173 | // HACK: If source roots are nested, pick the outer one. | ||
174 | |||
175 | let mut roots = ws | ||
176 | .packages() | ||
177 | .filter(|pkg| !pkg.is_member(ws)) | ||
178 | .filter_map(|pkg| { | ||
179 | let root = pkg.root(ws).to_path_buf(); | ||
180 | if root.starts_with(&ws_root) { | ||
181 | None | ||
182 | } else { | ||
183 | Some(root) | ||
184 | } | ||
185 | }) | ||
186 | .collect::<Vec<_>>(); | ||
187 | roots.sort_by_key(|it| it.as_os_str().len()); | ||
188 | let unique = roots | ||
189 | .iter() | ||
190 | .enumerate() | ||
191 | .filter(|&(idx, long)| { | ||
192 | !roots[..idx].iter().any(|short| long.starts_with(short)) | ||
193 | }) | ||
194 | .map(|(_idx, root)| root); | ||
195 | |||
196 | for root in unique { | ||
197 | log::debug!("sending root, {}", root.display()); | ||
198 | fs_worker.send(root.to_owned()); | ||
199 | } | ||
200 | } | ||
201 | state.set_workspaces(workspaces); | ||
202 | state_changed = true; | ||
203 | } | ||
204 | Err(e) => log::warn!("loading workspace failed: {}", e), | ||
205 | }, | ||
206 | Event::Lib(lib) => { | 154 | Event::Lib(lib) => { |
207 | feedback(internal_mode, "library loaded", msg_sender); | 155 | feedback(internal_mode, "library loaded", msg_sender); |
208 | state.add_lib(lib); | 156 | state.add_lib(lib); |
@@ -234,6 +182,21 @@ fn main_loop_inner( | |||
234 | }, | 182 | }, |
235 | }; | 183 | }; |
236 | 184 | ||
185 | for lib in state.process_changes() { | ||
186 | let (root, files) = lib; | ||
187 | let sender = libdata_sender.clone(); | ||
188 | pool.execute(move || { | ||
189 | let start = ::std::time::Instant::now(); | ||
190 | log::info!("indexing {:?} ... ", root); | ||
191 | let data = LibraryData::prepare(root, files); | ||
192 | log::info!("indexed {:?} {:?}", start.elapsed(), root); | ||
193 | sender.send(data); | ||
194 | }); | ||
195 | } | ||
196 | if state.roots_to_scan == 0 { | ||
197 | feedback(internal_mode, "workspace loaded", msg_sender); | ||
198 | } | ||
199 | |||
237 | if state_changed { | 200 | if state_changed { |
238 | update_file_notifications_on_threadpool( | 201 | update_file_notifications_on_threadpool( |
239 | pool, | 202 | pool, |
@@ -336,8 +299,13 @@ fn on_notification( | |||
336 | let path = uri | 299 | let path = uri |
337 | .to_file_path() | 300 | .to_file_path() |
338 | .map_err(|()| format_err!("invalid uri: {}", uri))?; | 301 | .map_err(|()| format_err!("invalid uri: {}", uri))?; |
339 | let file_id = state.add_mem_file(path, params.text_document.text); | 302 | if let Some(file_id) = state |
340 | subs.add_sub(file_id); | 303 | .vfs |
304 | .write() | ||
305 | .add_file_overlay(&path, params.text_document.text) | ||
306 | { | ||
307 | subs.add_sub(FileId(file_id.0)); | ||
308 | } | ||
341 | return Ok(()); | 309 | return Ok(()); |
342 | } | 310 | } |
343 | Err(not) => not, | 311 | Err(not) => not, |
@@ -353,7 +321,7 @@ fn on_notification( | |||
353 | .pop() | 321 | .pop() |
354 | .ok_or_else(|| format_err!("empty changes"))? | 322 | .ok_or_else(|| format_err!("empty changes"))? |
355 | .text; | 323 | .text; |
356 | state.change_mem_file(path.as_path(), text)?; | 324 | state.vfs.write().change_file_overlay(path.as_path(), text); |
357 | return Ok(()); | 325 | return Ok(()); |
358 | } | 326 | } |
359 | Err(not) => not, | 327 | Err(not) => not, |
@@ -364,8 +332,9 @@ fn on_notification( | |||
364 | let path = uri | 332 | let path = uri |
365 | .to_file_path() | 333 | .to_file_path() |
366 | .map_err(|()| format_err!("invalid uri: {}", uri))?; | 334 | .map_err(|()| format_err!("invalid uri: {}", uri))?; |
367 | let file_id = state.remove_mem_file(path.as_path())?; | 335 | if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) { |
368 | subs.remove_sub(file_id); | 336 | subs.remove_sub(FileId(file_id.0)); |
337 | } | ||
369 | let params = req::PublishDiagnosticsParams { | 338 | let params = req::PublishDiagnosticsParams { |
370 | uri, | 339 | uri, |
371 | diagnostics: Vec::new(), | 340 | diagnostics: Vec::new(), |
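Taken together, the main-loop changes above replace the separate fs/ws workers with a single Vfs owned by the state: scan results arrive on the Vfs' task receiver, are folded back in with handle_task, and process_changes() then yields either analysis input or library roots to index on the pool. The sketch below shows the same round trip in a simplified, blocking form using only the ra_vfs API added later in this change (the real loop interleaves this with LSP messages via select! and never blocks):

```rust
use std::path::PathBuf;
use ra_vfs::{Vfs, VfsChange};

// Blocking stand-in for what main_loop_inner does incrementally:
// scan every root to completion, then collect the pending VfsChanges.
fn scan_roots(roots: Vec<PathBuf>) -> Vec<VfsChange> {
    let (mut vfs, root_ids) = Vfs::new(roots);
    // One TaskResult arrives per root.
    for _ in 0..root_ids.len() {
        let task = vfs.task_receiver().recv().unwrap();
        vfs.handle_task(task);
    }
    let changes = vfs.commit_changes();
    vfs.shutdown().unwrap();
    changes
}
```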
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index acca480c7..572ae7fb5 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs | |||
@@ -326,9 +326,9 @@ pub fn handle_runnables( | |||
326 | None => return Ok(None), | 326 | None => return Ok(None), |
327 | }; | 327 | }; |
328 | let file_id = world.analysis().crate_root(crate_id)?; | 328 | let file_id = world.analysis().crate_root(crate_id)?; |
329 | let path = world.path_map.get_path(file_id); | 329 | let path = world.vfs.read().file2path(ra_vfs::VfsFile(file_id.0)); |
330 | let res = world.workspaces.iter().find_map(|ws| { | 330 | let res = world.workspaces.iter().find_map(|ws| { |
331 | let tgt = ws.target_by_root(path)?; | 331 | let tgt = ws.target_by_root(&path)?; |
332 | let res = CargoTargetSpec { | 332 | let res = CargoTargetSpec { |
333 | package: tgt.package(ws).name(ws).to_string(), | 333 | package: tgt.package(ws).name(ws).to_string(), |
334 | target: tgt.name(ws).to_string(), | 334 | target: tgt.name(ws).to_string(), |
diff --git a/crates/ra_lsp_server/src/path_map.rs b/crates/ra_lsp_server/src/path_map.rs deleted file mode 100644 index 02e54629c..000000000 --- a/crates/ra_lsp_server/src/path_map.rs +++ /dev/null | |||
@@ -1,126 +0,0 @@ | |||
1 | use std::{ | ||
2 | fmt, | ||
3 | path::{Component, Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use im; | ||
7 | use ra_analysis::{FileId, FileResolver}; | ||
8 | use relative_path::RelativePath; | ||
9 | |||
10 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | ||
11 | pub enum Root { | ||
12 | Workspace, | ||
13 | Lib, | ||
14 | } | ||
15 | |||
16 | #[derive(Default, Clone)] | ||
17 | pub struct PathMap { | ||
18 | next_id: u32, | ||
19 | path2id: im::HashMap<PathBuf, FileId>, | ||
20 | id2path: im::HashMap<FileId, PathBuf>, | ||
21 | id2root: im::HashMap<FileId, Root>, | ||
22 | } | ||
23 | |||
24 | impl fmt::Debug for PathMap { | ||
25 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
26 | f.write_str("PathMap { ... }") | ||
27 | } | ||
28 | } | ||
29 | |||
30 | impl PathMap { | ||
31 | pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> (bool, FileId) { | ||
32 | let mut inserted = false; | ||
33 | let file_id = self | ||
34 | .path2id | ||
35 | .get(path.as_path()) | ||
36 | .map(|&id| id) | ||
37 | .unwrap_or_else(|| { | ||
38 | inserted = true; | ||
39 | let id = self.new_file_id(); | ||
40 | self.insert(path, id, root); | ||
41 | id | ||
42 | }); | ||
43 | (inserted, file_id) | ||
44 | } | ||
45 | pub fn get_id(&self, path: &Path) -> Option<FileId> { | ||
46 | self.path2id.get(path).cloned() | ||
47 | } | ||
48 | pub fn get_path(&self, file_id: FileId) -> &Path { | ||
49 | self.id2path.get(&file_id).unwrap().as_path() | ||
50 | } | ||
51 | pub fn get_root(&self, file_id: FileId) -> Root { | ||
52 | self.id2root[&file_id] | ||
53 | } | ||
54 | fn insert(&mut self, path: PathBuf, file_id: FileId, root: Root) { | ||
55 | self.path2id.insert(path.clone(), file_id); | ||
56 | self.id2path.insert(file_id, path.clone()); | ||
57 | self.id2root.insert(file_id, root); | ||
58 | } | ||
59 | |||
60 | fn new_file_id(&mut self) -> FileId { | ||
61 | let id = FileId(self.next_id); | ||
62 | self.next_id += 1; | ||
63 | id | ||
64 | } | ||
65 | } | ||
66 | |||
67 | impl FileResolver for PathMap { | ||
68 | fn file_stem(&self, file_id: FileId) -> String { | ||
69 | self.get_path(file_id) | ||
70 | .file_stem() | ||
71 | .unwrap() | ||
72 | .to_str() | ||
73 | .unwrap() | ||
74 | .to_string() | ||
75 | } | ||
76 | |||
77 | fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> { | ||
78 | let path = path.to_path(&self.get_path(file_id)); | ||
79 | let path = normalize(&path); | ||
80 | self.get_id(&path) | ||
81 | } | ||
82 | |||
83 | fn debug_path(&self, file_id: FileId) -> Option<PathBuf> { | ||
84 | Some(self.get_path(file_id).to_owned()) | ||
85 | } | ||
86 | } | ||
87 | |||
88 | fn normalize(path: &Path) -> PathBuf { | ||
89 | let mut components = path.components().peekable(); | ||
90 | let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { | ||
91 | components.next(); | ||
92 | PathBuf::from(c.as_os_str()) | ||
93 | } else { | ||
94 | PathBuf::new() | ||
95 | }; | ||
96 | |||
97 | for component in components { | ||
98 | match component { | ||
99 | Component::Prefix(..) => unreachable!(), | ||
100 | Component::RootDir => { | ||
101 | ret.push(component.as_os_str()); | ||
102 | } | ||
103 | Component::CurDir => {} | ||
104 | Component::ParentDir => { | ||
105 | ret.pop(); | ||
106 | } | ||
107 | Component::Normal(c) => { | ||
108 | ret.push(c); | ||
109 | } | ||
110 | } | ||
111 | } | ||
112 | ret | ||
113 | } | ||
114 | |||
115 | #[cfg(test)] | ||
116 | mod test { | ||
117 | use super::*; | ||
118 | |||
119 | #[test] | ||
120 | fn test_resolve() { | ||
121 | let mut m = PathMap::default(); | ||
122 | let (_, id1) = m.get_or_insert(PathBuf::from("/foo"), Root::Workspace); | ||
123 | let (_, id2) = m.get_or_insert(PathBuf::from("/foo/bar.rs"), Root::Workspace); | ||
124 | assert_eq!(m.resolve(id1, &RelativePath::new("bar.rs")), Some(id2),) | ||
125 | } | ||
126 | } | ||
diff --git a/crates/ra_lsp_server/src/project_model.rs b/crates/ra_lsp_server/src/project_model.rs index b881f8b6f..5852a157d 100644 --- a/crates/ra_lsp_server/src/project_model.rs +++ b/crates/ra_lsp_server/src/project_model.rs | |||
@@ -69,6 +69,7 @@ impl Package { | |||
69 | pub fn targets<'a>(self, ws: &'a CargoWorkspace) -> impl Iterator<Item = Target> + 'a { | 69 | pub fn targets<'a>(self, ws: &'a CargoWorkspace) -> impl Iterator<Item = Target> + 'a { |
70 | ws.pkg(self).targets.iter().cloned() | 70 | ws.pkg(self).targets.iter().cloned() |
71 | } | 71 | } |
72 | #[allow(unused)] | ||
72 | pub fn is_member(self, ws: &CargoWorkspace) -> bool { | 73 | pub fn is_member(self, ws: &CargoWorkspace) -> bool { |
73 | ws.pkg(self).is_member | 74 | ws.pkg(self).is_member |
74 | } | 75 | } |
diff --git a/crates/ra_lsp_server/src/server_world.rs b/crates/ra_lsp_server/src/server_world.rs index ab4c2c8aa..785877c4b 100644 --- a/crates/ra_lsp_server/src/server_world.rs +++ b/crates/ra_lsp_server/src/server_world.rs | |||
@@ -1,154 +1,66 @@ | |||
1 | use std::{ | 1 | use std::{ |
2 | fs, | 2 | path::{PathBuf}, |
3 | path::{Path, PathBuf}, | ||
4 | sync::Arc, | 3 | sync::Arc, |
5 | }; | 4 | }; |
6 | 5 | ||
7 | use languageserver_types::Url; | 6 | use languageserver_types::Url; |
8 | use ra_analysis::{ | 7 | use ra_analysis::{ |
9 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FileResolver, LibraryData, | 8 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, |
9 | SourceRootId | ||
10 | }; | 10 | }; |
11 | use ra_vfs::{Vfs, VfsChange, VfsFile}; | ||
11 | use rustc_hash::FxHashMap; | 12 | use rustc_hash::FxHashMap; |
12 | use failure::{bail, format_err}; | 13 | use relative_path::RelativePathBuf; |
14 | use parking_lot::RwLock; | ||
15 | use failure::{format_err}; | ||
13 | 16 | ||
14 | use crate::{ | 17 | use crate::{ |
15 | path_map::{PathMap, Root}, | ||
16 | project_model::{CargoWorkspace, TargetKind}, | 18 | project_model::{CargoWorkspace, TargetKind}, |
17 | vfs::{FileEvent, FileEventKind}, | ||
18 | Result, | 19 | Result, |
19 | }; | 20 | }; |
20 | 21 | ||
21 | #[derive(Debug, Default)] | 22 | #[derive(Debug)] |
22 | pub struct ServerWorldState { | 23 | pub struct ServerWorldState { |
24 | pub roots_to_scan: usize, | ||
25 | pub root: PathBuf, | ||
23 | pub workspaces: Arc<Vec<CargoWorkspace>>, | 26 | pub workspaces: Arc<Vec<CargoWorkspace>>, |
24 | pub analysis_host: AnalysisHost, | 27 | pub analysis_host: AnalysisHost, |
25 | pub path_map: PathMap, | 28 | pub vfs: Arc<RwLock<Vfs>>, |
26 | pub mem_map: FxHashMap<FileId, Option<String>>, | ||
27 | } | 29 | } |
28 | 30 | ||
29 | pub struct ServerWorld { | 31 | pub struct ServerWorld { |
30 | pub workspaces: Arc<Vec<CargoWorkspace>>, | 32 | pub workspaces: Arc<Vec<CargoWorkspace>>, |
31 | pub analysis: Analysis, | 33 | pub analysis: Analysis, |
32 | pub path_map: PathMap, | 34 | pub vfs: Arc<RwLock<Vfs>>, |
33 | } | 35 | } |
34 | 36 | ||
35 | impl ServerWorldState { | 37 | impl ServerWorldState { |
36 | pub fn apply_fs_changes(&mut self, events: Vec<FileEvent>) { | 38 | pub fn new(root: PathBuf, workspaces: Vec<CargoWorkspace>) -> ServerWorldState { |
37 | let mut change = AnalysisChange::new(); | 39 | let mut change = AnalysisChange::new(); |
38 | let mut inserted = false; | ||
39 | { | ||
40 | let pm = &mut self.path_map; | ||
41 | let mm = &mut self.mem_map; | ||
42 | events | ||
43 | .into_iter() | ||
44 | .map(|event| { | ||
45 | let text = match event.kind { | ||
46 | FileEventKind::Add(text) => text, | ||
47 | }; | ||
48 | (event.path, text) | ||
49 | }) | ||
50 | .map(|(path, text)| { | ||
51 | let (ins, file_id) = pm.get_or_insert(path, Root::Workspace); | ||
52 | inserted |= ins; | ||
53 | (file_id, text) | ||
54 | }) | ||
55 | .filter_map(|(file_id, text)| { | ||
56 | if mm.contains_key(&file_id) { | ||
57 | mm.insert(file_id, Some(text)); | ||
58 | None | ||
59 | } else { | ||
60 | Some((file_id, text)) | ||
61 | } | ||
62 | }) | ||
63 | .for_each(|(file_id, text)| change.add_file(file_id, text)); | ||
64 | } | ||
65 | if inserted { | ||
66 | change.set_file_resolver(Arc::new(self.path_map.clone())) | ||
67 | } | ||
68 | self.analysis_host.apply_change(change); | ||
69 | } | ||
70 | pub fn events_to_files( | ||
71 | &mut self, | ||
72 | events: Vec<FileEvent>, | ||
73 | ) -> (Vec<(FileId, String)>, Arc<FileResolver>) { | ||
74 | let files = { | ||
75 | let pm = &mut self.path_map; | ||
76 | events | ||
77 | .into_iter() | ||
78 | .map(|event| { | ||
79 | let FileEventKind::Add(text) = event.kind; | ||
80 | (event.path, text) | ||
81 | }) | ||
82 | .map(|(path, text)| (pm.get_or_insert(path, Root::Lib).1, text)) | ||
83 | .collect() | ||
84 | }; | ||
85 | let resolver = Arc::new(self.path_map.clone()); | ||
86 | (files, resolver) | ||
87 | } | ||
88 | pub fn add_lib(&mut self, data: LibraryData) { | ||
89 | let mut change = AnalysisChange::new(); | ||
90 | change.add_library(data); | ||
91 | self.analysis_host.apply_change(change); | ||
92 | } | ||
93 | 40 | ||
94 | pub fn add_mem_file(&mut self, path: PathBuf, text: String) -> FileId { | 41 | let mut roots = Vec::new(); |
95 | let (inserted, file_id) = self.path_map.get_or_insert(path, Root::Workspace); | 42 | roots.push(root.clone()); |
96 | if self.path_map.get_root(file_id) != Root::Lib { | 43 | for ws in workspaces.iter() { |
97 | let mut change = AnalysisChange::new(); | 44 | for pkg in ws.packages() { |
98 | if inserted { | 45 | roots.push(pkg.root(&ws).to_path_buf()); |
99 | change.add_file(file_id, text); | ||
100 | change.set_file_resolver(Arc::new(self.path_map.clone())); | ||
101 | } else { | ||
102 | change.change_file(file_id, text); | ||
103 | } | 46 | } |
104 | self.analysis_host.apply_change(change); | ||
105 | } | 47 | } |
106 | self.mem_map.insert(file_id, None); | 48 | let roots_to_scan = roots.len(); |
107 | file_id | 49 | let (mut vfs, roots) = Vfs::new(roots); |
108 | } | 50 | for r in roots { |
109 | 51 | let is_local = vfs.root2path(r).starts_with(&root); | |
110 | pub fn change_mem_file(&mut self, path: &Path, text: String) -> Result<()> { | 52 | change.add_root(SourceRootId(r.0), is_local); |
111 | let file_id = self | ||
112 | .path_map | ||
113 | .get_id(path) | ||
114 | .ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?; | ||
115 | if self.path_map.get_root(file_id) != Root::Lib { | ||
116 | let mut change = AnalysisChange::new(); | ||
117 | change.change_file(file_id, text); | ||
118 | self.analysis_host.apply_change(change); | ||
119 | } | 53 | } |
120 | Ok(()) | ||
121 | } | ||
122 | 54 | ||
123 | pub fn remove_mem_file(&mut self, path: &Path) -> Result<FileId> { | ||
124 | let file_id = self | ||
125 | .path_map | ||
126 | .get_id(path) | ||
127 | .ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?; | ||
128 | match self.mem_map.remove(&file_id) { | ||
129 | Some(_) => (), | ||
130 | None => bail!("unmatched close notification"), | ||
131 | }; | ||
132 | // Do this via file watcher ideally. | ||
133 | let text = fs::read_to_string(path).ok(); | ||
134 | if self.path_map.get_root(file_id) != Root::Lib { | ||
135 | let mut change = AnalysisChange::new(); | ||
136 | if let Some(text) = text { | ||
137 | change.change_file(file_id, text); | ||
138 | } | ||
139 | self.analysis_host.apply_change(change); | ||
140 | } | ||
141 | Ok(file_id) | ||
142 | } | ||
143 | pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) { | ||
144 | let mut crate_graph = CrateGraph::default(); | 55 | let mut crate_graph = CrateGraph::default(); |
145 | let mut pkg_to_lib_crate = FxHashMap::default(); | 56 | let mut pkg_to_lib_crate = FxHashMap::default(); |
146 | let mut pkg_crates = FxHashMap::default(); | 57 | let mut pkg_crates = FxHashMap::default(); |
147 | for ws in ws.iter() { | 58 | for ws in workspaces.iter() { |
148 | for pkg in ws.packages() { | 59 | for pkg in ws.packages() { |
149 | for tgt in pkg.targets(ws) { | 60 | for tgt in pkg.targets(ws) { |
150 | let root = tgt.root(ws); | 61 | let root = tgt.root(ws); |
151 | if let Some(file_id) = self.path_map.get_id(root) { | 62 | if let Some(file_id) = vfs.load(root) { |
63 | let file_id = FileId(file_id.0); | ||
152 | let crate_id = crate_graph.add_crate_root(file_id); | 64 | let crate_id = crate_graph.add_crate_root(file_id); |
153 | if tgt.kind(ws) == TargetKind::Lib { | 65 | if tgt.kind(ws) == TargetKind::Lib { |
154 | pkg_to_lib_crate.insert(pkg, crate_id); | 66 | pkg_to_lib_crate.insert(pkg, crate_id); |
@@ -170,16 +82,80 @@ impl ServerWorldState { | |||
170 | } | 82 | } |
171 | } | 83 | } |
172 | } | 84 | } |
173 | self.workspaces = Arc::new(ws); | ||
174 | let mut change = AnalysisChange::new(); | ||
175 | change.set_crate_graph(crate_graph); | 85 | change.set_crate_graph(crate_graph); |
86 | |||
87 | let mut analysis_host = AnalysisHost::default(); | ||
88 | analysis_host.apply_change(change); | ||
89 | ServerWorldState { | ||
90 | roots_to_scan, | ||
91 | root, | ||
92 | workspaces: Arc::new(workspaces), | ||
93 | analysis_host, | ||
94 | vfs: Arc::new(RwLock::new(vfs)), | ||
95 | } | ||
96 | } | ||
97 | |||
98 | /// Returns a vec of libraries | ||
99 | /// FIXME: better API here | ||
100 | pub fn process_changes( | ||
101 | &mut self, | ||
102 | ) -> Vec<(SourceRootId, Vec<(FileId, RelativePathBuf, Arc<String>)>)> { | ||
103 | let changes = self.vfs.write().commit_changes(); | ||
104 | if changes.is_empty() { | ||
105 | return Vec::new(); | ||
106 | } | ||
107 | let mut libs = Vec::new(); | ||
108 | let mut change = AnalysisChange::new(); | ||
109 | for c in changes { | ||
110 | log::info!("vfs change {:?}", c); | ||
111 | match c { | ||
112 | VfsChange::AddRoot { root, files } => { | ||
113 | let root_path = self.vfs.read().root2path(root); | ||
114 | if root_path.starts_with(&self.root) { | ||
115 | self.roots_to_scan -= 1; | ||
116 | for (file, path, text) in files { | ||
117 | change.add_file(SourceRootId(root.0), FileId(file.0), path, text); | ||
118 | } | ||
119 | } else { | ||
120 | let files = files | ||
121 | .into_iter() | ||
122 | .map(|(vfsfile, path, text)| (FileId(vfsfile.0), path, text)) | ||
123 | .collect(); | ||
124 | libs.push((SourceRootId(root.0), files)); | ||
125 | } | ||
126 | } | ||
127 | VfsChange::AddFile { | ||
128 | root, | ||
129 | file, | ||
130 | path, | ||
131 | text, | ||
132 | } => { | ||
133 | change.add_file(SourceRootId(root.0), FileId(file.0), path, text); | ||
134 | } | ||
135 | VfsChange::RemoveFile { root, file, path } => { | ||
136 | change.remove_file(SourceRootId(root.0), FileId(file.0), path) | ||
137 | } | ||
138 | VfsChange::ChangeFile { file, text } => { | ||
139 | change.change_file(FileId(file.0), text); | ||
140 | } | ||
141 | } | ||
142 | } | ||
176 | self.analysis_host.apply_change(change); | 143 | self.analysis_host.apply_change(change); |
144 | libs | ||
177 | } | 145 | } |
146 | |||
147 | pub fn add_lib(&mut self, data: LibraryData) { | ||
148 | self.roots_to_scan -= 1; | ||
149 | let mut change = AnalysisChange::new(); | ||
150 | change.add_library(data); | ||
151 | self.analysis_host.apply_change(change); | ||
152 | } | ||
153 | |||
178 | pub fn snapshot(&self) -> ServerWorld { | 154 | pub fn snapshot(&self) -> ServerWorld { |
179 | ServerWorld { | 155 | ServerWorld { |
180 | workspaces: Arc::clone(&self.workspaces), | 156 | workspaces: Arc::clone(&self.workspaces), |
181 | analysis: self.analysis_host.analysis(), | 157 | analysis: self.analysis_host.analysis(), |
182 | path_map: self.path_map.clone(), | 158 | vfs: Arc::clone(&self.vfs), |
183 | } | 159 | } |
184 | } | 160 | } |
185 | } | 161 | } |
@@ -193,15 +169,18 @@ impl ServerWorld { | |||
193 | let path = uri | 169 | let path = uri |
194 | .to_file_path() | 170 | .to_file_path() |
195 | .map_err(|()| format_err!("invalid uri: {}", uri))?; | 171 | .map_err(|()| format_err!("invalid uri: {}", uri))?; |
196 | self.path_map | 172 | let file = self |
197 | .get_id(&path) | 173 | .vfs |
198 | .ok_or_else(|| format_err!("unknown file: {}", path.display())) | 174 | .read() |
175 | .path2file(&path) | ||
176 | .ok_or_else(|| format_err!("unknown file: {}", path.display()))?; | ||
177 | Ok(FileId(file.0)) | ||
199 | } | 178 | } |
200 | 179 | ||
201 | pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> { | 180 | pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> { |
202 | let path = self.path_map.get_path(id); | 181 | let path = self.vfs.read().file2path(VfsFile(id.0)); |
203 | let url = Url::from_file_path(path) | 182 | let url = Url::from_file_path(&path) |
204 | .map_err(|()| format_err!("can't convert path to url: {}", path.display()))?; | 183 | .map_err(|_| format_err!("can't convert path to url: {}", path.display()))?; |
205 | Ok(url) | 184 | Ok(url) |
206 | } | 185 | } |
207 | } | 186 | } |
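A small point worth making explicit about server_world.rs (and handle_runnables above): ra_vfs::VfsFile and ra_analysis::FileId are both plain u32 newtypes allocated in lockstep, so converting between them is just rewrapping the index. A toy sketch of that convention with local stand-in types (the real definitions live in ra_vfs and ra_analysis):

```rust
// Local stand-ins for illustration; the real types are ra_vfs::VfsFile and ra_analysis::FileId.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct VfsFile(pub u32);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct FileId(pub u32);

// The two conversions used throughout server_world.rs and the handlers.
fn to_analysis(file: VfsFile) -> FileId {
    FileId(file.0)
}
fn to_vfs(file: FileId) -> VfsFile {
    VfsFile(file.0)
}

fn main() {
    assert_eq!(to_analysis(VfsFile(7)), FileId(7));
    assert_eq!(to_vfs(FileId(7)), VfsFile(7));
}
```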
diff --git a/crates/ra_lsp_server/src/vfs.rs b/crates/ra_lsp_server/src/vfs.rs deleted file mode 100644 index fcf7693d8..000000000 --- a/crates/ra_lsp_server/src/vfs.rs +++ /dev/null | |||
@@ -1,67 +0,0 @@ | |||
1 | use std::{ | ||
2 | fs, | ||
3 | path::{Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use walkdir::WalkDir; | ||
7 | use thread_worker::{WorkerHandle, Worker}; | ||
8 | |||
9 | #[derive(Debug)] | ||
10 | pub struct FileEvent { | ||
11 | pub path: PathBuf, | ||
12 | pub kind: FileEventKind, | ||
13 | } | ||
14 | |||
15 | #[derive(Debug)] | ||
16 | pub enum FileEventKind { | ||
17 | Add(String), | ||
18 | } | ||
19 | |||
20 | pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, WorkerHandle) { | ||
21 | thread_worker::spawn::<PathBuf, (PathBuf, Vec<FileEvent>), _>( | ||
22 | "roots loader", | ||
23 | 128, | ||
24 | |input_receiver, output_sender| { | ||
25 | input_receiver | ||
26 | .map(|path| { | ||
27 | log::debug!("loading {} ...", path.as_path().display()); | ||
28 | let events = load_root(path.as_path()); | ||
29 | log::debug!("... loaded {}", path.as_path().display()); | ||
30 | (path, events) | ||
31 | }) | ||
32 | .for_each(|it| output_sender.send(it)) | ||
33 | }, | ||
34 | ) | ||
35 | } | ||
36 | |||
37 | fn load_root(path: &Path) -> Vec<FileEvent> { | ||
38 | let mut res = Vec::new(); | ||
39 | for entry in WalkDir::new(path) { | ||
40 | let entry = match entry { | ||
41 | Ok(entry) => entry, | ||
42 | Err(e) => { | ||
43 | log::warn!("watcher error: {}", e); | ||
44 | continue; | ||
45 | } | ||
46 | }; | ||
47 | if !entry.file_type().is_file() { | ||
48 | continue; | ||
49 | } | ||
50 | let path = entry.path(); | ||
51 | if path.extension().and_then(|os| os.to_str()) != Some("rs") { | ||
52 | continue; | ||
53 | } | ||
54 | let text = match fs::read_to_string(path) { | ||
55 | Ok(text) => text, | ||
56 | Err(e) => { | ||
57 | log::warn!("watcher error: {}", e); | ||
58 | continue; | ||
59 | } | ||
60 | }; | ||
61 | res.push(FileEvent { | ||
62 | path: path.to_owned(), | ||
63 | kind: FileEventKind::Add(text), | ||
64 | }) | ||
65 | } | ||
66 | res | ||
67 | } | ||
diff --git a/crates/ra_lsp_server/tests/heavy_tests/main.rs b/crates/ra_lsp_server/tests/heavy_tests/main.rs index 26f5e3f20..029a55d40 100644 --- a/crates/ra_lsp_server/tests/heavy_tests/main.rs +++ b/crates/ra_lsp_server/tests/heavy_tests/main.rs | |||
@@ -1,9 +1,7 @@ | |||
1 | mod support; | 1 | mod support; |
2 | 2 | ||
3 | use serde_json::json; | 3 | use serde_json::json; |
4 | |||
5 | use ra_lsp_server::req::{Runnables, RunnablesParams, CodeActionRequest, CodeActionParams}; | 4 | use ra_lsp_server::req::{Runnables, RunnablesParams, CodeActionRequest, CodeActionParams}; |
6 | |||
7 | use languageserver_types::{Position, Range, CodeActionContext}; | 5 | use languageserver_types::{Position, Range, CodeActionContext}; |
8 | 6 | ||
9 | use crate::support::project; | 7 | use crate::support::project; |
@@ -20,6 +18,7 @@ fn foo() { | |||
20 | } | 18 | } |
21 | ", | 19 | ", |
22 | ); | 20 | ); |
21 | server.wait_for_feedback("workspace loaded"); | ||
23 | server.request::<Runnables>( | 22 | server.request::<Runnables>( |
24 | RunnablesParams { | 23 | RunnablesParams { |
25 | text_document: server.doc_id("lib.rs"), | 24 | text_document: server.doc_id("lib.rs"), |
diff --git a/crates/ra_lsp_server/tests/heavy_tests/support.rs b/crates/ra_lsp_server/tests/heavy_tests/support.rs index 07a878a26..c14d287ca 100644 --- a/crates/ra_lsp_server/tests/heavy_tests/support.rs +++ b/crates/ra_lsp_server/tests/heavy_tests/support.rs | |||
@@ -174,11 +174,11 @@ impl Server { | |||
174 | impl Drop for Server { | 174 | impl Drop for Server { |
175 | fn drop(&mut self) { | 175 | fn drop(&mut self) { |
176 | self.send_request::<Shutdown>(666, ()); | 176 | self.send_request::<Shutdown>(666, ()); |
177 | let receiver = self.worker.take().unwrap().stop(); | 177 | let receiver = self.worker.take().unwrap().shutdown(); |
178 | while let Some(msg) = recv_timeout(&receiver) { | 178 | while let Some(msg) = recv_timeout(&receiver) { |
179 | drop(msg); | 179 | drop(msg); |
180 | } | 180 | } |
181 | self.watcher.take().unwrap().stop().unwrap(); | 181 | self.watcher.take().unwrap().shutdown().unwrap(); |
182 | } | 182 | } |
183 | } | 183 | } |
184 | 184 | ||
diff --git a/crates/ra_vfs/Cargo.toml b/crates/ra_vfs/Cargo.toml new file mode 100644 index 000000000..ccea8a866 --- /dev/null +++ b/crates/ra_vfs/Cargo.toml | |||
@@ -0,0 +1,17 @@ | |||
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_vfs" | ||
4 | version = "0.1.0" | ||
5 | authors = ["Aleksey Kladov <[email protected]>"] | ||
6 | |||
7 | [dependencies] | ||
8 | walkdir = "2.2.7" | ||
9 | relative-path = "0.4.0" | ||
10 | rustc-hash = "1.0" | ||
11 | crossbeam-channel = "0.2.4" | ||
12 | log = "0.4.6" | ||
13 | |||
14 | thread_worker = { path = "../thread_worker" } | ||
15 | |||
16 | [dev-dependencies] | ||
17 | tempfile = "3" | ||
diff --git a/crates/ra_vfs/src/arena.rs b/crates/ra_vfs/src/arena.rs new file mode 100644 index 000000000..6b42ae26d --- /dev/null +++ b/crates/ra_vfs/src/arena.rs | |||
@@ -0,0 +1,53 @@ | |||
1 | use std::{ | ||
2 | marker::PhantomData, | ||
3 | ops::{Index, IndexMut}, | ||
4 | }; | ||
5 | |||
6 | #[derive(Clone, Debug)] | ||
7 | pub(crate) struct Arena<ID: ArenaId, T> { | ||
8 | data: Vec<T>, | ||
9 | _ty: PhantomData<ID>, | ||
10 | } | ||
11 | |||
12 | pub(crate) trait ArenaId { | ||
13 | fn from_u32(id: u32) -> Self; | ||
14 | fn to_u32(self) -> u32; | ||
15 | } | ||
16 | |||
17 | impl<ID: ArenaId, T> Arena<ID, T> { | ||
18 | pub fn alloc(&mut self, value: T) -> ID { | ||
19 | let id = self.data.len() as u32; | ||
20 | self.data.push(value); | ||
21 | ID::from_u32(id) | ||
22 | } | ||
23 | pub fn iter<'a>(&'a self) -> impl Iterator<Item = (ID, &'a T)> { | ||
24 | self.data | ||
25 | .iter() | ||
26 | .enumerate() | ||
27 | .map(|(idx, value)| (ID::from_u32(idx as u32), value)) | ||
28 | } | ||
29 | } | ||
30 | |||
31 | impl<ID: ArenaId, T> Default for Arena<ID, T> { | ||
32 | fn default() -> Arena<ID, T> { | ||
33 | Arena { | ||
34 | data: Vec::new(), | ||
35 | _ty: PhantomData, | ||
36 | } | ||
37 | } | ||
38 | } | ||
39 | |||
40 | impl<ID: ArenaId, T> Index<ID> for Arena<ID, T> { | ||
41 | type Output = T; | ||
42 | fn index(&self, idx: ID) -> &T { | ||
43 | let idx = idx.to_u32() as usize; | ||
44 | &self.data[idx] | ||
45 | } | ||
46 | } | ||
47 | |||
48 | impl<ID: ArenaId, T> IndexMut<ID> for Arena<ID, T> { | ||
49 | fn index_mut(&mut self, idx: ID) -> &mut T { | ||
50 | let idx = idx.to_u32() as usize; | ||
51 | &mut self.data[idx] | ||
52 | } | ||
53 | } | ||
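Arena and ArenaId above are the crate-private building blocks behind VfsRoot and VfsFile: allocate a value, get back a typed index, and use that index for cheap lookups later. A short usage sketch from inside ra_vfs (NoteId is a hypothetical id type for illustration; VfsRoot and VfsFile in lib.rs implement ArenaId the same way):

```rust
// Hypothetical id type; any u32 newtype implementing ArenaId works.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
struct NoteId(u32);

impl ArenaId for NoteId {
    fn from_u32(idx: u32) -> NoteId {
        NoteId(idx)
    }
    fn to_u32(self) -> u32 {
        self.0
    }
}

fn demo() {
    // Ids are just indices into the backing Vec.
    let mut notes: Arena<NoteId, String> = Arena::default();
    let id = notes.alloc("hello".to_string());
    assert_eq!(notes[id], "hello");
    for (id, text) in notes.iter() {
        println!("{:?}: {}", id, text);
    }
}
```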
diff --git a/crates/ra_vfs/src/io.rs b/crates/ra_vfs/src/io.rs new file mode 100644 index 000000000..be400bae9 --- /dev/null +++ b/crates/ra_vfs/src/io.rs | |||
@@ -0,0 +1,76 @@ | |||
1 | use std::{ | ||
2 | fmt, | ||
3 | fs, | ||
4 | path::{Path, PathBuf}, | ||
5 | }; | ||
6 | |||
7 | use walkdir::{DirEntry, WalkDir}; | ||
8 | use thread_worker::{WorkerHandle}; | ||
9 | use relative_path::RelativePathBuf; | ||
10 | |||
11 | use crate::VfsRoot; | ||
12 | |||
13 | pub(crate) struct Task { | ||
14 | pub(crate) root: VfsRoot, | ||
15 | pub(crate) path: PathBuf, | ||
16 | pub(crate) filter: Box<Fn(&DirEntry) -> bool + Send>, | ||
17 | } | ||
18 | |||
19 | pub struct TaskResult { | ||
20 | pub(crate) root: VfsRoot, | ||
21 | pub(crate) files: Vec<(RelativePathBuf, String)>, | ||
22 | } | ||
23 | |||
24 | impl fmt::Debug for TaskResult { | ||
25 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
26 | f.write_str("TaskResult { ... }") | ||
27 | } | ||
28 | } | ||
29 | |||
30 | pub(crate) type Worker = thread_worker::Worker<Task, TaskResult>; | ||
31 | |||
32 | pub(crate) fn start() -> (Worker, WorkerHandle) { | ||
33 | thread_worker::spawn("vfs", 128, |input_receiver, output_sender| { | ||
34 | input_receiver | ||
35 | .map(handle_task) | ||
36 | .for_each(|it| output_sender.send(it)) | ||
37 | }) | ||
38 | } | ||
39 | |||
40 | fn handle_task(task: Task) -> TaskResult { | ||
41 | let Task { root, path, filter } = task; | ||
42 | log::debug!("loading {} ...", path.as_path().display()); | ||
43 | let files = load_root(path.as_path(), &*filter); | ||
44 | log::debug!("... loaded {}", path.as_path().display()); | ||
45 | TaskResult { root, files } | ||
46 | } | ||
47 | |||
48 | fn load_root(root: &Path, filter: &dyn Fn(&DirEntry) -> bool) -> Vec<(RelativePathBuf, String)> { | ||
49 | let mut res = Vec::new(); | ||
50 | for entry in WalkDir::new(root).into_iter().filter_entry(filter) { | ||
51 | let entry = match entry { | ||
52 | Ok(entry) => entry, | ||
53 | Err(e) => { | ||
54 | log::warn!("watcher error: {}", e); | ||
55 | continue; | ||
56 | } | ||
57 | }; | ||
58 | if !entry.file_type().is_file() { | ||
59 | continue; | ||
60 | } | ||
61 | let path = entry.path(); | ||
62 | if path.extension().and_then(|os| os.to_str()) != Some("rs") { | ||
63 | continue; | ||
64 | } | ||
65 | let text = match fs::read_to_string(path) { | ||
66 | Ok(text) => text, | ||
67 | Err(e) => { | ||
68 | log::warn!("watcher error: {}", e); | ||
69 | continue; | ||
70 | } | ||
71 | }; | ||
72 | let path = RelativePathBuf::from_path(path.strip_prefix(root).unwrap()).unwrap(); | ||
73 | res.push((path.to_owned(), text)) | ||
74 | } | ||
75 | res | ||
76 | } | ||
diff --git a/crates/ra_vfs/src/lib.rs b/crates/ra_vfs/src/lib.rs new file mode 100644 index 000000000..4de07b093 --- /dev/null +++ b/crates/ra_vfs/src/lib.rs | |||
@@ -0,0 +1,350 @@ | |||
1 | //! VFS stands for Virtual File System. | ||
2 | //! | ||
3 | //! When doing analysis, we don't want to do any IO, we want to keep all source | ||
4 | //! code in memory. However, the actual source code is stored on disk, so you | ||
5 | //! need to get it into the memory in the first place somehow. VFS is the | ||
6 | //! component which does this. | ||
7 | //! | ||
8 | //! It is also responsible for watching the disk for changes, and for merging | ||
9 | //! editor state (modified, unsaved files) with disk state. | ||
10 | //! | ||
11 | //! VFS is based on a concept of roots: a set of directories on the file system | ||
12 | //! which are watched for changes. Typically, there will be a root for each | ||
13 | //! Cargo package. | ||
14 | mod arena; | ||
15 | mod io; | ||
16 | |||
17 | use std::{ | ||
18 | fmt, | ||
19 | mem, | ||
20 | thread, | ||
21 | cmp::Reverse, | ||
22 | path::{Path, PathBuf}, | ||
23 | ffi::OsStr, | ||
24 | sync::Arc, | ||
25 | fs, | ||
26 | }; | ||
27 | |||
28 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
29 | use relative_path::RelativePathBuf; | ||
30 | use crossbeam_channel::Receiver; | ||
31 | use walkdir::DirEntry; | ||
32 | use thread_worker::{WorkerHandle}; | ||
33 | |||
34 | use crate::{ | ||
35 | arena::{ArenaId, Arena}, | ||
36 | }; | ||
37 | |||
38 | pub use crate::io::TaskResult as VfsTask; | ||
39 | |||
40 | /// `RootFilter` is a predicate that checks if a file can belong to a root. If | ||
41 | /// several filters match a file (nested dirs), the most nested one wins. | ||
42 | struct RootFilter { | ||
43 | root: PathBuf, | ||
44 | file_filter: fn(&Path) -> bool, | ||
45 | } | ||
46 | |||
47 | impl RootFilter { | ||
48 | fn new(root: PathBuf) -> RootFilter { | ||
49 | RootFilter { | ||
50 | root, | ||
51 | file_filter: has_rs_extension, | ||
52 | } | ||
53 | } | ||
54 | /// Check if this root can contain `path`. NB: even if this returns | ||
55 | /// true, the `path` might actually be contained in some nested root. | ||
56 | fn can_contain(&self, path: &Path) -> Option<RelativePathBuf> { | ||
57 | if !(self.file_filter)(path) { | ||
58 | return None; | ||
59 | } | ||
60 | if !(path.starts_with(&self.root)) { | ||
61 | return None; | ||
62 | } | ||
63 | let path = path.strip_prefix(&self.root).unwrap(); | ||
64 | let path = RelativePathBuf::from_path(path).unwrap(); | ||
65 | Some(path) | ||
66 | } | ||
67 | } | ||
68 | |||
69 | fn has_rs_extension(p: &Path) -> bool { | ||
70 | p.extension() == Some(OsStr::new("rs")) | ||
71 | } | ||
72 | |||
73 | #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] | ||
74 | pub struct VfsRoot(pub u32); | ||
75 | |||
76 | impl ArenaId for VfsRoot { | ||
77 | fn from_u32(idx: u32) -> VfsRoot { | ||
78 | VfsRoot(idx) | ||
79 | } | ||
80 | fn to_u32(self) -> u32 { | ||
81 | self.0 | ||
82 | } | ||
83 | } | ||
84 | |||
85 | #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] | ||
86 | pub struct VfsFile(pub u32); | ||
87 | |||
88 | impl ArenaId for VfsFile { | ||
89 | fn from_u32(idx: u32) -> VfsFile { | ||
90 | VfsFile(idx) | ||
91 | } | ||
92 | fn to_u32(self) -> u32 { | ||
93 | self.0 | ||
94 | } | ||
95 | } | ||
96 | |||
97 | struct VfsFileData { | ||
98 | root: VfsRoot, | ||
99 | path: RelativePathBuf, | ||
100 | text: Arc<String>, | ||
101 | } | ||
102 | |||
103 | pub struct Vfs { | ||
104 | roots: Arena<VfsRoot, RootFilter>, | ||
105 | files: Arena<VfsFile, VfsFileData>, | ||
106 | root2files: FxHashMap<VfsRoot, FxHashSet<VfsFile>>, | ||
107 | pending_changes: Vec<VfsChange>, | ||
108 | worker: io::Worker, | ||
109 | worker_handle: WorkerHandle, | ||
110 | } | ||
111 | |||
112 | impl fmt::Debug for Vfs { | ||
113 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
114 | f.write_str("Vfs { ... }") | ||
115 | } | ||
116 | } | ||
117 | |||
118 | impl Vfs { | ||
119 | pub fn new(mut roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) { | ||
120 | let (worker, worker_handle) = io::start(); | ||
121 | |||
122 | let mut res = Vfs { | ||
123 | roots: Arena::default(), | ||
124 | files: Arena::default(), | ||
125 | root2files: FxHashMap::default(), | ||
126 | worker, | ||
127 | worker_handle, | ||
128 | pending_changes: Vec::new(), | ||
129 | }; | ||
130 | |||
131 | // A hack to make nesting work. | ||
132 | roots.sort_by_key(|it| Reverse(it.as_os_str().len())); | ||
133 | for (i, path) in roots.iter().enumerate() { | ||
134 | let root = res.roots.alloc(RootFilter::new(path.clone())); | ||
135 | res.root2files.insert(root, Default::default()); | ||
136 | let nested = roots[..i] | ||
137 | .iter() | ||
138 | .filter(|it| it.starts_with(path)) | ||
139 | .map(|it| it.clone()) | ||
140 | .collect::<Vec<_>>(); | ||
141 | let filter = move |entry: &DirEntry| { | ||
142 | if entry.file_type().is_file() { | ||
143 | has_rs_extension(entry.path()) | ||
144 | } else { | ||
145 | nested.iter().all(|it| it != entry.path()) | ||
146 | } | ||
147 | }; | ||
148 | let task = io::Task { | ||
149 | root, | ||
150 | path: path.clone(), | ||
151 | filter: Box::new(filter), | ||
152 | }; | ||
153 | res.worker.inp.send(task); | ||
154 | } | ||
155 | let roots = res.roots.iter().map(|(id, _)| id).collect(); | ||
156 | (res, roots) | ||
157 | } | ||
158 | |||
159 | pub fn root2path(&self, root: VfsRoot) -> PathBuf { | ||
160 | self.roots[root].root.clone() | ||
161 | } | ||
162 | |||
163 | pub fn path2file(&self, path: &Path) -> Option<VfsFile> { | ||
164 | if let Some((_root, _path, Some(file))) = self.find_root(path) { | ||
165 | return Some(file); | ||
166 | } | ||
167 | None | ||
168 | } | ||
169 | |||
170 | pub fn file2path(&self, file: VfsFile) -> PathBuf { | ||
171 | let rel_path = &self.files[file].path; | ||
172 | let root_path = &self.roots[self.files[file].root].root; | ||
173 | rel_path.to_path(root_path) | ||
174 | } | ||
175 | |||
176 | pub fn file_for_path(&self, path: &Path) -> Option<VfsFile> { | ||
177 | if let Some((_root, _path, Some(file))) = self.find_root(path) { | ||
178 | return Some(file); | ||
179 | } | ||
180 | None | ||
181 | } | ||
182 | |||
183 | pub fn load(&mut self, path: &Path) -> Option<VfsFile> { | ||
184 | if let Some((root, rel_path, file)) = self.find_root(path) { | ||
185 | return if let Some(file) = file { | ||
186 | Some(file) | ||
187 | } else { | ||
188 | let text = fs::read_to_string(path).unwrap_or_default(); | ||
189 | let text = Arc::new(text); | ||
190 | let file = self.add_file(root, rel_path.clone(), Arc::clone(&text)); | ||
191 | let change = VfsChange::AddFile { | ||
192 | file, | ||
193 | text, | ||
194 | root, | ||
195 | path: rel_path, | ||
196 | }; | ||
197 | self.pending_changes.push(change); | ||
198 | Some(file) | ||
199 | }; | ||
200 | } | ||
201 | None | ||
202 | } | ||
203 | |||
204 | pub fn task_receiver(&self) -> &Receiver<io::TaskResult> { | ||
205 | &self.worker.out | ||
206 | } | ||
207 | |||
208 | pub fn handle_task(&mut self, task: io::TaskResult) { | ||
209 | let mut files = Vec::new(); | ||
210 | // While we were scanning the root in the background, a file might have | ||
211 | // been open in the editor, so we need to account for that. | ||
212 | let existing = self.root2files[&task.root] | ||
213 | .iter() | ||
214 | .map(|&file| (self.files[file].path.clone(), file)) | ||
215 | .collect::<FxHashMap<_, _>>(); | ||
216 | for (path, text) in task.files { | ||
217 | if let Some(&file) = existing.get(&path) { | ||
218 | let text = Arc::clone(&self.files[file].text); | ||
219 | files.push((file, path, text)); | ||
220 | continue; | ||
221 | } | ||
222 | let text = Arc::new(text); | ||
223 | let file = self.add_file(task.root, path.clone(), Arc::clone(&text)); | ||
224 | files.push((file, path, text)); | ||
225 | } | ||
226 | |||
227 | let change = VfsChange::AddRoot { | ||
228 | root: task.root, | ||
229 | files, | ||
230 | }; | ||
231 | self.pending_changes.push(change); | ||
232 | } | ||
233 | |||
234 | pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> { | ||
235 | let mut res = None; | ||
236 | if let Some((root, path, file)) = self.find_root(path) { | ||
237 | let text = Arc::new(text); | ||
238 | let change = if let Some(file) = file { | ||
239 | res = Some(file); | ||
240 | self.change_file(file, Arc::clone(&text)); | ||
241 | VfsChange::ChangeFile { file, text } | ||
242 | } else { | ||
243 | let file = self.add_file(root, path.clone(), Arc::clone(&text)); | ||
244 | res = Some(file); | ||
245 | VfsChange::AddFile { | ||
246 | file, | ||
247 | text, | ||
248 | root, | ||
249 | path, | ||
250 | } | ||
251 | }; | ||
252 | self.pending_changes.push(change); | ||
253 | } | ||
254 | res | ||
255 | } | ||
256 | |||
257 | pub fn change_file_overlay(&mut self, path: &Path, new_text: String) { | ||
258 | if let Some((_root, _path, file)) = self.find_root(path) { | ||
259 | let file = file.expect("can't change a file which wasn't added"); | ||
260 | let text = Arc::new(new_text); | ||
261 | self.change_file(file, Arc::clone(&text)); | ||
262 | let change = VfsChange::ChangeFile { file, text }; | ||
263 | self.pending_changes.push(change); | ||
264 | } | ||
265 | } | ||
266 | |||
267 | pub fn remove_file_overlay(&mut self, path: &Path) -> Option<VfsFile> { | ||
268 | let mut res = None; | ||
269 | if let Some((root, path, file)) = self.find_root(path) { | ||
270 | let file = file.expect("can't remove a file which wasn't added"); | ||
271 | res = Some(file); | ||
272 | let full_path = path.to_path(&self.roots[root].root); | ||
273 | let change = if let Ok(text) = fs::read_to_string(&full_path) { | ||
274 | let text = Arc::new(text); | ||
275 | self.change_file(file, Arc::clone(&text)); | ||
276 | VfsChange::ChangeFile { file, text } | ||
277 | } else { | ||
278 | self.remove_file(file); | ||
279 | VfsChange::RemoveFile { root, file, path } | ||
280 | }; | ||
281 | self.pending_changes.push(change); | ||
282 | } | ||
283 | res | ||
284 | } | ||
285 | |||
286 | pub fn commit_changes(&mut self) -> Vec<VfsChange> { | ||
287 | mem::replace(&mut self.pending_changes, Vec::new()) | ||
288 | } | ||
289 | |||
290 | /// Shut down the VFS and terminate the background watching thread. | ||
291 | pub fn shutdown(self) -> thread::Result<()> { | ||
292 | let _ = self.worker.shutdown(); | ||
293 | self.worker_handle.shutdown() | ||
294 | } | ||
295 | |||
296 | fn add_file(&mut self, root: VfsRoot, path: RelativePathBuf, text: Arc<String>) -> VfsFile { | ||
297 | let data = VfsFileData { root, path, text }; | ||
298 | let file = self.files.alloc(data); | ||
299 | self.root2files.get_mut(&root).unwrap().insert(file); | ||
300 | file | ||
301 | } | ||
302 | |||
303 | fn change_file(&mut self, file: VfsFile, new_text: Arc<String>) { | ||
304 | self.files[file].text = new_text; | ||
305 | } | ||
306 | |||
307 | fn remove_file(&mut self, file: VfsFile) { | ||
308 | //FIXME: use arena with removal | ||
309 | self.files[file].text = Default::default(); | ||
310 | self.files[file].path = Default::default(); | ||
311 | let root = self.files[file].root; | ||
312 | let removed = self.root2files.get_mut(&root).unwrap().remove(&file); | ||
313 | assert!(removed); | ||
314 | } | ||
315 | |||
316 | fn find_root(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf, Option<VfsFile>)> { | ||
317 | let (root, path) = self | ||
318 | .roots | ||
319 | .iter() | ||
320 | .find_map(|(root, data)| data.can_contain(path).map(|it| (root, it)))?; | ||
321 | let file = self.root2files[&root] | ||
322 | .iter() | ||
323 | .map(|&it| it) | ||
324 | .find(|&file| self.files[file].path == path); | ||
325 | Some((root, path, file)) | ||
326 | } | ||
327 | } | ||
328 | |||
329 | #[derive(Debug, Clone)] | ||
330 | pub enum VfsChange { | ||
331 | AddRoot { | ||
332 | root: VfsRoot, | ||
333 | files: Vec<(VfsFile, RelativePathBuf, Arc<String>)>, | ||
334 | }, | ||
335 | AddFile { | ||
336 | root: VfsRoot, | ||
337 | file: VfsFile, | ||
338 | path: RelativePathBuf, | ||
339 | text: Arc<String>, | ||
340 | }, | ||
341 | RemoveFile { | ||
342 | root: VfsRoot, | ||
343 | file: VfsFile, | ||
344 | path: RelativePathBuf, | ||
345 | }, | ||
346 | ChangeFile { | ||
347 | file: VfsFile, | ||
348 | text: Arc<String>, | ||
349 | }, | ||
350 | } | ||
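One behaviour of find_root that is easy to miss: Vfs::new sorts the roots longest-path-first, so when roots nest (as with Cargo packages inside a workspace) a file is owned by the most nested root that contains it. A minimal sketch of that rule; the paths are hypothetical and only overlays are used, so nothing has to exist on disk:

```rust
use std::path::PathBuf;
use ra_vfs::Vfs;

fn nesting_demo() {
    let (mut vfs, roots) = Vfs::new(vec![
        PathBuf::from("/demo/a"),
        PathBuf::from("/demo/a/b"),
    ]);
    // Drain the (empty) background scans so the shutdown below is clean.
    for _ in 0..roots.len() {
        let task = vfs.task_receiver().recv().unwrap();
        vfs.handle_task(task);
    }
    // The overlay lands in the /demo/a/b root, not in /demo/a.
    let file = vfs
        .add_file_overlay(&PathBuf::from("/demo/a/b/lib.rs"), "fn f() {}".to_string())
        .unwrap();
    // file2path rebuilds the absolute path from the owning (nested) root.
    assert_eq!(vfs.file2path(file), PathBuf::from("/demo/a/b/lib.rs"));
    vfs.shutdown().unwrap();
}
```

The tests in the next file exercise the same API end to end against a real temporary directory.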
diff --git a/crates/ra_vfs/tests/vfs.rs b/crates/ra_vfs/tests/vfs.rs new file mode 100644 index 000000000..f56fc4603 --- /dev/null +++ b/crates/ra_vfs/tests/vfs.rs | |||
@@ -0,0 +1,101 @@ | |||
1 | use std::{ | ||
2 | fs, | ||
3 | collections::HashSet, | ||
4 | }; | ||
5 | |||
6 | use tempfile::tempdir; | ||
7 | |||
8 | use ra_vfs::{Vfs, VfsChange}; | ||
9 | |||
10 | #[test] | ||
11 | fn test_vfs_works() -> std::io::Result<()> { | ||
12 | let files = [ | ||
13 | ("a/foo.rs", "hello"), | ||
14 | ("a/bar.rs", "world"), | ||
15 | ("a/b/baz.rs", "nested hello"), | ||
16 | ]; | ||
17 | |||
18 | let dir = tempdir()?; | ||
19 | for (path, text) in files.iter() { | ||
20 | let file_path = dir.path().join(path); | ||
21 | fs::create_dir_all(file_path.parent().unwrap())?; | ||
22 | fs::write(file_path, text)? | ||
23 | } | ||
24 | |||
25 | let a_root = dir.path().join("a"); | ||
26 | let b_root = dir.path().join("a/b"); | ||
27 | |||
28 | let (mut vfs, _) = Vfs::new(vec![a_root, b_root]); | ||
29 | for _ in 0..2 { | ||
30 | let task = vfs.task_receiver().recv().unwrap(); | ||
31 | vfs.handle_task(task); | ||
32 | } | ||
33 | { | ||
34 | let files = vfs | ||
35 | .commit_changes() | ||
36 | .into_iter() | ||
37 | .flat_map(|change| { | ||
38 | let files = match change { | ||
39 | VfsChange::AddRoot { files, .. } => files, | ||
40 | _ => panic!("unexpected change"), | ||
41 | }; | ||
42 | files.into_iter().map(|(_id, path, text)| { | ||
43 | let text: String = (&*text).clone(); | ||
44 | (format!("{}", path.display()), text) | ||
45 | }) | ||
46 | }) | ||
47 | .collect::<HashSet<_>>(); | ||
48 | |||
49 | let expected_files = [ | ||
50 | ("foo.rs", "hello"), | ||
51 | ("bar.rs", "world"), | ||
52 | ("baz.rs", "nested hello"), | ||
53 | ] | ||
54 | .iter() | ||
55 | .map(|(path, text)| (path.to_string(), text.to_string())) | ||
56 | .collect::<HashSet<_>>(); | ||
57 | |||
58 | assert_eq!(files, expected_files); | ||
59 | } | ||
60 | |||
61 | vfs.add_file_overlay(&dir.path().join("a/b/baz.rs"), "quux".to_string()); | ||
62 | let change = vfs.commit_changes().pop().unwrap(); | ||
63 | match change { | ||
64 | VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "quux"), | ||
65 | _ => panic!("unexpected change"), | ||
66 | } | ||
67 | |||
68 | vfs.change_file_overlay(&dir.path().join("a/b/baz.rs"), "m".to_string()); | ||
69 | let change = vfs.commit_changes().pop().unwrap(); | ||
70 | match change { | ||
71 | VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "m"), | ||
72 | _ => panic!("unexpected change"), | ||
73 | } | ||
74 | |||
75 | vfs.remove_file_overlay(&dir.path().join("a/b/baz.rs")); | ||
76 | let change = vfs.commit_changes().pop().unwrap(); | ||
77 | match change { | ||
78 | VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "nested hello"), | ||
79 | _ => panic!("unexpected change"), | ||
80 | } | ||
81 | |||
82 | vfs.add_file_overlay(&dir.path().join("a/b/spam.rs"), "spam".to_string()); | ||
83 | let change = vfs.commit_changes().pop().unwrap(); | ||
84 | match change { | ||
85 | VfsChange::AddFile { text, path, .. } => { | ||
86 | assert_eq!(&*text, "spam"); | ||
87 | assert_eq!(path, "spam.rs"); | ||
88 | } | ||
89 | _ => panic!("unexpected change"), | ||
90 | } | ||
91 | |||
92 | vfs.remove_file_overlay(&dir.path().join("a/b/spam.rs")); | ||
93 | let change = vfs.commit_changes().pop().unwrap(); | ||
94 | match change { | ||
95 | VfsChange::RemoveFile { .. } => (), | ||
96 | _ => panic!("unexpected change"), | ||
97 | } | ||
98 | |||
99 | vfs.shutdown().unwrap(); | ||
100 | Ok(()) | ||
101 | } | ||
diff --git a/crates/thread_worker/src/lib.rs b/crates/thread_worker/src/lib.rs index e558559ef..12e8bf17e 100644 --- a/crates/thread_worker/src/lib.rs +++ b/crates/thread_worker/src/lib.rs | |||
@@ -30,13 +30,16 @@ where | |||
30 | impl<I, O> Worker<I, O> { | 30 | impl<I, O> Worker<I, O> { |
31 | /// Stops the worker. Returns the message receiver to fetch results which | 31 | /// Stops the worker. Returns the message receiver to fetch results which |
32 | /// have become ready before the worker is stopped. | 32 | /// have become ready before the worker is stopped. |
33 | pub fn stop(self) -> Receiver<O> { | 33 | pub fn shutdown(self) -> Receiver<O> { |
34 | self.out | 34 | self.out |
35 | } | 35 | } |
36 | 36 | ||
37 | pub fn send(&self, item: I) { | 37 | pub fn send(&self, item: I) { |
38 | self.inp.send(item) | 38 | self.inp.send(item) |
39 | } | 39 | } |
40 | pub fn recv(&self) -> Option<O> { | ||
41 | self.out.recv() | ||
42 | } | ||
40 | } | 43 | } |
41 | 44 | ||
42 | impl WorkerHandle { | 45 | impl WorkerHandle { |
@@ -45,11 +48,11 @@ impl WorkerHandle { | |||
45 | WorkerHandle { | 48 | WorkerHandle { |
46 | name, | 49 | name, |
47 | thread, | 50 | thread, |
48 | bomb: DropBomb::new(format!("WorkerHandle {} was not stopped", name)), | 51 | bomb: DropBomb::new(format!("WorkerHandle {} was not shutdown", name)), |
49 | } | 52 | } |
50 | } | 53 | } |
51 | 54 | ||
52 | pub fn stop(mut self) -> thread::Result<()> { | 55 | pub fn shutdown(mut self) -> thread::Result<()> { |
53 | log::info!("waiting for {} to finish ...", self.name); | 56 | log::info!("waiting for {} to finish ...", self.name); |
54 | let name = self.name; | 57 | let name = self.name; |
55 | self.bomb.defuse(); | 58 | self.bomb.defuse(); |